Dataset schema (column name, type, and observed value range):

    instance_id                string (length 10 to 57)
    base_commit                string (length 40)
    created_at                 date string (2014-04-30 14:58:36 to 2025-04-30 20:14:11)
    environment_setup_commit   string (length 40)
    hints_text                 string (length 0 to 273k)
    patch                      string (length 251 to 7.06M)
    problem_statement          string (length 11 to 52.5k)
    repo                       string (length 7 to 53)
    test_patch                 string (length 231 to 997k)
    meta                       dict
    version                    string (851 distinct values)
    install_config             dict
    requirements               string (length 93 to 34.2k)
    environment                string (length 760 to 20.5k)
    FAIL_TO_PASS               list (length 1 to 9.39k)
    FAIL_TO_FAIL               list (length 0 to 2.69k)
    PASS_TO_PASS               list (length 0 to 7.87k)
    PASS_TO_FAIL               list (length 0 to 192)
    license_name               string (55 distinct values)
    __index_level_0__          int64 (0 to 21.4k)
    before_filepaths           list (length 1 to 105)
    after_filepaths            list (length 1 to 105)
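For orientation, the sketch below shows how rows with this schema could be read with the Hugging Face datasets library. The dataset path "example-org/swe-task-instances" is a hypothetical placeholder (the real dataset name is not given here); the column names are the ones listed above.

from datasets import load_dataset

# "example-org/swe-task-instances" is a hypothetical placeholder path.
ds = load_dataset("example-org/swe-task-instances", split="train")

row = ds[0]
print(row["instance_id"], row["repo"], row["version"])
print("gold patch size:", len(row["patch"]))
print("files modified:", row["before_filepaths"])
print("tests expected to flip to passing:", row["FAIL_TO_PASS"])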
Example row 1

instance_id: conan-io__conan-2504
base_commit: 869f10e260b727162b4b502f2af46525327ea063
created_at: 2018-02-20 17:36:08
environment_setup_commit: c8ee776992121b27d2dcb54be835b501326254bc

patch:
diff --git a/conans/client/build_requires.py b/conans/client/build_requires.py index d7235d13c..a7716c9cb 100644 --- a/conans/client/build_requires.py +++ b/conans/client/build_requires.py @@ -105,7 +105,8 @@ class BuildRequires(object): reference = str(reference) output.info("Installing build requirements of: %s" % (reference or "PROJECT")) - output.info("Build requires: [%s]" % ", ".join(str(r) for r in build_requires.values())) + output.info("Build requires: [%s]" + % ", ".join(str(r) for r in build_requires.values())) # clear root package options, they won't match the build-require conanfile.build_requires_options.clear_unscoped_options() virtual = self._loader.load_virtual(build_requires.values(), scope_options=False, diff --git a/conans/client/client_cache.py b/conans/client/client_cache.py index b37344331..7206db020 100644 --- a/conans/client/client_cache.py +++ b/conans/client/client_cache.py @@ -129,17 +129,13 @@ class ClientCache(SimplePaths): if self._default_profile is None: if not os.path.exists(self.default_profile_path): self._output.writeln("Auto detecting your dev setup to initialize the " - "default profile (%s)" % self.default_profile_path, - Color.BRIGHT_YELLOW) + "default profile (%s)" % self.default_profile_path, Color.BRIGHT_YELLOW) default_settings = detect_defaults_settings(self._output) self._output.writeln("Default settings", Color.BRIGHT_YELLOW) - self._output.writeln("\n".join(["\t%s=%s" % (k, v) for (k, v) in default_settings]), - Color.BRIGHT_YELLOW) - self._output.writeln("*** You can change them in %s ***" % self.default_profile_path, - Color.BRIGHT_MAGENTA) - self._output.writeln("*** Or override with -s compiler='other' -s ...s***\n\n", - Color.BRIGHT_MAGENTA) + self._output.writeln("\n".join(["\t%s=%s" % (k, v) for (k, v) in default_settings]), Color.BRIGHT_YELLOW) + self._output.writeln("*** You can change them in %s ***" % self.default_profile_path, Color.BRIGHT_MAGENTA) + self._output.writeln("*** Or override with -s compiler='other' -s ...s***\n\n", Color.BRIGHT_MAGENTA) self._default_profile = Profile() tmp = OrderedDict(default_settings) @@ -178,7 +174,7 @@ class ClientCache(SimplePaths): try: packages = [dirname for dirname in os.listdir(packages_dir) if os.path.isdir(os.path.join(packages_dir, dirname))] - except OSError: # if there isn't any package folder + except: # if there isn't any package folder packages = [] return packages @@ -189,7 +185,7 @@ class ClientCache(SimplePaths): try: builds = [dirname for dirname in os.listdir(builds_dir) if os.path.isdir(os.path.join(builds_dir, dirname))] - except OSError: # if there isn't any package folder + except: # if there isn't any package folder builds = [] return builds diff --git a/conans/client/command.py b/conans/client/command.py index af8e2e8b0..425f9c04e 100644 --- a/conans/client/command.py +++ b/conans/client/command.py @@ -435,6 +435,7 @@ class Command(object): else: self._outputer.info(deps_graph, graph_updates_info, only, args.remote, args.package_filter, args.paths, project_reference) + return def source(self, *args): """ Calls your local conanfile.py 'source()' method. 
@@ -837,6 +838,7 @@ class Command(object): prog="conan upload") parser.add_argument('pattern', help='Pattern or package recipe reference, ' 'e.g., "openssl/*", "MyPackage/1.2@user/channel"') + # TODO: packageparser.add_argument('package', help='user name') parser.add_argument("-p", "--package", default=None, help='package ID to upload', action=OnceArgument) parser.add_argument("-r", "--remote", help='upload to this specific remote', @@ -956,7 +958,7 @@ class Command(object): parser_new = subparsers.add_parser('new', help='Creates a new empty profile') parser_new.add_argument('profile', help="name for the profile in the '.conan/profiles' " - "folder or path and name for a profile file") + "folder or path and name for a profile file") parser_new.add_argument("--detect", action='store_true', default=False, help='Autodetect settings and fill [settings] section') @@ -969,7 +971,7 @@ class Command(object): parser_get = subparsers.add_parser('get', help='Get a profile key') parser_get.add_argument('item', help='key of the value to get, e.g: settings.compiler') parser_get.add_argument('profile', help="name of the profile in the '.conan/profiles' " - "folder or path to a profile file") + "folder or path to a profile file") parser_remove = subparsers.add_parser('remove', help='Remove a profile key') parser_remove.add_argument('item', help='key, e.g: settings.compiler') @@ -1000,6 +1002,8 @@ class Command(object): elif args.subcommand == "remove": self._conan.delete_profile_key(profile, args.item) + return + def get(self, *args): """ Gets a file or list a directory of a given reference or package. """ @@ -1042,6 +1046,8 @@ class Command(object): self._conan.export_alias(args.reference, args.target) + return + def _show_help(self): """ prints a summary of all commands """ diff --git a/conans/client/importer.py b/conans/client/importer.py index 8a9af6b32..a66c222ae 100644 --- a/conans/client/importer.py +++ b/conans/client/importer.py @@ -116,7 +116,7 @@ class _FileImporter(object): self.copied_files = set() def __call__(self, pattern, dst="", src="", root_package=None, folder=False, - ignore_case=False, excludes=None, keep_path=True): + ignore_case=False, excludes=None): """ param pattern: an fnmatch file pattern of the files that should be copied. Eg. 
*.dll param dst: the destination local folder, wrt to current conanfile dir, to which @@ -136,7 +136,7 @@ class _FileImporter(object): final_dst_path = os.path.join(real_dst_folder, name) if folder else real_dst_folder file_copier = FileCopier(matching_path, final_dst_path) files = file_copier(pattern, src=src, links=True, ignore_case=ignore_case, - excludes=excludes, keep_path=keep_path) + excludes=excludes) self.copied_files.update(files) def _get_folders(self, pattern): diff --git a/conans/client/loader.py b/conans/client/loader.py index b072b7d26..824217cdb 100644 --- a/conans/client/loader.py +++ b/conans/client/loader.py @@ -8,7 +8,6 @@ from conans.model.options import OptionsValues from conans.model.ref import ConanFileReference from conans.model.settings import Settings from conans.model.values import Values -from conans.model.requires import Requirements from conans.util.files import load @@ -96,11 +95,6 @@ class ConanFileLoader(object): for requirement_text in parser.requirements: ConanFileReference.loads(requirement_text) # Raise if invalid conanfile.requires.add(requirement_text) - for build_requirement_text in parser.build_requirements: - ConanFileReference.loads(build_requirement_text) - if not hasattr(conanfile, "build_requires"): - conanfile.build_requires = [] - conanfile.build_requires.append(build_requirement_text) conanfile.generators = parser.generators diff --git a/conans/client/loader_parse.py b/conans/client/loader_parse.py index e1a47365b..0e52ae2c4 100644 --- a/conans/client/loader_parse.py +++ b/conans/client/loader_parse.py @@ -95,7 +95,7 @@ class ConanFileTextLoader(object): def __init__(self, input_text): # Prefer composition over inheritance, the __getattr__ was breaking things self._config_parser = ConfigParser(input_text, ["requires", "generators", "options", - "imports", "build_requires"], parse_lines=True) + "imports"], parse_lines=True) @property def requirements(self): @@ -104,13 +104,6 @@ class ConanFileTextLoader(object): """ return [r.strip() for r in self._config_parser.requires.splitlines()] - @property - def build_requirements(self): - """returns a list of build_requires - EX: "OpenCV/2.4.10@phil/stable" - """ - return [r.strip() for r in self._config_parser.build_requires.splitlines()] - @property def options(self): return self._config_parser.options @@ -118,7 +111,7 @@ class ConanFileTextLoader(object): @property def _import_parameters(self): def _parse_args(param_string): - root_package, ignore_case, folder, excludes, keep_path = None, False, False, None, True + root_package, ignore_case, folder, excludes = None, False, False, None params = param_string.split(",") params = [p.split("=") for p in params if p] for (var, value) in params: @@ -132,11 +125,9 @@ class ConanFileTextLoader(object): folder = (value.lower() == "true") elif var == "excludes": excludes = value.split() - elif var == "keep_path": - keep_path = (value.lower() == "true") else: raise Exception("Invalid imports. 
Unknown argument %s" % var) - return root_package, ignore_case, folder, excludes, keep_path + return root_package, ignore_case, folder, excludes def _parse_import(line): pair = line.split("->") @@ -165,9 +156,9 @@ class ConanFileTextLoader(object): params = tokens[1] else: params = "" - root_package, ignore_case, folder, excludes, keep_path = _parse_args(params) + root_package, ignore_case, folder, excludes = _parse_args(params) pattern, dest, src = _parse_import(line) - ret.append((pattern, dest, src, root_package, folder, ignore_case, excludes, keep_path)) + ret.append((pattern, dest, src, root_package, folder, ignore_case, excludes)) except Exception as e: raise ConanException("%s\n%s" % (invalid_line_msg, str(e))) return ret diff --git a/conans/client/remote_registry.py b/conans/client/remote_registry.py index 61ab50519..93fcc5225 100644 --- a/conans/client/remote_registry.py +++ b/conans/client/remote_registry.py @@ -191,7 +191,7 @@ class RemoteRegistry(object): if insert is not None: try: insert_index = int(insert) - except ValueError: + except: raise ConanException("insert argument must be an integer") remotes.pop(remote_name, None) # Remove if exists (update) remotes_list = list(remotes.items()) diff --git a/conans/client/rest/rest_client.py b/conans/client/rest/rest_client.py index 7af2aac66..df21c11b0 100644 --- a/conans/client/rest/rest_client.py +++ b/conans/client/rest/rest_client.py @@ -11,7 +11,7 @@ import os from conans.model.manifest import FileTreeManifest from conans.client.rest.uploader_downloader import Uploader, Downloader from conans.model.ref import ConanFileReference -from six.moves.urllib.parse import urlsplit, parse_qs, urlencode +from six.moves.urllib.parse import urlsplit, parse_qs, urlencode, urlparse, urljoin from conans import COMPLEX_SEARCH_CAPABILITY from conans.search.search import filter_packages from conans.model.info import ConanInfo @@ -103,7 +103,7 @@ class RestApiClient(object): # Obtain the URLs url = "%s/conans/%s/digest" % (self._remote_api_url, "/".join(conan_reference)) - urls = self._get_json(url) + urls = self._get_file_to_url_dict(url) # Get the digest contents = self.download_files(urls) @@ -118,7 +118,7 @@ class RestApiClient(object): url = "%s/conans/%s/packages/%s/digest" % (self._remote_api_url, "/".join(package_reference.conan), package_reference.package_id) - urls = self._get_json(url) + urls = self._get_file_to_url_dict(url) # Get the digest contents = self.download_files(urls) @@ -132,7 +132,7 @@ class RestApiClient(object): url = "%s/conans/%s/packages/%s/download_urls" % (self._remote_api_url, "/".join(package_reference.conan), package_reference.package_id) - urls = self._get_json(url) + urls = self._get_file_to_url_dict(url) if not urls: raise NotFoundException("Package not found!") @@ -149,7 +149,7 @@ class RestApiClient(object): """Gets a dict of filename:contents from conans""" # Get the conanfile snapshot first url = "%s/conans/%s/download_urls" % (self._remote_api_url, "/".join(conan_reference)) - urls = self._get_json(url) + urls = self._get_file_to_url_dict(url) urls = filter_files_function(urls) if not urls: @@ -164,7 +164,7 @@ class RestApiClient(object): url = "%s/conans/%s/packages/%s/download_urls" % (self._remote_api_url, "/".join(package_reference.conan), package_reference.package_id) - urls = self._get_json(url) + urls = self._get_file_to_url_dict(url) if not urls: raise NotFoundException("Package not found!") # TODO: Get fist an snapshot and compare files and download only required? 
@@ -198,7 +198,7 @@ class RestApiClient(object): url = "%s/conans/%s/upload_urls" % (self._remote_api_url, "/".join(conan_reference)) filesizes = {filename.replace("\\", "/"): os.stat(abs_path).st_size for filename, abs_path in files_to_upload.items()} - urls = self._get_json(url, data=filesizes) + urls = self._get_file_to_url_dict(url, data=filesizes) self.upload_files(urls, files_to_upload, self._output, retry, retry_wait) if deleted: self._remove_conanfile_files(conan_reference, deleted) @@ -230,7 +230,7 @@ class RestApiClient(object): filesizes = {filename: os.stat(abs_path).st_size for filename, abs_path in files_to_upload.items()} self._output.rewrite_line("Requesting upload permissions...") - urls = self._get_json(url, data=filesizes) + urls = self._get_file_to_url_dict(url, data=filesizes) self._output.rewrite_line("Requesting upload permissions...Done!") self._output.writeln("") self.upload_files(urls, files_to_upload, self._output, retry, retry_wait) @@ -388,6 +388,20 @@ class RestApiClient(object): json=payload) return response + def _complete_url(self, url): + """ Ensures that an url is absolute by completing relative urls with + the remote url. urls that are already absolute are not modified. + """ + if bool(urlparse(url).netloc): + return url + return urljoin(self.remote_url, url) + + def _get_file_to_url_dict(self, url, data=None): + """Call to url and decode the json returning a dict of {filepath: url} dict + converting the url to a complete url when needed""" + urls = self._get_json(url, data=data) + return {filepath: self._complete_url(url) for filepath, url in urls.items()} + def _get_json(self, url, data=None): t1 = time.time() headers = self.custom_headers @@ -509,7 +523,7 @@ class RestApiClient(object): "/".join(conan_reference), package_id) try: - urls = self._get_json(url) + urls = self._get_file_to_url_dict(url) except NotFoundException: if package_id: raise NotFoundException("Package %s:%s not found" % (conan_reference, package_id)) diff --git a/conans/client/tools/pkg_config.py b/conans/client/tools/pkg_config.py index 2e56ac4bc..00ec7e0b8 100644 --- a/conans/client/tools/pkg_config.py +++ b/conans/client/tools/pkg_config.py @@ -2,7 +2,6 @@ # -*- coding: utf-8 -*- import subprocess -from conans.errors import ConanException class PkgConfig(object): @@ -36,13 +35,10 @@ class PkgConfig(object): if self.define_variables: for name, value in self.define_variables.items(): command.append('--define-variable=%s=%s' % (name, value)) - try: - return self._cmd_output(command) - except subprocess.CalledProcessError as e: - raise ConanException('pkg-config command %s failed with error: %s' % (command, e)) + return self._cmd_output(command) def _get_option(self, option): - if option not in self.info: + if not option in self.info: self.info[option] = self._parse_output(option).split() return self.info[option] diff --git a/conans/server/conf/__init__.py b/conans/server/conf/__init__.py index 03114d6d8..1e2a88a63 100644 --- a/conans/server/conf/__init__.py +++ b/conans/server/conf/__init__.py @@ -49,7 +49,9 @@ class ConanServerConfigParser(ConfigParser): "users": get_env("CONAN_SERVER_USERS", None, environment)} def _get_file_conf(self, section, varname=None): - """Gets the section from config file or raises an exception""" + """ Gets the section or variable from config file. + If the queried element is not found an exception is raised. 
+ """ try: if not os.path.exists(self.config_filename): jwt_random_secret = ''.join(random.choice(string.ascii_letters) for _ in range(24)) @@ -71,7 +73,7 @@ class ConanServerConfigParser(ConfigParser): return section[varname] else: return self.items(section) - except NoSectionError as exc: + except NoSectionError: raise ConanException("No section '%s' found" % section) except Exception as exc: logger.debug(exc) @@ -80,51 +82,55 @@ class ConanServerConfigParser(ConfigParser): @property def ssl_enabled(self): - if self.env_config["ssl_enabled"]: - return self.env_config["ssl_enabled"] == "true" or \ - self.env_config["ssl_enabled"] == "1" - else: - return self._get_file_conf("server", "ssl_enabled").lower() == "true" or \ - self._get_file_conf("server", "ssl_enabled").lower() == "1" + try: + ssl_enabled = self._get_conf_server_string("ssl_enabled").lower() + return ssl_enabled == "true" or ssl_enabled == "1" + except ConanException: + return None @property def port(self): - if self.env_config["port"]: - return int(self.env_config["port"]) - else: - return int(self._get_file_conf("server", "port")) + return int(self._get_conf_server_string("port")) @property def public_port(self): - if self.env_config["public_port"]: - return int(self.env_config["public_port"]) - elif self._get_file_conf("server", "public_port"): - return int(self._get_file_conf("server", "public_port")) - else: + try: + return int(self._get_conf_server_string("public_port")) + except ConanException: return self.port @property def host_name(self): - return self._get_conf_server_string("host_name") + try: + return self._get_conf_server_string("host_name") + except ConanException: + return None @property def public_url(self): - protocol = "https" if self.ssl_enabled else "http" - port = ":%s" % self.public_port if self.public_port != 80 else "" - return "%s://%s%s/v1" % (protocol, self.host_name, port) + host_name = self.host_name + ssl_enabled = self.ssl_enabled + protocol_version = "v1" + if host_name is None and ssl_enabled is None: + # No hostname and ssl config means that the transfer and the + # logical endpoint are the same and a relative URL is sufficient + return protocol_version + elif host_name is None or ssl_enabled is None: + raise ConanException("'host_name' and 'ssl_enable' have to be defined together.") + else: + protocol = "https" if ssl_enabled else "http" + port = ":%s" % self.public_port if self.public_port != 80 else "" + return "%s://%s%s/%s" % (protocol, host_name, port, protocol_version) @property def disk_storage_path(self): """If adapter is disk, means the directory for storage""" - if self.env_config["disk_storage_path"]: - ret = self.env_config["disk_storage_path"] - else: - try: - ret = conan_expand_user(self._get_file_conf("server", "disk_storage_path")) - except ConanException: - # If storage_path is not defined in file, use the current dir - # So tests use test folder instead of user/.conan_server - ret = os.path.dirname(self.config_filename) + try: + ret = conan_expand_user(self._get_conf_server_string("disk_storage_path")) + except ConanException: + # If storage_path is not defined, use the current dir + # So tests use test folder instead of user/.conan_server + ret = os.path.dirname(self.config_filename) ret = os.path.normpath(ret) # Convert to O.S paths mkdir(ret) return ret @@ -170,45 +176,45 @@ class ConanServerConfigParser(ConfigParser): @property def jwt_secret(self): - tmp = self._get_conf_server_string("jwt_secret") - if not tmp: + try: + return 
self._get_conf_server_string("jwt_secret") + except ConanException: raise ConanException("'jwt_secret' setting is needed. Please, write a value " "in server.conf or set CONAN_JWT_SECRET env value.") - return tmp + @property def updown_secret(self): - tmp = self._get_conf_server_string("updown_secret") - if not tmp: + try: + return self._get_conf_server_string("updown_secret") + except ConanException: raise ConanException("'updown_secret' setting is needed. Please, write a value " "in server.conf or set CONAN_UPDOWN_SECRET env value.") - return self._get_conf_server_string("updown_secret") @property def store_adapter(self): return self._get_conf_server_string("store_adapter") def _get_conf_server_string(self, keyname): + """ Gets the value of a server config value either from the environment + or the config file. Values from the environment have priority. If the + value is not defined or empty an exception is raised. + """ if self.env_config[keyname]: return self.env_config[keyname] - else: - return self._get_file_conf("server", keyname) + + value = self._get_file_conf("server", keyname) + if value == "": + raise ConanException("no value for 'server.%s' is defined in the config file" % keyname) + return value @property def authorize_timeout(self): - if self.env_config["authorize_timeout"]: - return timedelta(seconds=int(self.env_config["authorize_timeout"])) - else: - tmp = self._get_file_conf("server", "authorize_timeout") - return timedelta(seconds=int(tmp)) + return timedelta(seconds=int(self._get_conf_server_string("authorize_timeout"))) @property def jwt_expire_time(self): - if self.env_config["jwt_expire_minutes"]: - return timedelta(minutes=int(self.env_config["jwt_expire_minutes"])) - else: - tmp = float(self._get_file_conf("server", "jwt_expire_minutes")) - return timedelta(minutes=tmp) + return timedelta(minutes=float(self._get_conf_server_string("jwt_expire_minutes"))) def get_file_manager(config, public_url=None, updown_auth_manager=None):
problem_statement:

Conan server can't be set up in a subdirectory of a domain. I want to run Conan at, say, `https://example.com/conan` behind Apache. Currently, there is no way to do this. If I set `host_name` to `example.com/conan` and `port` to `443`, the Conan client will try to upload to `https://example.com/conan:443/v1/...`

repo: conan-io/conan
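The gold patch above addresses this by completing relative download/upload URLs against the configured remote URL. Below is a minimal standalone sketch of that completion logic (the diff implements it as a `_complete_url` method on the REST client and imports `urlparse`/`urljoin` via `six.moves` for Python 2/3 compatibility); the host names here are arbitrary examples. The test diff that follows checks this behaviour for absolute URLs, relative URLs, and relative URLs under a subdirectory.

from urllib.parse import urljoin, urlparse  # the patch itself uses six.moves

def complete_url(remote_url, url):
    # Absolute URLs already carry a network location and are returned as-is;
    # relative URLs are resolved against the remote base URL, which preserves
    # any subdirectory such as /conan/ in the final address.
    if urlparse(url).netloc:
        return url
    return urljoin(remote_url, url)

print(complete_url("https://example.com/conan/", "v1/files/conanfile.py"))
# https://example.com/conan/v1/files/conanfile.py
print(complete_url("https://example.com/conan/", "https://cdn.example.com/x"))
# https://cdn.example.com/x  (absolute URL left untouched)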
diff --git a/conans/test/functional/conanfile_loader_test.py b/conans/test/functional/conanfile_loader_test.py index b4152992b..b2c8fbb51 100644 --- a/conans/test/functional/conanfile_loader_test.py +++ b/conans/test/functional/conanfile_loader_test.py @@ -97,8 +97,6 @@ OpenCV2:other_option=Cosa # file_content = '''[requires] OpenCV/2.4.10@phil/stable OpenCV2/2.4.10@phil/stable -[build_requires] -MyPkg/1.0.0@phil/stable [generators] one two @@ -123,11 +121,8 @@ OpenCV2:other_option=Cosa""") requirements = Requirements() requirements.add("OpenCV/2.4.10@phil/stable") requirements.add("OpenCV2/2.4.10@phil/stable") - build_requirements = [] - build_requirements.append("MyPkg/1.0.0@phil/stable") self.assertEquals(ret.requires, requirements) - self.assertEquals(ret.build_requires, build_requirements) self.assertEquals(ret.generators, ["one", "two"]) self.assertEquals(ret.options.values.dumps(), options1.dumps()) @@ -167,7 +162,6 @@ OpenCV/bin, * -> ./bin # I need this binaries OpenCV/lib, * -> ./lib @ root_package=Pkg OpenCV/data, * -> ./data @ root_package=Pkg, folder=True # Irrelevant docs, * -> ./docs @ root_package=Pkg, folder=True, ignore_case=True, excludes="a b c" # Other -licenses, * -> ./licenses @ root_package=Pkg, folder=True, ignore_case=True, excludes="a b c", keep_path=False # Other ''' tmp_dir = temp_folder() file_path = os.path.join(tmp_dir, "file.txt") @@ -177,12 +171,10 @@ licenses, * -> ./licenses @ root_package=Pkg, folder=True, ignore_case=True, exc ret.copy = Mock() ret.imports() - expected = [call(u'*', u'./bin', u'OpenCV/bin', None, False, False, None, True), - call(u'*', u'./lib', u'OpenCV/lib', u'Pkg', False, False, None, True), - call(u'*', u'./data', u'OpenCV/data', u'Pkg', True, False, None, True), - call(u'*', u'./docs', u'docs', u'Pkg', True, True, [u'"a', u'b', u'c"'], True), - call(u'*', u'./licenses', u'licenses', u'Pkg', True, True, [u'"a', u'b', u'c"'], - False)] + expected = [call(u'*', u'./bin', u'OpenCV/bin', None, False, False, None), + call(u'*', u'./lib', u'OpenCV/lib', u'Pkg', False, False, None), + call(u'*', u'./data', u'OpenCV/data', u'Pkg', True, False, None), + call(u'*', u'./docs', u'docs', u'Pkg', True, True, [u'"a', u'b', u'c"'])] self.assertEqual(ret.copy.call_args_list, expected) def test_package_settings(self): diff --git a/conans/test/functional/imports_tests.py b/conans/test/functional/imports_tests.py index 8ab8dbd57..5d93ee436 100644 --- a/conans/test/functional/imports_tests.py +++ b/conans/test/functional/imports_tests.py @@ -15,11 +15,6 @@ class TestConan(ConanFile): self.copy("*") """ -conanfile_txt = """ -[requires] -%s -""" - class ImportTest(unittest.TestCase): def _set_up(self): @@ -122,45 +117,3 @@ bin, *.dll -> @ excludes=Foo/*.dll Baz/*.dll self.assertTrue(os.path.exists(os.path.join(client.current_folder, "a.dll"))) self.assertFalse(os.path.exists(os.path.join(client.current_folder, "Foo/b.dll"))) self.assertFalse(os.path.exists(os.path.join(client.current_folder, "Baz/b.dll"))) - - def imports_keep_path_test(self): - client = TestClient() - client.save({"conanfile.py": conanfile % "LibC", - "path/to/license.txt": "LicenseC"}, clean_first=True) - client.run("export . lasote/testing") - - # keep_path = True AND conanfile.py - testconanfile = conanfile % "LibD" + " requires='LibC/0.1@lasote/testing'" - testconanfile += """ - def imports(self): - self.copy("*license*", dst="licenses", keep_path=True) -""" - client.save({"conanfile.py": testconanfile}, clean_first=True) - client.run("install . 
--build=missing") - self.assertTrue(os.path.exists(os.path.join(client.current_folder, - "licenses/path/to/license.txt"))) - - # keep_path = False AND conanfile.py - testconanfile = testconanfile.replace("keep_path=True", "keep_path=False") - client.save({"conanfile.py": testconanfile}, clean_first=True) - client.run("install . --build=missing") - self.assertTrue(os.path.exists(os.path.join(client.current_folder, - "licenses/license.txt"))) - - # keep_path = True AND conanfile.txt - testconanfile = conanfile_txt % "LibC/0.1@lasote/testing" - testconanfile += """ -[imports]: -., *license* -> ./licenses @ keep_path=True -""" - client.save({"conanfile.txt": testconanfile}, clean_first=True) - client.run("install . --build=missing") - self.assertTrue(os.path.exists(os.path.join(client.current_folder, - "licenses/path/to/license.txt"))) - - # keep_path = False AND conanfile.txt - testconanfile = testconanfile.replace("keep_path=True", "keep_path=False") - client.save({"conanfile.txt": testconanfile}, clean_first=True) - client.run("install . --build=missing") - self.assertTrue(os.path.exists(os.path.join(client.current_folder, "licenses"))) - self.assertTrue(os.path.exists(os.path.join(client.current_folder, "licenses/license.txt"))) diff --git a/conans/test/integration/build_requires_test.py b/conans/test/integration/build_requires_test.py index e15776a04..a3eb80275 100644 --- a/conans/test/integration/build_requires_test.py +++ b/conans/test/integration/build_requires_test.py @@ -4,7 +4,6 @@ from nose_parameterized.parameterized import parameterized from conans.test.utils.tools import TestClient from conans.paths import CONANFILE import os -from conans.util.files import load tool_conanfile = """from conans import ConanFile @@ -51,24 +50,6 @@ nonexistingpattern*: SomeTool/1.2@user/channel class BuildRequiresTest(unittest.TestCase): - def test_dependents_txt(self): - client = TestClient() - boost = """from conans import ConanFile -class Boost(ConanFile): - def package_info(self): - self.env_info.PATH.append("myboostpath") -""" - client.save({CONANFILE: boost}) - client.run("create . 
Boost/1.0@user/channel") - other = """[build_requires] -Boost/1.0@user/channel -""" - client.save({"conanfile.txt": other}, clean_first=True) - client.run("install .") - self.assertIn("PROJECT: Build requires: [Boost/1.0@user/channel]", client.out) - conanbuildinfo = load(os.path.join(client.current_folder, "conanbuildinfo.txt")) - self.assertIn('PATH=["myboostpath"]', conanbuildinfo) - def test_dependents(self): client = TestClient() boost = """from conans import ConanFile diff --git a/conans/test/integration/complete_test.py b/conans/test/integration/complete_test.py index cb8e90bfb..153f58fcb 100644 --- a/conans/test/integration/complete_test.py +++ b/conans/test/integration/complete_test.py @@ -1,4 +1,7 @@ import unittest + +from nose_parameterized import parameterized + from conans.test.utils.tools import TestServer, TestClient from conans.model.ref import ConanFileReference, PackageReference import os @@ -11,12 +14,12 @@ from conans.test.utils.test_files import uncompress_packaged_files @attr("slow") class CompleteFlowTest(unittest.TestCase): - def setUp(self): - test_server = TestServer() + @parameterized.expand([(True, ), (False, )]) + def reuse_test(self, complete_urls): + test_server = TestServer(complete_urls=complete_urls) self.servers = {"default": test_server} self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) - def reuse_test(self): conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") files = cpp_hello_conan_files("Hello0", "0.1", need_patch=True) self.client.save(files) @@ -80,7 +83,6 @@ class CompleteFlowTest(unittest.TestCase): self._assert_library_exists(ref, other_conan.paths) client3 = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) - conan_reference = ConanFileReference.loads("Hello1/0.2@lasote/stable") files3 = cpp_hello_conan_files("Hello1", "0.1", ["Hello0/0.1@lasote/stable"]) client3.save(files3) client3.run('install .') diff --git a/conans/test/remote/rest_api_test.py b/conans/test/remote/rest_api_test.py index 1973ddfe7..d77975995 100644 --- a/conans/test/remote/rest_api_test.py +++ b/conans/test/remote/rest_api_test.py @@ -49,6 +49,37 @@ class RestApiTest(unittest.TestCase): def tearDown(self): RestApiTest.server.clean() + def relative_url_completion_test(self): + api = RestApiClient(TestBufferConanOutput(), requester=requests) + + # test absolute urls + self.assertEquals(api._complete_url("http://host"), "http://host") + self.assertEquals(api._complete_url("http://host:1234"), "http://host:1234") + self.assertEquals(api._complete_url("https://host"), "https://host") + self.assertEquals(api._complete_url("https://host:1234"), "https://host:1234") + + # test relative urls + api.remote_url = "http://host" + self.assertEquals(api._complete_url("v1/path_to_file.txt"), + "http://host/v1/path_to_file.txt") + + api.remote_url = "http://host:1234" + self.assertEquals(api._complete_url("v1/path_to_file.txt"), + "http://host:1234/v1/path_to_file.txt") + + api.remote_url = "https://host" + self.assertEquals(api._complete_url("v1/path_to_file.txt"), + "https://host/v1/path_to_file.txt") + + api.remote_url = "https://host:1234" + self.assertEquals(api._complete_url("v1/path_to_file.txt"), + "https://host:1234/v1/path_to_file.txt") + + # test relative urls with subdirectory + api.remote_url = "https://host:1234/subdir/" + self.assertEquals(api._complete_url("v1/path_to_file.txt"), + "https://host:1234/subdir/v1/path_to_file.txt") + def server_info_test(self): check, version, capabilities = 
self.api.server_info() self.assertEquals(version, "0.16.0") diff --git a/conans/test/server/conan_server_config_parser_test.py b/conans/test/server/conan_server_config_parser_test.py index feaba8ac0..0b55a82aa 100644 --- a/conans/test/server/conan_server_config_parser_test.py +++ b/conans/test/server/conan_server_config_parser_test.py @@ -53,3 +53,20 @@ demo: %s server_config = ConanServerConfigParser(tmp_dir, environment={"CONAN_SERVER_USERS": "demo:manolito!@"}) self.assertEquals(server_config.users, {"demo": "manolito!@"}) + + def test_relative_public_url(self): + tmp_dir = temp_folder() + server_conf = """ +[server] + +[write_permissions] + +[users] + """ + server_dir = os.path.join(tmp_dir, ".conan_server") + mkdir(server_dir) + conf_path = os.path.join(server_dir, "server.conf") + save(conf_path, server_conf) + + server_config = ConanServerConfigParser(tmp_dir) + self.assertEquals(server_config.public_url, "v1") diff --git a/conans/test/server/conf_test.py b/conans/test/server/conf_test.py index 3cf050945..f52c48736 100644 --- a/conans/test/server/conf_test.py +++ b/conans/test/server/conf_test.py @@ -16,6 +16,8 @@ jwt_expire_minutes: 121 disk_storage_path: %s ssl_enabled: true port: 9220 +host_name: localhost +public_port: 12345 [write_permissions] @@ -80,6 +82,9 @@ text=value self.assertEquals(config.read_permissions, [("*/*@*/*", "*"), ("openssl/2.0.1@lasote/testing", "pepe")]) self.assertEquals(config.users, {"lasote": "defaultpass", "pepe": "pepepass"}) + self.assertEquals(config.host_name, "localhost") + self.assertEquals(config.public_port, 12345) + self.assertEquals(config.public_url, "https://localhost:12345/v1") # Now check with environments tmp_storage = temp_folder() @@ -89,6 +94,8 @@ text=value self.environ["CONAN_SSL_ENABLED"] = "False" self.environ["CONAN_SERVER_PORT"] = "1233" self.environ["CONAN_SERVER_USERS"] = "lasote:lasotepass,pepe2:pepepass2" + self.environ["CONAN_HOST_NAME"] = "remotehost" + self.environ["CONAN_SERVER_PUBLIC_PORT"] = "33333" config = ConanServerConfigParser(self.file_path, environment=self.environ) self.assertEquals(config.jwt_secret, "newkey") @@ -100,3 +107,6 @@ text=value self.assertEquals(config.read_permissions, [("*/*@*/*", "*"), ("openssl/2.0.1@lasote/testing", "pepe")]) self.assertEquals(config.users, {"lasote": "lasotepass", "pepe2": "pepepass2"}) + self.assertEquals(config.host_name, "remotehost") + self.assertEquals(config.public_port, 33333) + self.assertEquals(config.public_url, "http://remotehost:33333/v1") diff --git a/conans/test/util/pkg_config_test.py b/conans/test/util/pkg_config_test.py index ea0b5d578..3f51ffbb2 100644 --- a/conans/test/util/pkg_config_test.py +++ b/conans/test/util/pkg_config_test.py @@ -7,7 +7,6 @@ import os from nose.plugins.attrib import attr from conans.tools import PkgConfig, environment_append from conans.test.utils.test_files import temp_folder -from conans.errors import ConanException libastral_pc = """ PC FILE EXAMPLE: @@ -27,13 +26,6 @@ Cflags: -I${includedir}/libastral -D_USE_LIBASTRAL @attr("unix") class PkgConfigTest(unittest.TestCase): - def test_negative(self): - if platform.system() == "Windows": - return - pc = PkgConfig('libsomething_that_does_not_exist_in_the_world') - with self.assertRaises(ConanException): - pc.libs() - def test_pc(self): if platform.system() == "Windows": return diff --git a/conans/test/utils/tools.py b/conans/test/utils/tools.py index b8a1cb3b8..035e99488 100644 --- a/conans/test/utils/tools.py +++ b/conans/test/utils/tools.py @@ -175,7 +175,7 @@ class 
TestServer(object): write_permissions=None, users=None, plugins=None, base_path=None, server_version=Version(SERVER_VERSION), min_client_compatible_version=Version(MIN_CLIENT_COMPATIBLE_VERSION), - server_capabilities=None): + server_capabilities=None, complete_urls=False): """ 'read_permissions' and 'write_permissions' is a list of: [("opencv/2.3.4@lasote/testing", "user1, user2")] @@ -195,9 +195,10 @@ class TestServer(object): self.fake_url = "http://fake%s.com" % str(uuid.uuid4()).replace("-", "") min_client_ver = min_client_compatible_version + base_url = "%s/v1" % self.fake_url if complete_urls else "v1" self.test_server = TestServerLauncher(base_path, read_permissions, write_permissions, users, - base_url=self.fake_url + "/v1", + base_url=base_url, plugins=plugins, server_version=server_version, min_client_compatible_version=min_client_ver,
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 10 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "conans/requirements.txt", "conans/requirements_dev.txt", "conans/requirements_server.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==1.6.6 attrs==22.2.0 beautifulsoup4==4.12.3 bottle==0.12.25 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 colorama==0.3.9 -e git+https://github.com/conan-io/conan.git@869f10e260b727162b4b502f2af46525327ea063#egg=conan coverage==4.2 distro==1.1.0 fasteners==0.19 future==0.16.0 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 lazy-object-proxy==1.7.1 mccabe==0.7.0 mock==1.3.0 node-semver==0.2.0 nose==1.3.7 nose-parameterized==0.5.0 packaging==21.3 patch==1.16 pbr==6.1.1 pluggy==1.0.0 pluginbase==0.7 py==1.11.0 Pygments==2.14.0 PyJWT==1.7.1 pylint==1.8.4 pyparsing==3.1.4 pytest==7.0.1 PyYAML==3.12 requests==2.27.1 six==1.17.0 soupsieve==2.3.2.post1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 waitress==2.0.0 WebOb==1.8.9 WebTest==2.0.35 wrapt==1.16.0 zipp==3.6.0
name: conan channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==1.6.6 - attrs==22.2.0 - beautifulsoup4==4.12.3 - bottle==0.12.25 - charset-normalizer==2.0.12 - codecov==2.1.13 - colorama==0.3.9 - coverage==4.2 - distro==1.1.0 - fasteners==0.19 - future==0.16.0 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - mock==1.3.0 - node-semver==0.2.0 - nose==1.3.7 - nose-parameterized==0.5.0 - packaging==21.3 - patch==1.16 - pbr==6.1.1 - pluggy==1.0.0 - pluginbase==0.7 - py==1.11.0 - pygments==2.14.0 - pyjwt==1.7.1 - pylint==1.8.4 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==3.12 - requests==2.27.1 - six==1.17.0 - soupsieve==2.3.2.post1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - waitress==2.0.0 - webob==1.8.9 - webtest==2.0.35 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/conan
[ "conans/test/server/conan_server_config_parser_test.py::ServerConfigParseTest::test_relative_public_url" ]
[ "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_build_requires_0__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_build_requires_Tool_0_1_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_build_requires_1__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_build_requires_Tool_0_0_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_build_requires_2__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_def_build_requirements_self_self_build_requires_Tool_0_1_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_build_requires_3__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_build_requires_Tool_0_2_user_channel_def_build_requirements_self_self_build_requires_Tool_0_1_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_dependents", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_profile_order", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_profile_override_0__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_build_requires_Tool_0_1_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_profile_override_1__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_build_requires_Tool_0_0_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_profile_override_2__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_def_build_requirements_self_self_build_requires_Tool_0_1_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_profile_override_3__import_os_from_conans_import_ConanFile_tools_class_MyLib_ConanFile_name_MyLib_version_0_1_build_requires_Tool_0_2_user_channel_def_build_requirements_self_self_build_requires_Tool_0_1_lasote_stable_def_build_self_self_output_info_ToolPath_s_os_getenv_TOOL_PATH_", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_require_itself", "conans/test/integration/build_requires_test.py::BuildRequiresTest::test_transitive", "conans/test/util/pkg_config_test.py::PkgConfigTest::test_define_prefix", "conans/test/util/pkg_config_test.py::PkgConfigTest::test_pc" ]
[ "conans/test/functional/conanfile_loader_test.py::ConanLoaderTest::test_package_settings", "conans/test/server/conan_server_config_parser_test.py::ServerConfigParseTest::test_not_allowed_encoding_password", "conans/test/server/conf_test.py::ServerConfTest::test_unexpected_section", "conans/test/server/conf_test.py::ServerConfTest::test_values" ]
PASS_TO_FAIL: []
license_name: MIT License
__index_level_0__: 2,186
before_filepaths: [ "conans/client/loader_parse.py", "conans/client/command.py", "conans/client/rest/rest_client.py", "conans/client/client_cache.py", "conans/client/remote_registry.py", "conans/client/importer.py", "conans/client/tools/pkg_config.py", "conans/client/build_requires.py", "conans/client/loader.py", "conans/server/conf/__init__.py" ]
after_filepaths: [ "conans/client/loader_parse.py", "conans/client/command.py", "conans/client/rest/rest_client.py", "conans/client/client_cache.py", "conans/client/remote_registry.py", "conans/client/importer.py", "conans/client/tools/pkg_config.py", "conans/client/build_requires.py", "conans/client/loader.py", "conans/server/conf/__init__.py" ]
Example row 2

instance_id: dropbox__stone-72
base_commit: aee647260f103dcb78a3b6af43d6000a9cbd8eaa
created_at: 2018-02-20 21:13:54
environment_setup_commit: aee647260f103dcb78a3b6af43d6000a9cbd8eaa

patch:
diff --git a/stone/backends/python_type_stubs.py b/stone/backends/python_type_stubs.py index 6a0c6f0..c087c79 100644 --- a/stone/backends/python_type_stubs.py +++ b/stone/backends/python_type_stubs.py @@ -45,6 +45,7 @@ def emit_pass_if_nothing_emitted(codegen): ending_lineno = codegen.lineno if starting_lineno == ending_lineno: codegen.emit("pass") + codegen.emit() class ImportTracker(object): def __init__(self): @@ -135,6 +136,7 @@ class PythonTypeStubsBackend(CodeBackend): for alias in namespace.linearize_aliases(): self._generate_alias_definition(namespace, alias) + self._generate_routes(namespace) self._generate_imports_needed_for_typing() def _generate_imports_for_referenced_namespaces(self, namespace): @@ -152,8 +154,17 @@ class PythonTypeStubsBackend(CodeBackend): with self.indent(): self._generate_struct_class_init(ns, data_type) self._generate_struct_class_properties(ns, data_type) + + self._generate_validator_for(data_type) self.emit() + def _generate_validator_for(self, data_type): + # type: (DataType) -> None + cls_name = class_name_for_data_type(data_type) + self.emit("{}_validator = ... # type: stone_validators.Validator".format( + cls_name + )) + def _generate_union_class(self, ns, data_type): # type: (ApiNamespace, Union) -> None self.emit(self._class_declaration_for_type(ns, data_type)) @@ -162,6 +173,8 @@ class PythonTypeStubsBackend(CodeBackend): self._generate_union_class_is_set(data_type) self._generate_union_class_variant_creators(ns, data_type) self._generate_union_class_get_helpers(ns, data_type) + + self._generate_validator_for(data_type) self.emit() def _generate_union_class_vars(self, ns, data_type): @@ -356,6 +369,22 @@ class PythonTypeStubsBackend(CodeBackend): return map_stone_type_to_python_type(ns, data_type, override_dict=self._pep_484_type_mapping_callbacks) + def _generate_routes( + self, + namespace, # type: ApiNamespace + ): + # type: (...) -> None + for route in namespace.routes: + var_name = fmt_func(route.name) + self.emit( + "{var_name} = ... # type: bb.Route".format( + var_name=var_name + ) + ) + + if namespace.routes: + self.emit() + def _generate_imports_needed_for_typing(self): # type: () -> None if self.import_tracker.cur_namespace_typing_imports: diff --git a/stone/ir/api.py b/stone/ir/api.py index 2ee40e6..703961e 100644 --- a/stone/ir/api.py +++ b/stone/ir/api.py @@ -104,7 +104,7 @@ class ApiNamespace(object): self.name = name self.doc = None # type: typing.Optional[six.text_type] self.routes = [] # type: typing.List[ApiRoute] - self.route_by_name = {} # type: typing.Dict[str, ApiRoute] + self.route_by_name = {} # type: typing.Dict[typing.Text, ApiRoute] self.data_types = [] # type: typing.List[UserDefined] self.data_type_by_name = {} # type: typing.Dict[str, UserDefined] self.aliases = [] # type: typing.List[Alias] @@ -315,7 +315,7 @@ class ApiRoute(object): def __init__(self, name, ast_node): - # type: (str, AstRouteDef) -> None + # type: (typing.Text, AstRouteDef) -> None """ :param str name: Designated name of the endpoint. :param ast_node: Raw route definition from the parser.
problem_statement:

`python_type_stubs` generator should output _validator and route objects. This was an oversight in my original implementation of `python_type_stubs` - after integrating it into our codebase I've seen a few mypy errors along the lines of `has no attribute 'SomeStoneType_validator'` and `Module has no attribute "name_of_a_route"`. I might work on it this weekend; just wanted to track it as an issue.

repo: dropbox/stone
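Concretely, the fix makes the stub backend emit one `<Type>_validator` line per struct/union and one `bb.Route` line per route. The lines below are taken from the expected output in the test diff that follows; `Struct1`, `route_one`, and `route_two` are names from the test fixtures.

Struct1_validator = ... # type: stone_validators.Validator

route_one = ... # type: bb.Route
route_two = ... # type: bb.Route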
diff --git a/test/test_python_type_stubs.py b/test/test_python_type_stubs.py index dd64bc5..e4423fb 100644 --- a/test/test_python_type_stubs.py +++ b/test/test_python_type_stubs.py @@ -1,6 +1,7 @@ from __future__ import absolute_import, division, print_function, unicode_literals import textwrap + MYPY = False if MYPY: import typing # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression @@ -30,6 +31,7 @@ from stone.ir import ( UnionField, Void, Float64) +from stone.ir.api import ApiRoute from stone.backends.python_type_stubs import PythonTypeStubsBackend from test.backend_test_util import _mock_emit @@ -167,6 +169,22 @@ def _make_namespace_with_empty_union(): return ns +def _make_namespace_with_route(): + # type: (...) -> ApiNamespace + ns = ApiNamespace("_make_namespace_with_route()") + mock_ast_node = Mock() + route_one = ApiRoute( + name="route_one", + ast_node=mock_ast_node, + ) + route_two = ApiRoute( + name="route_two", + ast_node=mock_ast_node, + ) + ns.add_route(route_one) + ns.add_route(route_two) + return ns + def _api(): api = Api(version="1.0") return api @@ -219,7 +237,8 @@ class TestPythonTypeStubs(unittest.TestCase): @f1.deleter def f1(self) -> None: ... - + + Struct1_validator = ... # type: stone_validators.Validator class Struct2(object): def __init__(self, @@ -256,6 +275,7 @@ class TestPythonTypeStubs(unittest.TestCase): @f4.deleter def f4(self) -> None: ... + Struct2_validator = ... # type: stone_validators.Validator from typing import ( @@ -298,6 +318,7 @@ class TestPythonTypeStubs(unittest.TestCase): @nullable_list.deleter def nullable_list(self) -> None: ... + NestedTypes_validator = ... # type: stone_validators.Validator from typing import ( @@ -322,6 +343,7 @@ class TestPythonTypeStubs(unittest.TestCase): def is_last(self) -> bool: ... + Union_validator = ... # type: stone_validators.Validator class Shape(bb.Union): point = ... # type: Shape @@ -335,7 +357,8 @@ class TestPythonTypeStubs(unittest.TestCase): def get_circle(self) -> float: ... - + Shape_validator = ... # type: stone_validators.Validator + """).format(headers=_headers) self.assertEqual(result, expected) @@ -348,6 +371,21 @@ class TestPythonTypeStubs(unittest.TestCase): class EmptyUnion(bb.Union): pass + + EmptyUnion_validator = ... # type: stone_validators.Validator + + """).format(headers=_headers) + self.assertEqual(result, expected) + + def test__generate_routes(self): + # type: () -> None + ns = _make_namespace_with_route() + result = self._evaluate_namespace(ns) + expected = textwrap.dedent("""\ + {headers} + + route_one = ... # type: bb.Route + route_two = ... # type: bb.Route """).format(headers=_headers) self.assertEqual(result, expected) @@ -372,6 +410,7 @@ class TestPythonTypeStubs(unittest.TestCase): @f1.deleter def f1(self) -> None: ... + Struct1_validator = ... # type: stone_validators.Validator AliasToStruct1 = Struct1 """).format(headers=_headers)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "mock" ], "pre_install": [], "python": "3.6", "reqs_path": [ "test/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 packaging==21.3 pluggy==1.0.0 ply==3.11 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 six==1.17.0 -e git+https://github.com/dropbox/stone.git@aee647260f103dcb78a3b6af43d6000a9cbd8eaa#egg=stone tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: stone channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - packaging==21.3 - pluggy==1.0.0 - ply==3.11 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/stone
[ "test/test_python_type_stubs.py::TestPythonTypeStubs::test__generate_base_namespace_module__with_alias", "test/test_python_type_stubs.py::TestPythonTypeStubs::test__generate_base_namespace_module__with_many_structs", "test/test_python_type_stubs.py::TestPythonTypeStubs::test__generate_base_namespace_module__with_nested_types", "test/test_python_type_stubs.py::TestPythonTypeStubs::test__generate_base_namespace_module_with_empty_union__generates_pass", "test/test_python_type_stubs.py::TestPythonTypeStubs::test__generate_base_namespace_module_with_union__generates_stuff", "test/test_python_type_stubs.py::TestPythonTypeStubs::test__generate_routes" ]
FAIL_TO_FAIL: []
PASS_TO_PASS: []
PASS_TO_FAIL: []
license_name: MIT License
__index_level_0__: 2,187
before_filepaths: [ "stone/backends/python_type_stubs.py", "stone/ir/api.py" ]
after_filepaths: [ "stone/backends/python_type_stubs.py", "stone/ir/api.py" ]
Example row 3

instance_id: grabbles__grabbit-49
base_commit: c1a811a2a41153afec970f73923c0a53c66ef694
created_at: 2018-02-20 22:36:27
environment_setup_commit: 5a588731d1a4a42a6b67f09ede110d7770845ed0

patch:
diff --git a/grabbit/__init__.py b/grabbit/__init__.py index 35f4778..4d08018 100644 --- a/grabbit/__init__.py +++ b/grabbit/__init__.py @@ -1,10 +1,12 @@ -from .core import File, Entity, Layout, merge_layouts +from .core import File, Entity, Layout, Tag, Domain, merge_layouts from .extensions import (replace_entities, build_path, write_contents_to_file) __all__ = [ 'File', 'Entity', 'Layout', + 'Tag', + 'Domain', 'replace_entities', 'build_path', 'write_contents_to_file', diff --git a/grabbit/core.py b/grabbit/core.py index cc828e0..7767ea6 100644 --- a/grabbit/core.py +++ b/grabbit/core.py @@ -5,9 +5,10 @@ from collections import defaultdict, OrderedDict, namedtuple from grabbit.external import six, inflect from grabbit.utils import natural_sort, listify from grabbit.extensions.writable import build_path, write_contents_to_file -from os.path import join, basename, dirname, abspath, split +from os.path import (join, basename, dirname, abspath, split, isabs, exists) from functools import partial from copy import deepcopy +import warnings __all__ = ['File', 'Entity', 'Layout'] @@ -22,15 +23,26 @@ class File(object): self.path = filename self.filename = basename(self.path) self.dirname = dirname(self.path) - self.entities = {} + self.tags = {} + + @property + def entities(self): + return {k: v.value for k, v in self.tags.items()} + + @property + def domains(self): + return tuple(set([t.entity.domain.name for t in self.tags.values()])) - def _matches(self, entities=None, extensions=None, regex_search=False): + def _matches(self, entities=None, extensions=None, domains=None, + regex_search=False): """ Checks whether the file matches all of the passed entities and extensions. + Args: entities (dict): A dictionary of entity names -> regex patterns. extensions (str, list): One or more file extensions to allow. + domains (str, list): One or more domains the file must match. regex_search (bool): Whether to require exact match (False) or regex search (True) when comparing the query string to each entity. @@ -44,11 +56,16 @@ class File(object): if re.search(extensions, self.path) is None: return False + if domains is not None: + domains = listify(domains) + if not set(self.domains) & set(domains): + return False + if entities is not None: for name, val in entities.items(): - if name not in self.entities: + if name not in self.tags: return False def make_patt(x): @@ -64,7 +81,7 @@ class File(object): ent_patts = [make_patt(x) for x in listify(val)] patt = '|'.join(ent_patts) - if re.search(patt, str(self.entities[name])) is None: + if re.search(patt, str(self.tags[name].value)) is None: return False return True @@ -73,9 +90,9 @@ class File(object): Returns the File as a named tuple. The full path plus all entity key/value pairs are returned as attributes. 
""" - _File = namedtuple('File', 'filename ' + - ' '.join(self.entities.keys())) - return _File(filename=self.path, **self.entities) + entities = self.entities + _File = namedtuple('File', 'filename ' + ' '.join(entities.keys())) + return _File(filename=self.path, **entities) def copy(self, path_patterns, symbolic_link=False, root=None, conflicts='fail'): @@ -102,12 +119,45 @@ class File(object): conflicts=conflicts) +class Domain(object): + + def __init__(self, name, config, root): + + self.name = name + self.config = config + self.root = root + self.entities = {} + self.files = [] + self.filtering_regex = {} + self.path_patterns = [] + + if 'index' in config: + self.filtering_regex = config['index'] + if self.filtering_regex.get('include') and \ + self.filtering_regex.get('exclude'): + raise ValueError("You can only define either include or " + "exclude regex, not both.") + + if 'default_path_patterns' in config: + self.path_patterns += listify(config['default_path_patterns']) + + def add_entity(self, ent): + self.entities[ent.name] = ent + + def add_file(self, file): + self.files.append(file) + + +Tag = namedtuple('Tag', ['entity', 'value']) + + class Entity(object): - def __init__(self, name, pattern=None, mandatory=False, directory=None, - map_func=None, **kwargs): + def __init__(self, name, pattern=None, domain=None, mandatory=False, + directory=None, map_func=None, **kwargs): """ Represents a single entity defined in the JSON config. + Args: name (str): The name of the entity (e.g., 'subject', 'run', etc.) pattern (str): A regex pattern used to match against file names. @@ -119,6 +169,7 @@ class Entity(object): map_func (callable): Optional callable used to extract the Entity's value from the passed string (instead of trying to match on the defined .pattern). + domain (Domain): The Domain the Entity belongs to. kwargs (dict): Additional keyword arguments. """ if pattern is None and map_func is None: @@ -128,12 +179,14 @@ class Entity(object): "set." % name) self.name = name self.pattern = pattern + self.domain = domain self.mandatory = mandatory self.directory = directory self.map_func = map_func self.files = {} self.regex = re.compile(pattern) if pattern is not None else None self.kwargs = kwargs + self.id = '.'.join([getattr(domain, 'name', ''), name]) def __iter__(self): for i in self.unique(): @@ -150,20 +203,29 @@ class Entity(object): setattr(result, k, new_val) return result - def matches(self, f): + def matches(self, f, update_file=False): """ Determine whether the passed file matches the Entity and update the Entity/File mappings. + Args: f (File): The File instance to match against. + update_file (bool): If True, the file's tag list is updated to + include the current Entity. """ if self.map_func is not None: - f.entities[self.name] = self.map_func(f) + val = self.map_func(f) else: m = self.regex.search(f.path) - if m is not None: - val = m.group(1) - f.entities[self.name] = val + val = m.group(1) if m is not None else None + + if val is None: + return False + + if update_file: + f.tags[self.name] = Tag(self, val) + + return True def add_file(self, filename, value): """ Adds the specified filename to tracking. """ @@ -175,6 +237,7 @@ class Entity(object): def count(self, files=False): """ Returns a count of unique values or files. + Args: files (bool): When True, counts all files mapped to the Entity. When False, counts all unique values. 
@@ -203,7 +266,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): def __init__(self, path, config=None, index=None, dynamic_getters=False, absolute_paths=True, regex_search=False, entity_mapper=None, - path_patterns=None): + path_patterns=None, config_filename='layout.json'): """ A container for all the files and metadata found at the specified path. @@ -245,6 +308,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): path_patterns (str, list): One or more filename patterns to use as a default path pattern for this layout's files. Can also be specified in the config file. + config_filename (str): The name of directory-specific config files. + Every directory will be scanned for this file, and if found, + the config file will be read in and added to the list of + configs. """ self.root = abspath(path) if absolute_paths else path @@ -253,62 +320,100 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): self.mandatory = set() self.dynamic_getters = dynamic_getters self.regex_search = regex_search - self.filtering_regex = {} self.entity_mapper = self if entity_mapper == 'self' else entity_mapper self.path_patterns = path_patterns if path_patterns else [] + self.config_filename = config_filename + self.domains = OrderedDict() if config is not None: - self._load_config(config) + for c in listify(config): + self._load_domain(c) if index is None: self.index() else: self.load_index(index) - def _load_config(self, config): + def _load_domain(self, config, root=None): + if isinstance(config, six.string_types): config = json.load(open(config, 'r')) - elif isinstance(config, list): - merged = {} - for c in config: - if isinstance(c, six.string_types): - c = json.load(open(c, 'r')) - merged.update(c) - config = merged - for e in config['entities']: - self.add_entity(**e) - - if 'index' in config: - self.filtering_regex = config['index'] - if self.filtering_regex.get('include') and \ - self.filtering_regex.get('exclude'): - raise ValueError("You can only define either include or " - "exclude regex, not both.") - - if 'default_path_patterns' in config: - self.path_patterns += listify(config['default_path_patterns']) + if 'name' not in config: + raise ValueError("Config file missing 'name' attribute.") + if config['name'] in self.domains: + raise ValueError("Config with name '%s' already exists in " + "Layout. Name of each config file must be " + "unique across entire Layout.") + if root is not None: + config['root'] = root + + if 'root' not in config: + warnings.warn("No valid root directory found for domain '%s'." + " Falling back on the Layout's root directory. " + "If this isn't the intended behavior, make sure " + "the config file for this domain includes a " + "'root' key." % config['name']) + config['root'] = self.root + elif not isabs(config['root']): + _root = config['root'] + config['root'] = abspath(join(self.root, config['root'])) + if not exists(config['root']): + msg = ("Relative path '%s' for domain '%s' interpreted as '%s'" + ", but this directory doesn't exist. Either specify the" + " domain root as an absolute path, or make sure it " + "points to a valid directory when appended to the " + "Layout's root (%s)." 
% (_root, config['name'], + config['root'], self.root)) + raise ValueError(msg) + + # Load entities + domain = Domain(config['name'], config, config['root']) + for e in config.get('entities', []): + self.add_entity(domain=domain, **e) + + self.domains[domain.name] = domain + + def get_domain_entities(self, domains=None, file=None): + # Get all Entities included in the specified Domains, in the same + # order as Domains in the list. Alternatively, if a file is passed, + # identify its domains and then return the entities. + + if file is None: + if domains is None: + domains = list(self.domains.keys()) + else: + domains = self._get_domains_for_file(file) - return config + ents = {} + for d in domains: + ents.update(self.domains[d].entities) + return ents - def _check_inclusions(self, f): + def _check_inclusions(self, f, domains=None): ''' Check file or directory against regexes in config to determine if it should be included in the index ''' + filename = f if isinstance(f, six.string_types) else f.path - # If file matches any include regex, then True - include_regex = self.filtering_regex.get('include', []) - if include_regex: - for regex in include_regex: - if re.match(regex, filename): - break - else: - return False - else: - # If file matches any excldue regex, then false - for regex in self.filtering_regex.get('exclude', []): - if re.match(regex, filename, flags=re.UNICODE): + if domains is None: + domains = list(self.domains.keys()) + + for dom in domains: + dom = self.domains[dom] + # If file matches any include regex, then True + include_regex = dom.filtering_regex.get('include', []) + if include_regex: + for regex in include_regex: + if re.match(regex, filename): + break + else: return False + else: + # If file matches any excldue regex, then false + for regex in dom.filtering_regex.get('exclude', []): + if re.match(regex, filename, flags=re.UNICODE): + return False return True @@ -342,25 +447,59 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): for ent in self.entities.values(): ent.files = {} - def _index_file(self, root, f): + def _get_domains_for_file(self, f): + if isinstance(f, File): + return f.domains + return [d.name for d in self.domains.values() if f.startswith(d.root)] + def _index_file(self, root, f, domains=None): + + # If domains aren't explicitly passed, figure out what applies + if domains is None: + domains = self._get_domains_for_file(root) + + # Create the file object--allows for subclassing f = self._make_file_object(root, f) if not (self._check_inclusions(f) and self._validate_file(f)): return - for e in self.entities.values(): - e.matches(f) + for d in domains: + self.domains[d].add_file(f) + + entities = self.get_domain_entities(domains) + + if entities: + self.files[f.path] = f + + for e in entities.values(): + e.matches(f, update_file=True) - fe = f.entities.keys() + file_ents = f.tags.keys() # Only keep Files that match at least one Entity, and all # mandatory Entities - if fe and not (self.mandatory - set(fe)): + if file_ents and not (self.mandatory - set(file_ents)): self.files[f.path] = f # Bind the File to all of the matching entities - for ent, val in f.entities.items(): - self.entities[ent].add_file(f.path, val) + for name, tag in f.tags.items(): + ent_id = tag.entity.id + self.entities[ent_id].add_file(f.path, tag.value) + + def _find_entity(self, entity): + ''' Find an Entity instance by name. 
Checks both name and id fields.''' + if entity in self.entities: + return self.entities[entity] + _ent = [e for e in self.entities.values() if e.name == entity] + if len(_ent) > 1: + raise ValueError("Entity name '%s' matches %d entities. To " + "avoid ambiguity, please prefix the entity " + "name with its domain (e.g., 'bids.%s'." % + (entity, len(_ent), entity)) + if _ent: + return _ent[0] + + raise ValueError("No entity '%s' found." % entity) def index(self): @@ -371,28 +510,63 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): # Loop over all files for root, directories, filenames in dataset: + # Determine which Domains apply to the current directory + domains = self._get_domains_for_file(root) + # Exclude directories that match exclude regex from further search full_dirs = [os.path.join(root, d) for d in directories] - full_dirs = filter(self._check_inclusions, full_dirs) - directories[:] = [split(d)[1] for d in - filter(self._validate_dir, full_dirs)] - # self._index_filenames(filenames) + def check_incl(directory): + return self._check_inclusions(directory, domains) + + full_dirs = filter(check_incl, full_dirs) + full_dirs = filter(self._validate_dir, full_dirs) + directories[:] = [split(d)[1] for d in full_dirs] + + if self.config_filename in filenames: + config_path = os.path.join(root, self.config_filename) + config = json.load(open(config_path, 'r')) + self._load_domain(config) + + # Filter Domains if current dir's config file has an + # include directive + if 'include' in config: + missing = set(config['include']) - set(domains) + if missing: + msg = ("Missing configs '%s' specified in include " + "directive of config '%s'. Please make sure " + "these config files are accessible from the " + "directory %s.") % (missing, config['name'], + root) + raise ValueError(msg) + domains = config['include'] + domains.append(config['name']) + + filenames.remove(self.config_filename) for f in filenames: - self._index_file(root, f) + self._index_file(root, f, domains) def save_index(self, filename): ''' Save the current Layout's index to a .json file. + Args: filename (str): Filename to write to. + + Note: At the moment, this won't serialize directory-specific config + files. This means reconstructed indexes will only work properly in + cases where there aren't multiple layout specs within a project. ''' - data = {f.path: f.entities for f in self.files.values()} + data = {} + for f in self.files.values(): + entities = {v.entity.id: v.value for k, v in f.tags.items()} + data[f.path] = entities with open(filename, 'w') as outfile: json.dump(data, outfile) def load_index(self, filename, reindex=False): ''' Load the Layout's index from a plaintext file. + Args: filename (str): Path to the plaintext index file. reindex (bool): If True, discards entity values provided in the @@ -401,24 +575,33 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): False, in which case it is assumed that all entity definitions in the loaded index are correct and do not need any further validation. + + Note: At the moment, directory-specific config files aren't serialized. + This means reconstructed indexes will only work properly in cases + where there aren't multiple layout specs within a project. 
''' self._reset_index() data = json.load(open(filename, 'r')) for path, ents in data.items(): + # If file path isn't absolute, assume it's relative to layout root + if not isabs(path): + path = join(self.root, path) + root, f = dirname(path), basename(path) if reindex: self._index_file(root, f) else: f = self._make_file_object(root, f) - f.entities = ents + tags = {k: Tag(self.entities[k], v) for k, v in ents.items()} + f.tags = tags self.files[f.path] = f for ent, val in f.entities.items(): self.entities[ent].add_file(f.path, val) - def add_entity(self, **kwargs): + def add_entity(self, domain, **kwargs): ''' Add a new Entity to tracking. ''' # Set the entity's mapping func if one was specified @@ -433,12 +616,14 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): map_func = getattr(self.entity_mapper, kwargs['map_func']) kwargs['map_func'] = map_func - ent = Entity(**kwargs) + ent = Entity(domain=domain, **kwargs) + domain.add_entity(ent) + if ent.mandatory: - self.mandatory.add(ent.name) + self.mandatory.add(ent.id) if ent.directory is not None: ent.directory = ent.directory.replace('{{root}}', self.root) - self.entities[ent.name] = ent + self.entities[ent.id] = ent if self.dynamic_getters: func = partial(getattr(self, 'get'), target=ent.name, return_type='id') @@ -446,9 +631,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): setattr(self, 'get_%s' % func_name, func) def get(self, return_type='tuple', target=None, extensions=None, - regex_search=None, **kwargs): + domains=None, regex_search=None, **kwargs): """ Retrieve files and/or metadata from the current Layout. + Args: return_type (str): Type of result to return. Valid values: 'tuple': returns a list of namedtuples containing file name as @@ -462,6 +648,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): (if return_type is 'dir' or 'id'). extensions (str, list): One or more file extensions to filter on. Files with any other extensions will be excluded. + domains (list): Optional list of domain names to scan for files. + If None, all available domains are scanned. regex_search (bool or None): Whether to require exact matching (False) or regex search (True) when comparing the query string to each entity. 
If None (default), uses the value found in @@ -481,7 +669,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): filters = {} filters.update(kwargs) for filename, file in self.files.items(): - if not file._matches(filters, extensions, regex_search): + if not file._matches(filters, extensions, domains, regex_search): continue result.append(file) @@ -496,6 +684,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): return result else: + valid_entities = self.get_domain_entities(domains) + if target is None: raise ValueError('If return_type is "id" or "dir", a valid ' 'target entity must also be specified.') @@ -506,7 +696,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): return natural_sort(result) elif return_type == 'dir': - template = self.entities[target].directory + template = valid_entities[target].directory if template is None: raise ValueError('Return type set to directory, but no ' 'directory template is defined for the ' @@ -514,7 +704,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): # Construct regex search pattern from target directory template to_rep = re.findall('\{(.*?)\}', template) for ent in to_rep: - patt = self.entities[ent].pattern + patt = valid_entities[ent].pattern template = template.replace('{%s}' % ent, patt) template += '[^\%s]*$' % os.path.sep matches = [f.dirname for f in self.files.values() @@ -528,27 +718,30 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): def unique(self, entity): """ Return a list of unique values for the named entity. + Args: entity (str): The name of the entity to retrieve unique values of. """ - return self.entities[entity].unique() + return self._find_entity(entity).unique() def count(self, entity, files=False): """ Return the count of unique values or files for the named entity. + Args: entity (str): The name of the entity. files (bool): If True, counts the number of filenames that contain at least one value of the entity, rather than the number of unique values of the entity. """ - return self.entities[entity].count(files) + return self._find_entity(entity).count(files) def as_data_frame(self, **kwargs): """ Return information for all Files tracked in the Layout as a pandas DataFrame. - args: + + Args: kwargs: Optional keyword arguments passed on to get(). This allows one to easily select only a subset of files for export. Returns: @@ -574,6 +767,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): ignore_strict_entities=None, **kwargs): ''' Walk up the file tree from the specified path and return the nearest matching file(s). + Args: path (str): The file to search from. 
return_type (str): What to return; must be one of 'file' (default) @@ -593,10 +787,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): ''' entities = {} - for name, ent in self.entities.items(): + for ent in self.entities.values(): m = ent.regex.search(path) if m: - entities[name] = m.group(1) + entities[ent.name] = m.group(1) # Remove any entities we want to ignore when strict matching is on if strict and ignore_strict_entities is not None: @@ -612,9 +806,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): folders[f.dirname].append(f) def count_matches(f): - keys = set(entities.keys()) & set(f.entities.keys()) + f_ents = f.entities + keys = set(entities.keys()) & set(f_ents.keys()) shared = len(keys) - return [shared, sum([entities[k] == f.entities[k] for k in keys])] + return [shared, sum([entities[k] == f_ents[k] for k in keys])] matches = [] @@ -715,7 +910,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): def write_contents_to_file(self, entities, path_patterns=None, contents=None, link_to=None, content_mode='text', conflicts='fail', - strict=False): + strict=False, domains=None): """ Write arbitrary data to a file defined by the passed entities and path patterns. @@ -737,10 +932,17 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): strict (bool): If True, all entities must be matched inside a pattern in order to be a valid match. If False, extra entities will be ignored so long as all mandatory entities are found. + domains (list): List of Domains to scan for path_patterns. Order + determines precedence (i.e., earlier Domains will be scanned + first). If None, all available domains are included. """ if not path_patterns: path_patterns = self.path_patterns + if domains is None: + domains = list(self.domains.keys()) + for dom in domains: + path_patterns.extend(self.domains[dom].path_patterns) path = build_path(entities, path_patterns, strict) write_contents_to_file(path, contents=contents, link_to=link_to, content_mode=content_mode, conflicts=conflicts, @@ -766,6 +968,7 @@ def merge_layouts(layouts): for l in layouts[1:]: layout.files.update(l.files) + layout.domains.update(l.domains) for k, v in l.entities.items(): if k not in layout.entities:
Hierarchical specifications

Currently grabbit assumes that all folders/files below the project root obey the same spec. For a variety of reasons, it would be good to allow hierarchical specifications, so that on the initial scan each folder would be checked for its own .json spec, and if one is found, the entities defined within that file would be used for any files below it. Internally, it's probably best to handle this by initializing multiple `Layout` objects and maintaining a common index across them (to enable returning files that match entities shared across specs).
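To make the idea concrete, here is a minimal, self-contained Python sketch of per-directory spec discovery. It is not grabbit's actual implementation or API — the names `SPEC_FILENAME`, `load_spec`, and `scan_with_local_specs` are hypothetical — it only shows how a directory walk can pick up a folder-level JSON spec and apply its entity patterns to files beneath that folder while keeping one common index.

```python
import json
import os
import re

SPEC_FILENAME = "layout.json"  # hypothetical per-directory spec file name


def load_spec(directory):
    """Return compiled entity patterns from this directory's spec, if present."""
    path = os.path.join(directory, SPEC_FILENAME)
    if not os.path.exists(path):
        return {}
    with open(path) as fh:
        spec = json.load(fh)
    # Each entity maps a name to a regex with a single capture group.
    return {e["name"]: re.compile(e["pattern"]) for e in spec.get("entities", [])}


def scan_with_local_specs(root):
    """Walk `root`; each folder's spec extends the entities inherited from above."""
    index = {}
    inherited = {root: {}}
    for dirpath, dirnames, filenames in os.walk(root):
        entities = dict(inherited.get(dirpath, {}))
        entities.update(load_spec(dirpath))  # local spec extends/overrides
        for child in dirnames:
            inherited[os.path.join(dirpath, child)] = entities
        for fname in filenames:
            if fname == SPEC_FILENAME:
                continue
            matched = {}
            for name, patt in entities.items():
                m = patt.search(fname)
                if m:
                    matched[name] = m.group(1)
            if matched:
                index[os.path.join(dirpath, fname)] = matched
    return index
```

A single call such as `scan_with_local_specs('/data/project')` then yields one index spanning every spec, which is roughly what the patch above achieves with `Domain` objects and the `config_filename` argument to `Layout`.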
grabbles/grabbit
diff --git a/grabbit/tests/data/valuable_stamps/USA/dir_config.json b/grabbit/tests/data/valuable_stamps/USA/dir_config.json new file mode 100644 index 0000000..0563cca --- /dev/null +++ b/grabbit/tests/data/valuable_stamps/USA/dir_config.json @@ -0,0 +1,14 @@ +{ + "name": "usa_stamps", + "root": ".", + "entities": [ + { + "name": "name", + "pattern": "name=(.*?)\\#" + }, + { + "name": "value", + "pattern": "value=([a-z0-9]+)\\.txt" + } + ] +} \ No newline at end of file diff --git a/grabbit/tests/data/valuable_stamps/USA/name=1c_Washington_Irving#value=35cents.txt b/grabbit/tests/data/valuable_stamps/USA/name=1c_Washington_Irving#value=35cents.txt new file mode 100644 index 0000000..e69de29 diff --git a/grabbit/tests/data/valuable_stamps/USA/name=5c_Francis_E_Willard#value=1dollar.txt b/grabbit/tests/data/valuable_stamps/USA/name=5c_Francis_E_Willard#value=1dollar.txt new file mode 100644 index 0000000..e69de29 diff --git a/grabbit/tests/data/valuable_stamps/USA/name=5c_Walt_Whitman#value=80cents.txt b/grabbit/tests/data/valuable_stamps/USA/name=5c_Walt_Whitman#value=80cents.txt new file mode 100644 index 0000000..e69de29 diff --git a/grabbit/tests/misc/index.json b/grabbit/tests/misc/index.json index a1394b3..c3bbf1d 100644 --- a/grabbit/tests/misc/index.json +++ b/grabbit/tests/misc/index.json @@ -1,130 +1,130 @@ { - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/dataset_description.json": { - "type": "description" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/participants.tsv": { - "type": "trt/participants" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-fullbrain_bold.json": { - "type": "bold", - "task": "rest_acq" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-fullbrain_run-1_physio.json": { - "run": "1", - "type": "physio", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-fullbrain_run-2_physio.json": { - "run": "2", - "type": "physio", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-prefrontal_bold.json": { - "type": "bold", - "task": "rest_acq" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-prefrontal_physio.json": { - "type": "physio", - "task": "rest_acq" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/test.bval": { - "type": "trt/test", - "bval": "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/test.bval" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/models/excluded_model.json": { - "type": "model" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/sub-01_sessions.tsv": { - "subject": "01", - "type": "sessions" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/sub-01_ses-1_scans.tsv": { - "subject": "01", - "session": "1", - "type": "scans" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/anat/sub-01_ses-1_T1map.nii.gz": { - "subject": "01", - "session": "1", - "type": "T1map" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/anat/sub-01_ses-1_T1w.nii.gz": { - "subject": "01", - "session": "1", - "type": "T1w" - }, - 
"/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude1.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "magnitude1" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude2.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "magnitude2" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.json": { - "subject": "01", - "session": "1", - "run": "1", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude1.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "magnitude1" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude2.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "magnitude2" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.json": { - "subject": "01", - "session": "1", - "run": "2", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_bold.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "bold", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_physio.tsv.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "physio", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-2_bold.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "bold", - "task": "rest_acq", - "acquisition": "fullbrain_run" + "dataset_description.json": { + "test.type": "description" + }, + "participants.tsv": { + "test.type": "trt/participants" + }, + "task-rest_acq-fullbrain_bold.json": { + "test.type": "bold", + "test.task": "rest_acq" + }, + "task-rest_acq-fullbrain_run-1_physio.json": { + "test.run": "1", + "test.type": "physio", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "task-rest_acq-fullbrain_run-2_physio.json": { + "test.run": "2", + "test.type": "physio", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "task-rest_acq-prefrontal_bold.json": { + "test.type": "bold", + "test.task": "rest_acq" + }, + "task-rest_acq-prefrontal_physio.json": { + "test.type": "physio", + "test.task": "rest_acq" + }, + "test.bval": { + "test.type": "trt/test", + "test.bval": "test.bval" + }, + "models/excluded_model.json": { + "test.type": "model" + }, + "sub-01/sub-01_sessions.tsv": { + "test.subject": "01", + "test.type": "sessions" + }, + "sub-01/ses-1/sub-01_ses-1_scans.tsv": { + "test.subject": "01", + "test.session": "1", + "test.type": "scans" + 
}, + "sub-01/ses-1/anat/sub-01_ses-1_T1map.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.type": "T1map" + }, + "sub-01/ses-1/anat/sub-01_ses-1_T1w.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.type": "T1w" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude1.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "magnitude1" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude2.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "magnitude2" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.json": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "phasediff" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "phasediff" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude1.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "magnitude1" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude2.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "magnitude2" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.json": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "phasediff" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "phasediff" + }, + "sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_bold.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "bold", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_physio.tsv.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "physio", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-2_bold.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "bold", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" } } diff --git a/grabbit/tests/specs/stamps.json b/grabbit/tests/specs/stamps.json index d5eb2d3..37ce56d 100644 --- a/grabbit/tests/specs/stamps.json +++ b/grabbit/tests/specs/stamps.json @@ -1,4 +1,5 @@ { + "name": "stamps", "entities": [ { "name": "name", diff --git a/grabbit/tests/specs/test.json b/grabbit/tests/specs/test.json index a1dd0d3..d93c986 100644 --- a/grabbit/tests/specs/test.json +++ b/grabbit/tests/specs/test.json @@ -1,4 +1,5 @@ { + "name": "test", "index" : { "exclude" : [".*derivatives.*"] }, @@ -25,11 +26,11 @@ }, { "name": "task", - "pattern": "task-(.*?)-" + "pattern": "task-(.*?)_" }, { "name": "acquisition", - "pattern": "acq-(.*?)-" + "pattern": "acq-(.*?)_" }, { "name": "bval", diff --git a/grabbit/tests/specs/test_include.json b/grabbit/tests/specs/test_include.json index 2529bd5..f3a87ef 100644 --- a/grabbit/tests/specs/test_include.json +++ b/grabbit/tests/specs/test_include.json @@ -1,4 +1,5 @@ { + "name": "test_with_includes", "index" : { "include" : ["sub-(\\d+)", "ses-.*", "func", "fmap", ".*\\..*"] }, @@ -25,11 +26,11 @@ }, { "name": "task", - "pattern": "task-(.*?)-" + "pattern": "task-(.*?)_" }, { "name": "acquisition", - "pattern": "acq-(.*?)-" + "pattern": "acq-(.*?)_" }, { "name": "bval", diff --git a/grabbit/tests/specs/test_with_mapper.json 
b/grabbit/tests/specs/test_with_mapper.json index 063aa0d..84c3d28 100644 --- a/grabbit/tests/specs/test_with_mapper.json +++ b/grabbit/tests/specs/test_with_mapper.json @@ -1,4 +1,5 @@ { + "name": "test_with_mapper", "index" : { "exclude" : [".*derivatives.*"] }, @@ -25,11 +26,11 @@ }, { "name": "task", - "pattern": "task-(.*?)-" + "pattern": "task-(.*?)_" }, { "name": "acquisition", - "pattern": "acq-(.*?)-" + "pattern": "acq-(.*?)_" }, { "name": "bval", diff --git a/grabbit/tests/test_core.py b/grabbit/tests/test_core.py index a98e3bd..193f0a4 100644 --- a/grabbit/tests/test_core.py +++ b/grabbit/tests/test_core.py @@ -1,5 +1,5 @@ import pytest -from grabbit import File, Entity, Layout, merge_layouts +from grabbit import File, Entity, Layout, Tag, merge_layouts import os import posixpath as psp import tempfile @@ -30,15 +30,18 @@ def bids_layout(request): hdfs = pytest.importorskip("hdfs") from grabbit.extensions import HDFSLayout client = hdfs.Config().get_client() - root = psp.join('hdfs://localhost:9000{0}'.format(client.root), 'data', '7t_trt') - config = psp.join('hdfs://localhost:9000{0}'.format(client.root), 'specs', 'test.json') + root = psp.join('hdfs://localhost:9000{0}'.format( + client.root), 'data', '7t_trt') + config = psp.join('hdfs://localhost:9000{0}'.format( + client.root), 'specs', 'test.json') return HDFSLayout(root, config, regex_search=True) + @pytest.fixture(scope='module') def stamp_layout(): root = os.path.join(DIRNAME, 'data', 'valuable_stamps') config = os.path.join(DIRNAME, 'specs', 'stamps.json') - return Layout(root, config) + return Layout(root, config, config_filename='dir_config.json') @pytest.fixture(scope='module') @@ -63,7 +66,7 @@ class TestFile: assert file._matches() assert file._matches(extensions='nii.gz') assert not file._matches(extensions=['.txt', '.rtf']) - file.entities = {'task': 'rest', 'run': '2'} + file.tags = {'task': Tag(None, 'rest'), 'run': Tag(None, '2')} assert file._matches(entities={'task': 'rest', 'run': 2}) assert not file._matches(entities={'task': 'rest', 'run': 4}) assert not file._matches(entities={'task': 'st'}) @@ -75,7 +78,7 @@ class TestFile: regex_search=True) def test_named_tuple(self, file): - file.entities = {'attrA': 'apple', 'attrB': 'banana'} + file.tags = {'attrA': Tag(None, 'apple'), 'attrB': Tag(None, 'banana')} tup = file.as_named_tuple() assert(tup.filename == file.path) assert isinstance(tup, tuple) @@ -98,7 +101,7 @@ class TestEntity: tmpdir.mkdir("tmp").join(filename).write("###") f = File(os.path.join(str(tmpdir), filename)) e = Entity('avaricious', 'aardvark-(\d+)') - e.matches(f) + e.matches(f, update_file=True) assert 'avaricious' in f.entities assert f.entities['avaricious'] == '4' @@ -261,7 +264,7 @@ class TestLayout: assert targ not in layout_include.files with pytest.raises(ValueError): - layout_include._load_config({'entities': [], + layout_include._load_domain({'entities': [], 'index': {'include': 'test', 'exclude': 'test'}}) @@ -273,9 +276,11 @@ class TestLayout: index = json.load(infile) assert len(index) == len(bids_layout.files) # Check that entities for first 10 files match + files = list(bids_layout.files.values()) for i in range(10): - f = list(bids_layout.files.values())[i] - assert f.entities == index[f.path] + f = files[i] + entities = {v.entity.id: v.value for v in f.tags.values()} + assert entities == index[f.path] os.unlink(tmp) def test_load_index(self, bids_layout): @@ -324,22 +329,52 @@ class TestLayout: def test_clone(self, bids_layout): lc = bids_layout.clone() attrs = ['root', 
'mandatory', 'dynamic_getters', 'regex_search', - 'filtering_regex', 'entity_mapper'] + 'entity_mapper'] for a in attrs: assert getattr(bids_layout, a) == getattr(lc, a) assert set(bids_layout.files.keys()) == set(lc.files.keys()) assert set(bids_layout.entities.keys()) == set(lc.entities.keys()) + def test_multiple_domains(self, stamp_layout): + layout = stamp_layout.clone() + assert {'stamps', 'usa_stamps'} == set(layout.domains.keys()) + usa = layout.domains['usa_stamps'] + general = layout.domains['stamps'] + assert len(usa.files) == 3 + assert len(layout.files) == len(general.files) + assert not set(usa.files) - set(general.files) + assert layout.entities['usa_stamps.name'] == usa.entities['name'] + assert layout.entities['stamps.name'] == general.entities['name'] + assert usa.entities['name'] != general.entities['name'] + f = layout.get(name='5c_Francis_E_Willard', return_type='obj')[0] + assert f.entities == {'name': '5c_Francis_E_Willard', + 'value': '1dollar'} + + def test_get_by_domain(self, stamp_layout): + files = stamp_layout.get(domains='usa_stamps') + assert len(files) == 3 + files = stamp_layout.get(domains=['nonexistent', 'doms']) + assert not files + files = stamp_layout.get(domains='usa_stamps', value='35', + regex_search=True) + assert len(files) == 1 + files = stamp_layout.get(value='35', regex_search=True) + assert len(files) == 2 + def test_merge_layouts(bids_layout, stamp_layout): layout = merge_layouts([bids_layout, stamp_layout]) assert len(layout.files) == len(bids_layout.files) + \ len(stamp_layout.files) - assert 'country' in layout.entities - assert 'subject' in layout.entities + assert 'stamps.country' in layout.entities + assert 'test.subject' in layout.entities + dom = layout.domains['stamps'] + assert 'country' in dom.entities + dom = layout.domains['test'] + assert 'subject' in dom.entities # Make sure first Layout was cloned and not passed by reference - patt = layout.entities['subject'].pattern - assert patt == bids_layout.entities['subject'].pattern - bids_layout.entities['subject'].pattern = "meh" + patt = layout.entities['test.subject'].pattern + assert patt == bids_layout.entities['test.subject'].pattern + bids_layout.entities['test.subject'].pattern = "meh" assert patt != "meh" diff --git a/grabbit/tests/test_writable.py b/grabbit/tests/test_writable.py index 22c295a..68b4442 100644 --- a/grabbit/tests/test_writable.py +++ b/grabbit/tests/test_writable.py @@ -1,5 +1,5 @@ import pytest -from grabbit import Layout, File +from grabbit import Layout, File, Tag from grabbit.extensions.writable import build_path import os import shutil @@ -13,6 +13,7 @@ def writable_file(tmpdir): fn.write('###') return File(os.path.join(str(fn))) + @pytest.fixture def layout(): data_dir = join(dirname(__file__), 'data', '7t_trt') @@ -20,15 +21,20 @@ def layout(): layout = Layout(data_dir, config=config) return layout + class TestWritableFile: def test_build_path(self, writable_file): - writable_file.entities = {'task': 'rest', 'run': '2', 'subject': '3'} + writable_file.tags = { + 'task': Tag(None, 'rest'), 'run': Tag(None, '2'), + 'subject': Tag(None, '3') + } # Single simple pattern with pytest.raises(TypeError): build_path(writable_file.entities) - pat = join(writable_file.dirname, '{task}/sub-{subject}/run-{run}.nii.gz') + pat = join(writable_file.dirname, + '{task}/sub-{subject}/run-{run}.nii.gz') target = join(writable_file.dirname, 'rest/sub-3/run-2.nii.gz') assert build_path(writable_file.entities, pat) == target @@ -79,11 +85,13 @@ class TestWritableFile: assert 
not build_path(entities, pats, True) def test_build_file(self, writable_file, tmpdir, caplog): - writable_file.entities = {'task': 'rest', 'run': '2', 'subject': '3'} + writable_file.tags = {'task': Tag(None, 'rest'), 'run': Tag(None, '2'), + 'subject': Tag(None, '3')} # Simple write out new_dir = join(writable_file.dirname, 'rest') - pat = join(writable_file.dirname, '{task}/sub-{subject}/run-{run}.nii.gz') + pat = join(writable_file.dirname, + '{task}/sub-{subject}/run-{run}.nii.gz') target = join(writable_file.dirname, 'rest/sub-3/run-2.nii.gz') writable_file.copy(pat) assert exists(target) @@ -98,7 +106,8 @@ class TestWritableFile: assert log_message == 'A file at path {} already exists, ' \ 'skipping writing file.'.format(target) writable_file.copy(pat, conflicts='append') - append_target = join(writable_file.dirname, 'rest/sub-3/run-2_1.nii.gz') + append_target = join(writable_file.dirname, + 'rest/sub-3/run-2_1.nii.gz') assert exists(append_target) writable_file.copy(pat, conflicts='overwrite') assert exists(target) @@ -138,12 +147,13 @@ class TestWritableLayout: '/sess-2' '/r-1' '/type-bold' - '/task-rest_acq.nii.gz') + '/task-rest.nii.gz') example_file2 = join(str(tmpdir), 'sub-04' '/sess-2' '/r-1' '/type-bold' - '/task-rest_acq.nii.gz') + '/task-rest.nii.gz') + assert exists(example_file) assert not exists(example_file2) @@ -157,7 +167,7 @@ class TestWritableLayout: '/sess-2' '/r-1' '/type-bold' - '/task-rest_acq.nii.gz') + '/task-rest.nii.gz') assert exists(example_file) assert exists(example_file2) @@ -181,6 +191,7 @@ class TestWritableLayout: data_dir = join(dirname(__file__), 'data', '7t_trt') config = join(dirname(__file__), 'specs', 'test.json') layout = Layout(data_dir, config=[config, { + 'name': "test_writable", 'default_path_patterns': ['sub-{subject}/ses-{session}/{subject}' '{session}{run}{type}{task}{acquisition}' '{bval}'] @@ -200,8 +211,8 @@ class TestWritableLayout: def test_build_file_from_layout(self, tmpdir, layout): entities = {'subject': 'Bob', 'session': '01', 'run': '1'} pat = join(str(tmpdir), 'sub-{subject}' - '/sess-{session}' - '/r-{run}.nii.gz') + '/sess-{session}' + '/r-{run}.nii.gz') path = layout.build_path(entities, path_patterns=pat) assert path == join(str(tmpdir), 'sub-Bob/sess-01/r-1.nii.gz')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 2 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "six" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 -e git+https://github.com/grabbles/grabbit.git@c1a811a2a41153afec970f73923c0a53c66ef694#egg=grabbit iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 six==1.17.0 tomli==2.2.1
name: grabbit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - six==1.17.0 - tomli==2.2.1 prefix: /opt/conda/envs/grabbit
[ "grabbit/tests/test_core.py::TestFile::test_init", "grabbit/tests/test_core.py::TestFile::test_matches", "grabbit/tests/test_core.py::TestFile::test_named_tuple", "grabbit/tests/test_core.py::TestEntity::test_init", "grabbit/tests/test_core.py::TestEntity::test_matches", "grabbit/tests/test_core.py::TestEntity::test_unique_and_count", "grabbit/tests/test_core.py::TestEntity::test_add_file", "grabbit/tests/test_core.py::TestLayout::test_init[local]", "grabbit/tests/test_core.py::TestLayout::test_absolute_paths[local]", "grabbit/tests/test_core.py::TestLayout::test_querying[local]", "grabbit/tests/test_core.py::TestLayout::test_natsort[local]", "grabbit/tests/test_core.py::TestLayout::test_unique_and_count[local]", "grabbit/tests/test_core.py::TestLayout::test_get_nearest[local]", "grabbit/tests/test_core.py::TestLayout::test_index_regex[local]", "grabbit/tests/test_core.py::TestLayout::test_save_index[local]", "grabbit/tests/test_core.py::TestLayout::test_load_index[local]", "grabbit/tests/test_core.py::TestLayout::test_clone[local]", "grabbit/tests/test_core.py::test_merge_layouts[local]", "grabbit/tests/test_core.py::TestLayout::test_dynamic_getters[/grabbit/grabbit/tests/data/7t_trt-/grabbit/grabbit/tests/specs/test.json]", "grabbit/tests/test_core.py::TestLayout::test_entity_mapper", "grabbit/tests/test_core.py::TestLayout::test_multiple_domains", "grabbit/tests/test_core.py::TestLayout::test_get_by_domain", "grabbit/tests/test_writable.py::TestWritableFile::test_build_path", "grabbit/tests/test_writable.py::TestWritableFile::test_strict_build_path", "grabbit/tests/test_writable.py::TestWritableFile::test_build_file", "grabbit/tests/test_writable.py::TestWritableLayout::test_write_files", "grabbit/tests/test_writable.py::TestWritableLayout::test_write_contents_to_file", "grabbit/tests/test_writable.py::TestWritableLayout::test_write_contents_to_file_defaults", "grabbit/tests/test_writable.py::TestWritableLayout::test_build_file_from_layout" ]
[]
[]
[]
MIT License
2,188
[ "grabbit/__init__.py", "grabbit/core.py" ]
[ "grabbit/__init__.py", "grabbit/core.py" ]
level12__keg-elements-71
7838f08ef0e8fec7e2e74c8dcad21f748e98a697
2018-02-21 02:54:26
2b0ee8adedd6e21ffa426a5abf3adb9c79706abe
diff --git a/keg_elements/forms/validators.py b/keg_elements/forms/validators.py index f65db1d..d473d7a 100644 --- a/keg_elements/forms/validators.py +++ b/keg_elements/forms/validators.py @@ -47,8 +47,18 @@ class ValidateUnique(object): def __init__(self, object_html_link=None): self.object_html_link = object_html_link + def get_obj(self, form): + if hasattr(form, 'obj'): + return form.obj + if hasattr(form, '_obj'): + return form._obj + + raise AttributeError( + 'Form must provide either `obj` or `_obj` property for uniqueness validation.' + ) + def __call__(self, form, field): - obj = getattr(form, 'obj') + obj = self.get_obj(form) other = form.get_object_by_field(field) both_exist = None not in (obj, other) @@ -57,7 +67,7 @@ class ValidateUnique(object): if (both_exist and not same_record) or another_exists_with_value: link = (' to {}.'.format(self.object_html_link(other)) - if hasattr(self, 'object_html_link') else '.') + if self.object_html_link is not None else '.') msg = jinja2.Markup('This value must be unique but is already assigned' '{}'.format(link)) raise ValidationError(msg)
`ValidateUnique` form validator can integrate better

Right now `ValidateUnique` requires that the form have an `obj` member. This must be added to the form manually, since it is not included by default on a WTForms form. However, WTForms forms do have a `_obj` member that serves the same purpose. We could make `ValidateUnique` easier to use by falling back to this member.
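To sketch the requested behaviour, here is a small self-contained example of the fallback this would introduce. The two form classes are stand-ins rather than real WTForms or keg-elements classes; the point is only the lookup order, preferring an explicit `obj` attribute and otherwise falling back to the `_obj` attribute that WTForms-style forms already carry.

```python
class FormWithObj:
    """Old style: the form author wires up `obj` by hand."""
    def __init__(self, obj):
        self.obj = obj


class WTFormsStyleForm:
    """New style: rely on the `_obj` attribute a WTForms-style form already has."""
    def __init__(self, obj):
        self._obj = obj


def get_obj(form):
    # Prefer an explicit `obj`, otherwise fall back to `_obj`.
    if hasattr(form, 'obj'):
        return form.obj
    if hasattr(form, '_obj'):
        return form._obj
    raise AttributeError(
        'Form must provide either `obj` or `_obj` property for uniqueness validation.'
    )


assert get_obj(FormWithObj('record')) == 'record'
assert get_obj(WTFormsStyleForm('record')) == 'record'
```

Per the issue, a form constructed with `obj=record` already exposes that record as `_obj`, so with this fallback no extra wiring is needed on the form itself.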
level12/keg-elements
diff --git a/keg_elements/tests/test_forms/test_validators.py b/keg_elements/tests/test_forms/test_validators.py index 103894d..e390e5d 100644 --- a/keg_elements/tests/test_forms/test_validators.py +++ b/keg_elements/tests/test_forms/test_validators.py @@ -1,24 +1,67 @@ +import jinja2 import pytest import wtforms +from werkzeug.datastructures import MultiDict + import keg_elements.forms.validators as validators +from keg_elements.forms import ModelForm -class Form(): - def __init__(self, obj): - self.obj = obj +class UniqueForm(ModelForm): + uq_field = wtforms.fields.StringField('thing', validators=[ + validators.ValidateUnique(object_html_link=lambda field: 'link')]) def get_object_by_field(self, field): - return 1 + return 1 if field.data == '1' else None class TestUniqueValidator(object): - def test_unique_validator(self): - field = wtforms.fields.StringField('thing') - validator = validators.ValidateUnique(object_html_link=lambda field: 'link') + def test_validation_passes(self): + form = UniqueForm(MultiDict({'uq_field': '1'}), obj=1) + assert form.uq_field.validate(form) is True + + form = UniqueForm(MultiDict({'uq_field': '2'}), obj=1) + assert form.uq_field.validate(form) is True + + def test_validation_fails(self): + form = UniqueForm(MultiDict({'uq_field': '1'})) + assert form.uq_field.validate(form) is False + assert form.uq_field.errors == [ + jinja2.Markup('This value must be unique but is already assigned to link.') + ] + + form = UniqueForm(MultiDict({'uq_field': '1'}), obj=2) + assert form.uq_field.validate(form) is False + assert form.uq_field.errors == [ + jinja2.Markup('This value must be unique but is already assigned to link.') + ] + + def test_no_object_link_provided(self): + class Form(ModelForm): + uq_field = wtforms.fields.StringField('thing', validators=[validators.ValidateUnique()]) + + def get_object_by_field(self, field): + return 1 if field.data == '1' else None + + form = Form(MultiDict({'uq_field': '1'})) + assert form.uq_field.validate(form) is False + assert form.uq_field.errors == [ + jinja2.Markup('This value must be unique but is already assigned.') + ] + + def test_get_obj(self): + class Form: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + validator = validators.ValidateUnique() + + assert validator.get_obj(Form(obj='foo')) == 'foo' + assert validator.get_obj(Form(_obj='foo')) == 'foo' - assert validator(Form(1), field) is True + with pytest.raises(AttributeError) as exc: + validator.get_obj(Form()) - with pytest.raises(wtforms.ValidationError, - message=('This valuue must be unique bit is already ' - 'assigned link.')): - assert validator(Form(2), field) + assert str(exc.value) == ( + 'Form must provide either `obj` or `_obj` property for uniqueness validation.' + )
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements/runtime.txt", "requirements/testing.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
appdirs==1.4.4 arrow==1.3.0 beautifulsoup4==4.13.3 BlazeUtils==0.7.0 blinker==1.9.0 cffi==1.17.1 click==8.1.8 coverage==7.8.0 cryptography==44.0.2 cssselect==1.3.0 exceptiongroup==1.2.2 flake8==7.2.0 Flask==3.1.0 Flask-SQLAlchemy==3.1.1 Flask-WebTest==0.1.6 Flask-WTF==1.2.2 greenlet==3.1.1 importlib_metadata==8.6.1 infinity==1.5 iniconfig==2.1.0 intervals==0.9.2 itsdangerous==2.2.0 Jinja2==3.1.6 Keg==0.11.1 -e git+https://github.com/level12/keg-elements.git@7838f08ef0e8fec7e2e74c8dcad21f748e98a697#egg=KegElements lxml==5.3.1 MarkupSafe==3.0.2 mccabe==0.7.0 packaging==24.2 pluggy==1.5.0 pycodestyle==2.13.0 pycparser==2.22 pyflakes==3.3.2 pyquery==2.0.1 pytest==8.3.5 pytest-cov==6.0.0 python-dateutil==2.9.0.post0 python-json-logger==3.3.0 pytz==2025.2 six==1.17.0 soupsieve==2.6 SQLAlchemy==2.0.40 SQLAlchemy-Utils==0.41.2 tomli==2.2.1 types-python-dateutil==2.9.0.20241206 typing_extensions==4.13.0 validators==0.34.0 waitress==3.0.2 WebOb==1.8.9 WebTest==3.0.4 Werkzeug==3.1.3 wrapt==1.17.2 WTForms==3.2.1 WTForms-Alchemy==0.19.0 WTForms-Components==0.11.0 zipp==3.21.0
name: keg-elements channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - appdirs==1.4.4 - arrow==1.3.0 - beautifulsoup4==4.13.3 - blazeutils==0.7.0 - blinker==1.9.0 - cffi==1.17.1 - click==8.1.8 - coverage==7.8.0 - cryptography==44.0.2 - cssselect==1.3.0 - exceptiongroup==1.2.2 - flake8==7.2.0 - flask==3.1.0 - flask-sqlalchemy==3.1.1 - flask-webtest==0.1.6 - flask-wtf==1.2.2 - greenlet==3.1.1 - importlib-metadata==8.6.1 - infinity==1.5 - iniconfig==2.1.0 - intervals==0.9.2 - itsdangerous==2.2.0 - jinja2==3.1.6 - keg==0.11.1 - lxml==5.3.1 - markupsafe==3.0.2 - mccabe==0.7.0 - packaging==24.2 - pluggy==1.5.0 - pycodestyle==2.13.0 - pycparser==2.22 - pyflakes==3.3.2 - pyquery==2.0.1 - pytest==8.3.5 - pytest-cov==6.0.0 - python-dateutil==2.9.0.post0 - python-json-logger==3.3.0 - pytz==2025.2 - six==1.17.0 - soupsieve==2.6 - sqlalchemy==2.0.40 - sqlalchemy-utils==0.41.2 - tomli==2.2.1 - types-python-dateutil==2.9.0.20241206 - typing-extensions==4.13.0 - validators==0.34.0 - waitress==3.0.2 - webob==1.8.9 - webtest==3.0.4 - werkzeug==3.1.3 - wrapt==1.17.2 - wtforms==3.2.1 - wtforms-alchemy==0.19.0 - wtforms-components==0.11.0 - zipp==3.21.0 prefix: /opt/conda/envs/keg-elements
[ "keg_elements/tests/test_forms/test_validators.py::TestUniqueValidator::test_get_obj" ]
[ "keg_elements/tests/test_forms/test_validators.py::TestUniqueValidator::test_validation_passes", "keg_elements/tests/test_forms/test_validators.py::TestUniqueValidator::test_validation_fails", "keg_elements/tests/test_forms/test_validators.py::TestUniqueValidator::test_no_object_link_provided" ]
[]
[]
BSD License
2,189
[ "keg_elements/forms/validators.py" ]
[ "keg_elements/forms/validators.py" ]
ucfopen__canvasapi-155
5a3b536d9cff7d79df58203f64531cf649a6d942
2018-02-21 20:19:02
c69f6a9801ac275fdad46d97fa95c77c25d6f953
coveralls: [![Coverage Status](https://coveralls.io/builds/15634324/badge)](https://coveralls.io/builds/15634324) Coverage remained the same at 100.0% when pulling **482ea1d76add02887435faf7f61e5f4125af43a4 on andrew-gardener:issue/154-course-sections-converage** into **5a3b536d9cff7d79df58203f64531cf649a6d942 on ucfopen:develop**.

Thetwam: Hey @andrew-gardener, thanks for the pull request! I believe we already cover this endpoint as [`Course.list_sections()`](http://canvasapi.readthedocs.io/en/latest/course-ref.html#canvasapi.course.Course.list_sections) ([here in the repo](https://github.com/ucfopen/canvasapi/blob/fec74692326591055df97451dd1fc24ff6e4dd0c/canvasapi/course.py#L673)). Internally, we have been talking about standardizing methods to always use the prefix `get_` rather than `list_` to avoid confusion. This seems like a good opportunity to change `list_sections` to `get_sections` for clarity. I've opened an issue for us to go through the library and change all `list_` to `get_` (#156), but for now we'll just do sections. Your changes to `Course.get_section()` look good! Good catch. Would you like to handle the changing of `list_sections` to `get_sections`?

andrew-gardener: @Thetwam my bad! I didn't realize that it existed as `list_sections`. I've updated the PR with the deprecation warning.
diff --git a/canvasapi/course.py b/canvasapi/course.py index e06fd8c..df55c63 100644 --- a/canvasapi/course.py +++ b/canvasapi/course.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, division, print_function, unicode_literals -from warnings import warn + +import warnings from six import python_2_unicode_compatible, text_type @@ -17,6 +18,8 @@ from canvasapi.user import UserDisplay from canvasapi.util import combine_kwargs, is_multivalued, obj_or_id from canvasapi.rubric import Rubric +warnings.simplefilter('always', DeprecationWarning) + @python_2_unicode_compatible class Course(CanvasObject): @@ -541,12 +544,32 @@ class Course(CanvasObject): _kwargs=combine_kwargs(**kwargs) ) - def get_section(self, section): + def get_sections(self, **kwargs): + """ + List all sections in a course. + + :calls: `GET /api/v1/courses/:course_id/sections \ + <https://canvas.instructure.com/doc/api/sections.html#method.sections.index>`_ + + :rtype: :class:`canvasapi.paginated_list.PaginatedList` of + :class:`canvasapi.section.Section` + """ + from canvasapi.section import Section + + return PaginatedList( + Section, + self._requester, + 'GET', + 'courses/{}/sections'.format(self.id), + _kwargs=combine_kwargs(**kwargs) + ) + + def get_section(self, section, **kwargs): """ Retrieve a section. :calls: `GET /api/v1/courses/:course_id/sections/:id \ - <https://canvas.instructure.com/doc/api/sections.html#method.sections.index>`_ + <https://canvas.instructure.com/doc/api/sections.html#method.sections.show>`_ :param section: The object or ID of the section to retrieve. :type section: :class:`canvasapi.section.Section` or int @@ -559,7 +582,8 @@ class Course(CanvasObject): response = self._requester.request( 'GET', - 'courses/{}/sections/{}'.format(self.id, section_id) + 'courses/{}/sections/{}'.format(self.id, section_id), + _kwargs=combine_kwargs(**kwargs) ) return Section(self._requester, response.json()) @@ -680,14 +704,12 @@ class Course(CanvasObject): :rtype: :class:`canvasapi.paginated_list.PaginatedList` of :class:`canvasapi.section.Section` """ - from canvasapi.section import Section - return PaginatedList( - Section, - self._requester, - 'GET', - 'courses/{}/sections'.format(self.id), - _kwargs=combine_kwargs(**kwargs) + warnings.warn( + "`list_sections` is being deprecated and will be removed in a future version." + " Use `get_sections` instead", + DeprecationWarning ) + return self.get_sections(**kwargs) def create_course_section(self, **kwargs): """ @@ -1202,7 +1224,7 @@ class Course(CanvasObject): :class:`canvasapi.submission.Submission` """ if 'grouped' in kwargs: - warn('The `grouped` parameter must be empty. Removing kwarg `grouped`.') + warnings.warn('The `grouped` parameter must be empty. Removing kwarg `grouped`.') del kwargs['grouped'] return PaginatedList(
Course sections coverage

Add coverage for course sections: https://canvas.instructure.com/doc/api/sections.html#method.sections.index

Also improve course section coverage to support the `include` parameter: https://canvas.instructure.com/doc/api/sections.html#method.sections.show
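As a usage illustration, here is a hedged sketch of how the requested coverage would typically be called from canvasapi once it exists, following the `get_sections`/`get_section` naming adopted in the patch for this issue. The instance URL, access token, course ID, and the `include` value `"students"` are placeholder assumptions; consult the linked Canvas docs for the exact `include` options.

```python
from canvasapi import Canvas

API_URL = "https://canvas.example.edu"  # placeholder Canvas instance
API_KEY = "<access token>"              # placeholder API token

canvas = Canvas(API_URL, API_KEY)
course = canvas.get_course(1)           # placeholder course ID

# List every section in the course (pagination is handled lazily).
for section in course.get_sections():
    print(section.id, section.name)

# Retrieve a single section, asking Canvas to embed extra data via `include`.
section = course.get_section(1, include=["students"])
```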
ucfopen/canvasapi
diff --git a/tests/fixtures/course.json b/tests/fixtures/course.json index 1aa9eb8..fa235c6 100644 --- a/tests/fixtures/course.json +++ b/tests/fixtures/course.json @@ -346,6 +346,43 @@ }, "status_code": 200 }, + "get_sections": { + "method": "GET", + "endpoint": "courses/1/sections", + "data": [ + { + "id": 1, + "name": "Section 1", + "course_id": 1 + }, + { + "id": 2, + "name": "Section 2", + "course_id": 1 + } + ], + "headers": { + "Link": "<http://example.com/api/v1/courses/1/sections?page=2&per_page=2>; rel=\"next\"" + }, + "status_code": 200 + }, + "get_sections_p2": { + "method": "GET", + "endpoint": "courses/1/sections?page=2&per_page=2", + "data": [ + { + "id": 3, + "name": "Section 3", + "course_id": 1 + }, + { + "id": 4, + "name": "Section 4", + "course_id": 1 + } + ], + "status_code": 200 + }, "get_user": { "method": "GET", "endpoint": "courses/1/users/1", @@ -872,39 +909,6 @@ }, "status_code": 200 }, - "list_sections": { - "method": "GET", - "endpoint": "courses/1/sections", - "data": [ - { - "id": 1, - "name": "Section 1" - }, - { - "id": 2, - "name": "Section 2" - } - ], - "status_code": 200, - "headers": { - "Link": "<http://example.com/api/v1/courses/1/list_sections?page=2&per_page=2>; rel=\"next\"" - } - }, - "list_sections2": { - "method": "GET", - "endpoint": "courses/1/list_sections?page=2&per_page=2", - "data": [ - { - "id": 3, - "name": "Section 3" - }, - { - "id": 4, - "name": "Section 4" - } - ], - "status_code": 200 - }, "create_section": { "method": "POST", "endpoint": "courses/1/sections", diff --git a/tests/test_course.py b/tests/test_course.py index 888ca59..04aa27b 100644 --- a/tests/test_course.py +++ b/tests/test_course.py @@ -302,6 +302,16 @@ class TestCourse(unittest.TestCase): self.assertEqual(len(enrollment_list), 4) self.assertIsInstance(enrollment_list[0], Enrollment) + # get_sections() + def test_get_sections(self, m): + register_uris({'course': ['get_sections', 'get_sections_p2']}, m) + + sections = self.course.get_sections() + section_list = [section for section in sections] + + self.assertEqual(len(section_list), 4) + self.assertIsInstance(section_list[0], Section) + # get_section def test_get_section(self, m): register_uris({'course': ['get_section']}, m) @@ -434,13 +444,15 @@ class TestCourse(unittest.TestCase): self.assertEqual(len(tool_list), 4) def test_list_sections(self, m): - register_uris({'course': ['list_sections', 'list_sections2']}, m) + register_uris({'course': ['get_sections', 'get_sections_p2']}, m) - sections = self.course.list_sections() - section_list = [sect for sect in sections] + with warnings.catch_warnings(record=True) as w: + sections = self.course.list_sections() + section_list = [sect for sect in sections] - self.assertIsInstance(section_list[0], Section) - self.assertEqual(len(section_list), 4) + self.assertEqual(len(section_list), 4) + self.assertIsInstance(section_list[0], Section) + self.assertTrue(issubclass(w[0].category, DeprecationWarning)) def test_create_course_section(self, m): register_uris({'course': ['create_section']}, m)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 1 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "coverage", "flake8", "pycodestyle", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt", "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 -e git+https://github.com/ucfopen/canvasapi.git@5a3b536d9cff7d79df58203f64531cf649a6d942#egg=canvasapi certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 docutils==0.17.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytz==2025.2 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==4.3.2 sphinx-rtd-theme==1.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: canvasapi channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - docutils==0.17.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytz==2025.2 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-rtd-theme==1.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/canvasapi
[ "tests/test_course.py::TestCourse::test_get_sections" ]
[ "tests/test_course.py::TestCourse::test_list_sections" ]
[ "tests/test_course.py::TestCourse::test__str__", "tests/test_course.py::TestCourse::test_add_grading_standards", "tests/test_course.py::TestCourse::test_add_grading_standards_empty_list", "tests/test_course.py::TestCourse::test_add_grading_standards_missing_name_key", "tests/test_course.py::TestCourse::test_add_grading_standards_missing_value_key", "tests/test_course.py::TestCourse::test_add_grading_standards_non_dict_list", "tests/test_course.py::TestCourse::test_conclude", "tests/test_course.py::TestCourse::test_course_files", "tests/test_course.py::TestCourse::test_create_assignment", "tests/test_course.py::TestCourse::test_create_assignment_fail", "tests/test_course.py::TestCourse::test_create_assignment_group", "tests/test_course.py::TestCourse::test_create_course_section", "tests/test_course.py::TestCourse::test_create_discussion_topic", "tests/test_course.py::TestCourse::test_create_external_feed", "tests/test_course.py::TestCourse::test_create_external_tool", "tests/test_course.py::TestCourse::test_create_folder", "tests/test_course.py::TestCourse::test_create_group_category", "tests/test_course.py::TestCourse::test_create_module", "tests/test_course.py::TestCourse::test_create_module_fail", "tests/test_course.py::TestCourse::test_create_page", "tests/test_course.py::TestCourse::test_create_page_fail", "tests/test_course.py::TestCourse::test_create_quiz", "tests/test_course.py::TestCourse::test_create_quiz_fail", "tests/test_course.py::TestCourse::test_delete", "tests/test_course.py::TestCourse::test_delete_external_feed", "tests/test_course.py::TestCourse::test_edit_front_page", "tests/test_course.py::TestCourse::test_enroll_user", "tests/test_course.py::TestCourse::test_get_assignment", "tests/test_course.py::TestCourse::test_get_assignment_group", "tests/test_course.py::TestCourse::test_get_assignments", "tests/test_course.py::TestCourse::test_get_course_level_assignment_data", "tests/test_course.py::TestCourse::test_get_course_level_participation_data", "tests/test_course.py::TestCourse::test_get_course_level_student_summary_data", "tests/test_course.py::TestCourse::test_get_discussion_topic", "tests/test_course.py::TestCourse::test_get_discussion_topics", "tests/test_course.py::TestCourse::test_get_enrollments", "tests/test_course.py::TestCourse::test_get_external_tool", "tests/test_course.py::TestCourse::test_get_external_tools", "tests/test_course.py::TestCourse::test_get_file", "tests/test_course.py::TestCourse::test_get_folder", "tests/test_course.py::TestCourse::test_get_full_discussion_topic", "tests/test_course.py::TestCourse::test_get_grading_standards", "tests/test_course.py::TestCourse::test_get_module", "tests/test_course.py::TestCourse::test_get_modules", "tests/test_course.py::TestCourse::test_get_outcome_group", "tests/test_course.py::TestCourse::test_get_outcome_groups_in_context", "tests/test_course.py::TestCourse::test_get_outcome_links_in_context", "tests/test_course.py::TestCourse::test_get_outcome_result_rollups", "tests/test_course.py::TestCourse::test_get_outcome_results", "tests/test_course.py::TestCourse::test_get_page", "tests/test_course.py::TestCourse::test_get_pages", "tests/test_course.py::TestCourse::test_get_quiz", "tests/test_course.py::TestCourse::test_get_quiz_fail", "tests/test_course.py::TestCourse::test_get_quizzes", "tests/test_course.py::TestCourse::test_get_recent_students", "tests/test_course.py::TestCourse::test_get_root_outcome_group", "tests/test_course.py::TestCourse::test_get_rubric", 
"tests/test_course.py::TestCourse::test_get_section", "tests/test_course.py::TestCourse::test_get_settings", "tests/test_course.py::TestCourse::test_get_single_grading_standard", "tests/test_course.py::TestCourse::test_get_submission", "tests/test_course.py::TestCourse::test_get_user", "tests/test_course.py::TestCourse::test_get_user_id_type", "tests/test_course.py::TestCourse::test_get_user_in_a_course_level_assignment_data", "tests/test_course.py::TestCourse::test_get_user_in_a_course_level_messaging_data", "tests/test_course.py::TestCourse::test_get_user_in_a_course_level_participation_data", "tests/test_course.py::TestCourse::test_get_users", "tests/test_course.py::TestCourse::test_list_assignment_groups", "tests/test_course.py::TestCourse::test_list_external_feeds", "tests/test_course.py::TestCourse::test_list_folders", "tests/test_course.py::TestCourse::test_list_gradeable_students", "tests/test_course.py::TestCourse::test_list_group_categories", "tests/test_course.py::TestCourse::test_list_groups", "tests/test_course.py::TestCourse::test_list_multiple_submissions", "tests/test_course.py::TestCourse::test_list_multiple_submissions_grouped_param", "tests/test_course.py::TestCourse::test_list_rubrics", "tests/test_course.py::TestCourse::test_list_submissions", "tests/test_course.py::TestCourse::test_list_tabs", "tests/test_course.py::TestCourse::test_mark_submission_as_read", "tests/test_course.py::TestCourse::test_mark_submission_as_unread", "tests/test_course.py::TestCourse::test_preview_html", "tests/test_course.py::TestCourse::test_reorder_pinned_topics", "tests/test_course.py::TestCourse::test_reorder_pinned_topics_comma_separated_string", "tests/test_course.py::TestCourse::test_reorder_pinned_topics_invalid_input", "tests/test_course.py::TestCourse::test_reorder_pinned_topics_tuple", "tests/test_course.py::TestCourse::test_reset", "tests/test_course.py::TestCourse::test_show_front_page", "tests/test_course.py::TestCourse::test_subit_assignment_fail", "tests/test_course.py::TestCourse::test_submit_assignment", "tests/test_course.py::TestCourse::test_update", "tests/test_course.py::TestCourse::test_update_settings", "tests/test_course.py::TestCourse::test_update_submission", "tests/test_course.py::TestCourse::test_update_tab", "tests/test_course.py::TestCourse::test_upload", "tests/test_course.py::TestCourseNickname::test__str__", "tests/test_course.py::TestCourseNickname::test_remove" ]
[]
MIT License
2,191
[ "canvasapi/course.py" ]
[ "canvasapi/course.py" ]
jnothman__UpSetPlot-11
b8c0c99ee68c07ac0902d0f660570e33a55907b1
2018-02-21 22:29:15
b8c0c99ee68c07ac0902d0f660570e33a55907b1
diff --git a/upsetplot/plotting.py b/upsetplot/plotting.py index 3645595..6578ac5 100644 --- a/upsetplot/plotting.py +++ b/upsetplot/plotting.py @@ -1,10 +1,10 @@ -import warnings import itertools import numpy as np import pandas as pd import matplotlib from matplotlib import pyplot as plt +from matplotlib.tight_layout import get_renderer def _process_data(data, order, order_categories): @@ -119,11 +119,7 @@ class UpSetPlot: def _calculate_text_ncols(self, fig): if fig is None: fig = plt.gcf() - try: - r = fig.canvas.get_renderer() - except Exception: - warnings.warn('Could not get current renderer') - r = None + r = get_renderer(fig) t = fig.text(0, 0, '\n'.join(self.totals.index.values)) textw = t.get_window_extent(renderer=r).width figw = fig.get_window_extent(renderer=r).width @@ -141,8 +137,10 @@ class UpSetPlot: n_categories = data.index.nlevels # alternating row shading (XXX: use add_patch(Rectangle)?) - ax.barh(np.arange(0, n_categories, 2), len(data) + 1, left=-1, - color='#f5f5f5', zorder=0) + alternating = np.arange(0, n_categories, 2) + ax.barh(alternating, np.full(len(alternating), len(data) + 1), + left=-1, color='#f5f5f5', zorder=0, linewidth=0, + align='center') idx = np.flatnonzero(data.index.to_frame()[data.index.names].values) c = np.array(['lightgrey'] * len(data) * n_categories, dtype='O') @@ -150,7 +148,7 @@ class UpSetPlot: x = np.repeat(np.arange(len(data)), n_categories) y = np.tile(np.arange(n_categories), len(data)) # TODO: make s relative to colw - ax.scatter(x, y, c=c, s=200) + ax.scatter(x, y, c=c.tolist(), linewidth=0, s=200) if self._with_lines: line_data = (pd.Series(y[idx], index=x[idx]) @@ -170,7 +168,7 @@ class UpSetPlot: """Plot bars indicating intersection size """ ax.bar(np.arange(len(self.intersections)), self.intersections, - width=.5, color=self._forecolor, zorder=10) + width=.5, color=self._forecolor, zorder=10, align='center') ax.xaxis.set_visible(False) for x in ['top', 'bottom', 'right']: ax.spines[x].set_visible(False) @@ -182,12 +180,13 @@ class UpSetPlot: """Plot bars indicating total set size """ ax.barh(np.arange(len(self.totals.index.values)), self.totals, - height=.5, color=self._forecolor) + height=.5, color=self._forecolor, align='center') max_total = self.totals.max() ax.set_xlim(max_total, 0) for x in ['top', 'left', 'right']: ax.spines[x].set_visible(False) ax.yaxis.set_visible(False) + ax.xaxis.grid(True) ax.ticklabel_format(axis='x') def plot(self, fig=None):
Smoke test plotting
Make sure plots can run in all environments.
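For context, the smoke-test pattern the accompanying test patch settles on: build a `Figure` directly so no GUI backend is needed, then force a draw by saving to an in-memory buffer. A condensed sketch, reusing the `generate_data` and `plot` helpers from the test patch:

```python
# Backend-agnostic smoke test: build a Figure directly (no pyplot window)
# and force a full draw by rendering to an in-memory PNG.
import io

import matplotlib.figure

from upsetplot import generate_data, plot


def test_plot_smoke_test():
    fig = matplotlib.figure.Figure()
    plot(generate_data(n_samples=100), fig)
    fig.savefig(io.BytesIO(), format='png')
```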
jnothman/UpSetPlot
diff --git a/upsetplot/tests/test_upsetplot.py b/upsetplot/tests/test_upsetplot.py index 85d1b94..94edca7 100644 --- a/upsetplot/tests/test_upsetplot.py +++ b/upsetplot/tests/test_upsetplot.py @@ -1,9 +1,13 @@ +import io + import pytest from pandas.util.testing import assert_series_equal import numpy as np +import matplotlib.figure +from upsetplot import plot +from upsetplot import generate_data from upsetplot.plotting import _process_data -from upsetplot.data import generate_data def is_ascending(seq): @@ -53,3 +57,10 @@ def test_not_aggregated(order, order_categories): assert_series_equal(intersections1, intersections2, check_dtype=False) assert_series_equal(totals1, totals2, check_dtype=False) + + [email protected]('kw', [{}]) +def test_plot_smoke_test(kw): + fig = matplotlib.figure.Figure() + plot(generate_data(n_samples=100), fig) + fig.savefig(io.BytesIO(), format='png')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "flake8", "python-coveralls", "numpydoc", "sphinx-gallery", "sphinx-issues" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "test_requirements.txt", "doc/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cycler==0.11.0 docutils==0.17.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 iniconfig==1.1.1 Jinja2==3.0.3 kiwisolver==1.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 mccabe==0.7.0 numpy==1.19.5 numpydoc==1.1.0 packaging==21.3 pandas==1.1.5 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 python-coveralls==2.9.3 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 requests==2.27.1 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 Sphinx==4.3.2 sphinx-gallery==0.10.0 sphinx-issues==3.0.1 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 -e git+https://github.com/jnothman/UpSetPlot.git@b8c0c99ee68c07ac0902d0f660570e33a55907b1#egg=upsetplot urllib3==1.26.20 zipp==3.6.0
name: UpSetPlot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - cycler==0.11.0 - docutils==0.17.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - mccabe==0.7.0 - numpy==1.19.5 - numpydoc==1.1.0 - packaging==21.3 - pandas==1.1.5 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - python-coveralls==2.9.3 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - requests==2.27.1 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-gallery==0.10.0 - sphinx-issues==3.0.1 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/UpSetPlot
[ "upsetplot/tests/test_upsetplot.py::test_plot_smoke_test[kw0]" ]
[ "upsetplot/tests/test_upsetplot.py::test_process_data[False-degree-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[True-degree-X1]" ]
[ "upsetplot/tests/test_upsetplot.py::test_process_data[False-size-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[False-size-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[False-degree-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[True-size-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[True-size-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[True-degree-X0]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[False-size]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[False-degree]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[True-size]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[True-degree]" ]
[]
New BSD License
2,192
[ "upsetplot/plotting.py" ]
[ "upsetplot/plotting.py" ]
pytorch__ignite-88
84a635b10433d79d3889d18ca37521d9a4971c21
2018-02-22 00:10:48
84a635b10433d79d3889d18ca37521d9a4971c21
diff --git a/ignite/evaluator.py b/ignite/evaluator.py index 345acce0..249fe72d 100644 --- a/ignite/evaluator.py +++ b/ignite/evaluator.py @@ -12,7 +12,7 @@ class Evaluator(Engine): super(Evaluator, self).add_event_handler(event_name, handler, *args, **kwargs) def run(self, data): - state = State(dataloader=data) + state = State(dataloader=data, metrics={}) self._fire_event(Events.STARTED, state) hours, mins, secs = self._run_once_on_dataset(state) self._logger.info("Evaluation Complete. Time taken: %02d:%02d:%02d", hours, mins, secs) @@ -20,16 +20,17 @@ class Evaluator(Engine): return state -def create_supervised_evaluator(model, cuda=False): +def create_supervised_evaluator(model, metrics={}, cuda=False): """ Factory function for creating an evaluator for supervised models Args: model (torch.nn.Module): the model to train + metrics (dict of str: Metric): a map of metric names to Metrics cuda (bool, optional): whether or not to transfer batch to GPU (default: False) Returns: - Trainer: a trainer instance with supervised inference function + Evaluator: a evaluator instance with supervised inference function """ def _prepare_batch(batch): x, y = batch @@ -43,4 +44,9 @@ def create_supervised_evaluator(model, cuda=False): y_pred = model(x) return y_pred.data.cpu(), y.data.cpu() - return Evaluator(_inference) + evaluator = Evaluator(_inference) + + for name, metric in metrics.items(): + metric.attach(evaluator, name) + + return evaluator diff --git a/ignite/exceptions.py b/ignite/exceptions.py new file mode 100644 index 00000000..c8c0a94d --- /dev/null +++ b/ignite/exceptions.py @@ -0,0 +1,4 @@ +class NotComputableError(RuntimeError): + """ + Exception class to raise if Metric cannot be computed. + """ diff --git a/ignite/handlers/__init__.py b/ignite/handlers/__init__.py index 8b142dd3..ee3f463c 100644 --- a/ignite/handlers/__init__.py +++ b/ignite/handlers/__init__.py @@ -1,1 +1,2 @@ +from .checkpoint import ModelCheckpoint from .timing import Timer diff --git a/ignite/handlers/checkpoint.py b/ignite/handlers/checkpoint.py new file mode 100644 index 00000000..2b2b8ba7 --- /dev/null +++ b/ignite/handlers/checkpoint.py @@ -0,0 +1,157 @@ +import os +import tempfile + +import torch + + +class ModelCheckpoint(object): + """ ModelCheckpoint handler can be used to periodically save objects to disk. + + This handler accepts three arguments: + - an `ignite.engine.Engine` object + - an `ignite.engine.State` object, which will be passed to the + `score_function` (if it is provided) + - a `dict` mapping names (`str`) to objects that should be saved to disk. + See Notes and Examples for further details. + + Args: + dirname (str): + Directory path where objects will be saved + filename_prefix (str): + Prefix for the filenames to which objects will be saved. See Notes + for more details. + save_interval (int, optional): + if not None, objects will be saved to disk every `save_interval` calls to the handler. + Exactly one of (`save_interval`, `score_function`) arguments must be provided. + score_function (Callable, optional): + if not None, it should be a function taking a 1single argument, + an `ignite.engine.State` object, + and return a score (`float`). Objects with highest scores will be retained. + Exactly one of (`save_interval`, `score_function`) arguments must be provided. + n_saved (int, optional): + Number of objects that should be kept on disk. Older files will be removed. 
+ atomic (bool, optional): + If True, objects are serialized to a temporary file, + and then moved to final destination, so that files are + guaranteed to not be damaged (for example if exception occures during saving). + require_empty (bool, optional): + If True, will raise exception if there are any files starting with `filename_prefix` + in the directory 'dirname' + create_dir (bool, optional): + If True, will create directory 'dirname' if it doesnt exist. + exist_ok (bool, optional): + Passed to 'os.makedirs' call. Ignored if 'create_dir' is False. + + Notes: + This handler expects three arguments: an `Engine` object, + a `State` object, and a `dict` mapping names to objects that should + be saved. + These names are used to specify filenames for saved objects. + Each filename has the following structure: + `{filename_prefix}_{name}_{step_number}.pth`. + Here, `filename_prefix` is the argument passed to the constructor, + `name` is the key in the aforementioned `dict`, and `step_number` + is incremented by `1` with every call to the handler. + + Examples: + >>> import os + >>> from ignite.engine import Events + >>> from ignite.handlers import ModelCheckpoint + >>> from ignite.trainer import Trainer + >>> from torch import nn + >>> trainer = Trainer(lambda batch: None) + >>> handler = ModelCheckpoint('/tmp/models', 'myprefix', save_interval=2, n_saved=2, create_dir=True) + >>> model = nn.Linear(3, 3) + >>> trainer.add_event_handler(Events.EPOCH_COMPLETED, handler, {'mymodel': model}) + >>> trainer.run([0], max_epochs=6) + >>> os.listdir('/tmp/models') + ['myprefix_mymodel_4.pth', 'myprefix_mymodel_6.pth'] + """ + + def __init__(self, dirname, filename_prefix, + save_interval=None, score_function=None, + n_saved=1, + atomic=True, require_empty=True, + create_dir=True, exist_ok=False): + + self._dirname = dirname + self._fname_prefix = filename_prefix + self._n_saved = n_saved + self._save_interval = save_interval + self._score_function = score_function + self._atomic = atomic + self._saved = [] # list of tuples (priority, saved_objects) + self._iteration = 0 + + if not (save_interval is None) ^ (score_function is None): + raise ValueError("Exactly one of `save_interval`, or `score_function` " + "arguments must be provided.") + + if create_dir: + exists = os.path.exists(dirname) + if exists and not exist_ok: + raise OSError("Directory {} already exists. Pass exist_ok=True to ignore this error.") + elif not exists: + os.makedirs(dirname) + + if require_empty: + matched = [fname + for fname in os.listdir(dirname) + if fname.startswith(self._fname_prefix)] + + if len(matched) > 0: + raise ValueError("Files prefixed with {} are already present " + "in the directory {}. If you want to use this " + "directory anyway, pass `require_empty=False`. 
" + "".format(filename_prefix, dirname)) + + def _save(self, obj, path): + if not self._atomic: + torch.save(obj, path) + + else: + tmp = tempfile.NamedTemporaryFile(delete=False) + + try: + torch.save(obj, tmp.file) + + except BaseException: + tmp.close() + os.remove(tmp.name) + raise + + else: + tmp.close() + os.rename(tmp.name, path) + + def __call__(self, engine, state, to_save): + if len(to_save) == 0: + raise RuntimeError("No objects to checkpoint found.") + + self._iteration += 1 + + if self._score_function is not None: + priority = self._score_function(state) + + else: + priority = self._iteration + if (self._iteration % self._save_interval) != 0: + return + + if (len(self._saved) < self._n_saved) or (self._saved[0][0] < priority): + saved_objs = [] + + for name, obj in to_save.items(): + fname = '{}_{}_{}.pth'.format(self._fname_prefix, name, self._iteration) + path = os.path.join(self._dirname, fname) + + self._save(obj=obj, path=path) + saved_objs.append(path) + + self._saved.append((priority, saved_objs)) + self._saved.sort(key=lambda item: item[0]) + + if len(self._saved) > self._n_saved: + _, paths = self._saved.pop(0) + for p in paths: + os.remove(p) diff --git a/ignite/metrics/__init__.py b/ignite/metrics/__init__.py new file mode 100644 index 00000000..3902c5de --- /dev/null +++ b/ignite/metrics/__init__.py @@ -0,0 +1,3 @@ +from .categorical_accuracy import CategoricalAccuracy +from .mean_squared_error import MeanSquaredError +from .metric import Metric diff --git a/ignite/metrics/categorical_accuracy.py b/ignite/metrics/categorical_accuracy.py new file mode 100644 index 00000000..a08c0672 --- /dev/null +++ b/ignite/metrics/categorical_accuracy.py @@ -0,0 +1,29 @@ +from __future__ import division + +import torch + +from .metric import Metric +from ignite.exceptions import NotComputableError + + +class CategoricalAccuracy(Metric): + """ + Calculates the categorical accuracy. + + `update` must receive output of the form (y_pred, y). + """ + def reset(self): + self._num_correct = 0 + self._num_examples = 0 + + def update(self, output): + y_pred, y = output + indices = torch.max(y_pred, 1)[1] + correct = torch.eq(indices, y) + self._num_correct += torch.sum(correct) + self._num_examples += correct.shape[0] + + def compute(self): + if self._num_examples == 0: + raise NotComputableError('CategoricalAccuracy must have at least one example before it can be computed') + return self._num_correct / self._num_examples diff --git a/ignite/metrics/mean_squared_error.py b/ignite/metrics/mean_squared_error.py new file mode 100644 index 00000000..98f9533c --- /dev/null +++ b/ignite/metrics/mean_squared_error.py @@ -0,0 +1,28 @@ +from __future__ import division + +import torch + +from .metric import Metric +from ignite.exceptions import NotComputableError + + +class MeanSquaredError(Metric): + """ + Calculates the mean squared error. + + `update` must receive output of the form (y_pred, y). 
+ """ + def reset(self): + self._sum_of_squared_errors = 0.0 + self._num_examples = 0 + + def update(self, output): + y_pred, y = output + squared_errors = torch.pow(y_pred - y.view_as(y_pred), 2) + self._sum_of_squared_errors += torch.sum(squared_errors) + self._num_examples += y.shape[0] + + def compute(self): + if self._num_examples == 0: + raise NotComputableError('MeanSquaredError must have at least one example before it can be computed') + return self._sum_of_squared_errors / self._num_examples diff --git a/ignite/metrics/metric.py b/ignite/metrics/metric.py new file mode 100644 index 00000000..d4ea0da4 --- /dev/null +++ b/ignite/metrics/metric.py @@ -0,0 +1,66 @@ +from abc import ABCMeta, abstractmethod + +from ignite.engine import Events + + +class Metric(object): + __metaclass__ = ABCMeta + + """ + Base class for all Metrics. + + Metrics provide a way to compute various quantities of interest in an online + fashion without having to store the entire output history of a model. + """ + def __init__(self): + self.reset() + + @abstractmethod + def reset(self): + """ + Resets the metric to to it's initial state. + + This is called at the start of each evaluation run. + """ + pass + + @abstractmethod + def update(self, output): + """ + Updates the metric's state using the passed batch output. + + This is called once for each batch of each evaluation run. + + Args: + output: the is the output from the evaluator's process function + """ + pass + + @abstractmethod + def compute(self): + """ + Computes the metric based on it's accumulated state. + + This is called at the end of each evaluation run. + + Returns: + Any: the actual quantity of interest + + Raises: + NotComputableError: raised when the metric cannot be computed + """ + pass + + def started(self, engine, state): + self.reset() + + def iteration_completed(self, engine, state): + self.update(state.output) + + def completed(self, engine, state, name): + state.metrics[name] = self.compute() + + def attach(self, engine, name): + engine.add_event_handler(Events.STARTED, self.started) + engine.add_event_handler(Events.ITERATION_COMPLETED, self.iteration_completed) + engine.add_event_handler(Events.COMPLETED, self.completed, name)
Evaluation/Metrics approach

This has been discussed a bit in other issues, but I wanted to make a dedicated issue for us to discuss this as I think it's very important we get this right.

## Background

Some previous discussions [here](https://github.com/pytorch/ignite/pull/54#issuecomment-363231705) and [here](https://github.com/pytorch/ignite/issues/72).

Throughout, I am going to use the motivating example of training a supervised model where you periodically want to compute some metrics against a validation set.

## Current Setup

Currently, in order to accomplish this, you need to do the following:

1. create an `Evaluator`
2. register the `Evaluate` handler to run the `Evaluator` on the validation set and store the predictions in the history
3. add another event handler to actually use this history to compute the metrics you care about
4. log/plot these metrics however you choose

In code, this looks something like this:

```python
model = ...
validation_loader = ...
trainer = ...

evaluator = create_supervised_evaluator(model, cuda=True)
trainer.add_event_handler(Events.EPOCH_COMPLETED,
                          Evaluate(evaluator, validation_loader, epoch_interval=1))

@trainer.on(Events.EPOCH_COMPLETED)
def log(engine):
    print(engine.current_epoch, categorical_accuracy(evaluator.history))
```

### Pros

- keeps library code cleanly separated with minimal implicit dependencies
- user doesn't have to write much code

### Cons

- can be confusing what happens where
- we have both an `Evaluator` and an `Evaluate` and yet neither one computes any sort of metrics
- there are a lot of ways this could go wrong
- you have to understand the contract between what gets stored in the `Evaluator`'s history and the metrics functions
- you have to make sure you attach the `Evaluate` handler and any logging handlers to the same event

## Goals

Evaluating a model is gonna be something that (essentially) everyone does so I think we need to have a good story here. Imo, we should make the supervised case be super easy while still making it possible for non-supervised cases. That being said, I think we want to accomplish this without removing flexibility and without adding a ton of code.

## Ideas

Working backward from what I would like the api to be, it might be nice if you could just do something like this:

```python
model = ...
validation_loader = ...
trainer = ...

@trainer.on(Events.EPOCH_COMPLETED)
def evaluate(engine):
    results = evaluate(model, {'acc': categorical_accuracy})
    # do something with those results
```

It'd be even nicer if I could do something like this:

```python
model = ...
validation_loader = ...
trainer = ...

trainer.add_event_handler(Events.EPOCH_COMPLETED,
                          Evaluate(model, {'acc': categorical_accuracy}))
```

But without making assumptions about how users want to plot/log their evaluation results, this isn't possible.

What do you all think? Anything here you take issue with? Any ideas on how we can best accomplish this? Do we need to make plotting/logging part of this discussion as well?
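For context, the patch above addresses this by letting metrics be attached to an evaluator and surfaced on the returned state. A minimal sketch of how that reads for a user, mirroring the new `test_create_supervised_with_metrics` test; the model and data are toy placeholders.

```python
# Sketch of the metrics-attached-to-evaluator pattern the patch introduces.
import torch
from torch.nn import Linear

from ignite.evaluator import create_supervised_evaluator
from ignite.metrics import MeanSquaredError

model = Linear(1, 1)  # toy placeholder model

evaluator = create_supervised_evaluator(
    model, metrics={'mse': MeanSquaredError()})

x = torch.FloatTensor([[1.0], [2.0]])
y = torch.FloatTensor([[3.0], [4.0]])

state = evaluator.run([(x, y)])  # data is an iterable of (input, target) batches
print(state.metrics['mse'])      # filled in by the attached Metric
```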
pytorch/ignite
diff --git a/tests/ignite/handlers/test_checkpoint.py b/tests/ignite/handlers/test_checkpoint.py new file mode 100644 index 00000000..35a14b0b --- /dev/null +++ b/tests/ignite/handlers/test_checkpoint.py @@ -0,0 +1,128 @@ +import os +import tempfile + +import pytest +import torch +import shutil + +from ignite.engine import Events +from ignite.handlers import ModelCheckpoint +from ignite.trainer import Trainer + +_PREFIX = 'PREFIX' + + [email protected] +def dirname(): + path = tempfile.mkdtemp() + yield path + shutil.rmtree(path) + + +def test_args_validation(dirname): + existing = os.path.join(dirname, 'existing_dir') + nonempty = os.path.join(dirname, 'nonempty') + + os.makedirs(existing) + os.makedirs(nonempty) + + with open(os.path.join(nonempty, '{}_name_0.pth'.format(_PREFIX)), 'w'): + pass + + # save_interval & score_func + with pytest.raises(ValueError): + h = ModelCheckpoint(existing, _PREFIX, + create_dir=False) + + with pytest.raises(OSError): + h = ModelCheckpoint(existing, _PREFIX, create_dir=True, + save_interval=42) + + with pytest.raises(ValueError): + h = ModelCheckpoint(nonempty, _PREFIX, exist_ok=True, + save_interval=42) + + +def test_simple_recovery(dirname): + h = ModelCheckpoint(dirname, _PREFIX, create_dir=False, save_interval=1) + h(None, None, {'obj': 42}) + + fname = os.path.join(dirname, '{}_{}_{}.pth'.format(_PREFIX, 'obj', 1)) + assert torch.load(fname) == 42 + + +def test_atomic(dirname): + serializable = 42 + non_serializable = (42, lambda _: 42) + + def _test_existance(atomic, name, obj, expected): + h = ModelCheckpoint(dirname, _PREFIX, + atomic=atomic, + create_dir=False, + require_empty=False, + save_interval=1) + + try: + h(None, None, {name: obj}) + except: + pass + + fname = os.path.join(dirname, '{}_{}_{}.pth'.format(_PREFIX, name, 1)) + assert os.path.exists(fname) == expected + + _test_existance(atomic=False, name='nonatomic_OK', obj=serializable, expected=True) + _test_existance(atomic=False, name='nonatomic_FAIL', obj=non_serializable, expected=True) + + _test_existance(atomic=True, name='atomic_OK', obj=serializable, expected=True) + _test_existance(atomic=True, name='atomic_FAIL', obj=non_serializable, expected=False) + + +def test_last_k(dirname): + h = ModelCheckpoint(dirname, _PREFIX, create_dir=False, n_saved=2, save_interval=2) + to_save = {'name': 42} + + for _ in range(8): + h(None, None, to_save) + + expected = ['{}_{}_{}.pth'.format(_PREFIX, 'name', i) + for i in [6, 8]] + + assert sorted(os.listdir(dirname)) == expected + + +def test_best_k(dirname): + scores = iter([1.0, -2., 3.0, -4.0]) + + def score_function(state): + return next(scores) + + h = ModelCheckpoint(dirname, _PREFIX, create_dir=False, + n_saved=2, score_function=score_function) + + to_save = {'name': 42} + for _ in range(4): + h(None, None, to_save) + + expected = ['{}_{}_{}.pth'.format(_PREFIX, 'name', i) + for i in [1, 3]] + + assert sorted(os.listdir(dirname)) == expected + + +def test_with_trainer(dirname): + + def update_fn(batch): + pass + + name = 'model' + trainer = Trainer(update_fn) + handler = ModelCheckpoint(dirname, _PREFIX, create_dir=False, + n_saved=2, save_interval=1) + + trainer.add_event_handler(Events.EPOCH_COMPLETED, handler, {name: 42}) + trainer.run([0], max_epochs=4) + + expected = ['{}_{}_{}.pth'.format(_PREFIX, name, i) + for i in [3, 4]] + + assert sorted(os.listdir(dirname)) == expected diff --git a/tests/ignite/metrics/test_categorical_accuracy.py b/tests/ignite/metrics/test_categorical_accuracy.py new file mode 100644 index 
00000000..ebd00594 --- /dev/null +++ b/tests/ignite/metrics/test_categorical_accuracy.py @@ -0,0 +1,25 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import CategoricalAccuracy +import pytest +import torch + + +def test_zero_div(): + acc = CategoricalAccuracy() + with pytest.raises(NotComputableError): + acc.compute() + + +def test_compute(): + acc = CategoricalAccuracy() + + y_pred = torch.eye(4) + y = torch.ones(4).type(torch.LongTensor) + acc.update((y_pred, y)) + assert acc.compute() == 0.25 + + acc.reset() + y_pred = torch.eye(2) + y = torch.ones(2).type(torch.LongTensor) + acc.update((y_pred, y)) + assert acc.compute() == 0.5 diff --git a/tests/ignite/metrics/test_mean_squared_error.py b/tests/ignite/metrics/test_mean_squared_error.py new file mode 100644 index 00000000..3be6845f --- /dev/null +++ b/tests/ignite/metrics/test_mean_squared_error.py @@ -0,0 +1,25 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import MeanSquaredError +import pytest +import torch + + +def test_zero_div(): + mse = MeanSquaredError() + with pytest.raises(NotComputableError): + mse.compute() + + +def test_compute(): + mse = MeanSquaredError() + + y_pred = torch.Tensor([[2.0], [-2.0]]) + y = torch.zeros(2) + mse.update((y_pred, y)) + assert mse.compute() == 4.0 + + mse.reset() + y_pred = torch.Tensor([[3.0], [-3.0]]) + y = torch.zeros(2) + mse.update((y_pred, y)) + assert mse.compute() == 9.0 diff --git a/tests/ignite/test_evaluator.py b/tests/ignite/test_evaluator.py index ee455cb0..cf4f6172 100644 --- a/tests/ignite/test_evaluator.py +++ b/tests/ignite/test_evaluator.py @@ -5,6 +5,7 @@ from torch.nn import Linear from ignite.engine import Events, State from ignite.evaluator import Evaluator, create_supervised_evaluator +from ignite.metrics import MeanSquaredError def test_returns_state(): @@ -121,3 +122,18 @@ def test_create_supervised(): assert model.weight.data[0, 0] == approx(0.0) assert model.bias.data[0] == approx(0.0) + + +def test_create_supervised_with_metrics(): + model = Linear(1, 1) + model.weight.data.zero_() + model.bias.data.zero_() + + evaluator = create_supervised_evaluator(model, metrics={'mse': MeanSquaredError()}) + + x = torch.FloatTensor([[1.0], [2.0]]) + y = torch.FloatTensor([[3.0], [4.0]]) + data = [(x, y)] + + state = evaluator.run(data) + assert state.metrics['mse'] == 12.5
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "numpy", "mock", "pytest", "codecov", "pytest-cov", "tqdm", "scikit-learn", "visdom", "torchvision", "tensorboardX", "gym" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 charset-normalizer==2.0.12 cloudpickle==2.2.1 codecov==2.1.13 coverage==6.2 dataclasses==0.8 decorator==4.4.2 enum34==1.1.10 gym==0.26.2 gym-notices==0.0.8 idna==3.10 -e git+https://github.com/pytorch/ignite.git@84a635b10433d79d3889d18ca37521d9a4971c21#egg=ignite importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work importlib-resources==5.4.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work joblib==1.1.1 jsonpatch==1.32 jsonpointer==2.3 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work networkx==2.5.1 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work Pillow==8.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work protobuf==4.21.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-cov==4.0.0 requests==2.27.1 scikit-learn==0.24.2 scipy==1.5.4 six==1.17.0 tensorboardX==2.6.2.2 threadpoolctl==3.1.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 torch==1.10.1 torchvision==0.11.2 tornado==6.1 tqdm==4.64.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 visdom==0.2.4 websocket-client==1.3.1 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ignite channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - cloudpickle==2.2.1 - codecov==2.1.13 - coverage==6.2 - dataclasses==0.8 - decorator==4.4.2 - enum34==1.1.10 - gym==0.26.2 - gym-notices==0.0.8 - idna==3.10 - importlib-resources==5.4.0 - joblib==1.1.1 - jsonpatch==1.32 - jsonpointer==2.3 - mock==5.2.0 - networkx==2.5.1 - numpy==1.19.5 - pillow==8.4.0 - protobuf==4.21.0 - pytest-cov==4.0.0 - requests==2.27.1 - scikit-learn==0.24.2 - scipy==1.5.4 - six==1.17.0 - tensorboardx==2.6.2.2 - threadpoolctl==3.1.0 - tomli==1.2.3 - torch==1.10.1 - torchvision==0.11.2 - tornado==6.1 - tqdm==4.64.1 - urllib3==1.26.20 - visdom==0.2.4 - websocket-client==1.3.1 prefix: /opt/conda/envs/ignite
[ "tests/ignite/handlers/test_checkpoint.py::test_args_validation", "tests/ignite/handlers/test_checkpoint.py::test_simple_recovery", "tests/ignite/handlers/test_checkpoint.py::test_atomic", "tests/ignite/handlers/test_checkpoint.py::test_last_k", "tests/ignite/handlers/test_checkpoint.py::test_best_k", "tests/ignite/handlers/test_checkpoint.py::test_with_trainer", "tests/ignite/metrics/test_categorical_accuracy.py::test_zero_div", "tests/ignite/metrics/test_categorical_accuracy.py::test_compute", "tests/ignite/metrics/test_mean_squared_error.py::test_zero_div", "tests/ignite/metrics/test_mean_squared_error.py::test_compute", "tests/ignite/test_evaluator.py::test_returns_state", "tests/ignite/test_evaluator.py::test_state_attributes", "tests/ignite/test_evaluator.py::test_current_validation_iteration_counter_increases_every_iteration", "tests/ignite/test_evaluator.py::test_evaluation_iteration_events_are_fired", "tests/ignite/test_evaluator.py::test_terminate_stops_evaluator_when_called_during_iteration", "tests/ignite/test_evaluator.py::test_create_supervised", "tests/ignite/test_evaluator.py::test_create_supervised_with_metrics" ]
[]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,193
[ "ignite/exceptions.py", "ignite/handlers/checkpoint.py", "ignite/evaluator.py", "ignite/handlers/__init__.py", "ignite/metrics/__init__.py", "ignite/metrics/metric.py", "ignite/metrics/categorical_accuracy.py", "ignite/metrics/mean_squared_error.py" ]
[ "ignite/exceptions.py", "ignite/handlers/checkpoint.py", "ignite/evaluator.py", "ignite/handlers/__init__.py", "ignite/metrics/__init__.py", "ignite/metrics/metric.py", "ignite/metrics/categorical_accuracy.py", "ignite/metrics/mean_squared_error.py" ]
jnothman__UpSetPlot-29
a272986182e3f1de64ab7d20fd44c59f8b693017
2018-02-22 10:36:28
a272986182e3f1de64ab7d20fd44c59f8b693017
diff --git a/upsetplot/plotting.py b/upsetplot/plotting.py index 89c8506..5203bf3 100644 --- a/upsetplot/plotting.py +++ b/upsetplot/plotting.py @@ -79,26 +79,29 @@ class UpSet: with_lines : bool Whether to show lines joining dots in the matrix, to mark multiple sets being intersected. - intersections_plot_size : float + element_size : float or None + Side length in pt. If None, size is estimated to fit figure + intersections_plot_elements : int The intersections plot should be large enough to fit this many matrix - dots. - totals_plot_size : float + elements. + totals_plot_elements : int The totals plot should be large enough to fit this many matrix dots. """ def __init__(self, data, vert=True, sort_by='degree', sort_sets_by='cardinality', forecolor='black', - with_lines=True, intersection_plot_size=6, - totals_plot_size=2): + with_lines=True, element_size=32, + intersection_plot_elements=6, totals_plot_elements=2): self._vert = vert if not vert: raise NotImplementedError() self._forecolor = forecolor self._with_lines = with_lines - self._totals_plot_size = totals_plot_size - self._intersection_plot_size = intersection_plot_size + self._element_size = element_size + self._totals_plot_elements = totals_plot_elements + self._intersection_plot_elements = intersection_plot_elements (self.intersections, self.totals) = _process_data(data, @@ -111,29 +114,42 @@ class UpSet: n_cats = len(self.totals) n_inters = len(self.intersections) - text_space = self._calculate_text_ncols(fig) - GS = matplotlib.gridspec.GridSpec - gridspec = GS(n_cats + self._intersection_plot_size, - n_inters + text_space + self._totals_plot_size, - hspace=1) - return {'intersections': gridspec[:-n_cats, -n_inters:], - 'matrix': gridspec[-n_cats:, -n_inters:], - 'totals': gridspec[-n_cats:, :self._totals_plot_size], - 'gs': gridspec} - - def _calculate_text_ncols(self, fig): if fig is None: fig = plt.gcf() + + # Determine text size to determine figure size / spacing r = get_renderer(fig) t = fig.text(0, 0, '\n'.join(self.totals.index.values)) textw = t.get_window_extent(renderer=r).width - figw = fig.get_window_extent(renderer=r).width - MAGIC_MARGIN = 20 # FIXME - colw = (figw - textw - MAGIC_MARGIN) / (len(self.intersections) + - self._totals_plot_size) t.remove() - return int(np.ceil(figw / colw - (len(self.intersections) + - self._totals_plot_size))) + + MAGIC_MARGIN = 10 # FIXME + figw = fig.get_window_extent(renderer=r).width + if self._element_size is None: + colw = (figw - textw - MAGIC_MARGIN) / (len(self.intersections) + + self._totals_plot_elements) + else: + render_ratio = figw / fig.get_figwidth() + colw = self._element_size / 72 * render_ratio + figw = (colw * (len(self.intersections) + + self._totals_plot_elements) + + MAGIC_MARGIN + textw) + fig.set_figwidth(figw / render_ratio) + fig.set_figheight((colw * (n_cats + + self._intersection_plot_elements)) / + render_ratio) + + text_nelems = int(np.ceil(figw / colw - (len(self.intersections) + + self._totals_plot_elements))) + + GS = matplotlib.gridspec.GridSpec + gridspec = GS(n_cats + self._intersection_plot_elements, + n_inters + text_nelems + self._totals_plot_elements, + hspace=1) + return {'intersections': gridspec[:-n_cats, -n_inters:], + 'matrix': gridspec[-n_cats:, -n_inters:], + 'totals': gridspec[-n_cats:, :self._totals_plot_elements], + 'gs': gridspec} def plot_matrix(self, ax): """Plot the matrix of intersection indicators onto ax @@ -152,8 +168,12 @@ class UpSet: c[idx] = self._forecolor x = np.repeat(np.arange(len(data)), n_sets) y = 
np.tile(np.arange(n_sets), len(data)) - # TODO: make s relative to colw - ax.scatter(x, y, c=c.tolist(), linewidth=0, s=200) + if self._element_size is not None: + s = (self._element_size * .35) ** 2 + else: + # TODO: make s relative to colw + s = 200 + ax.scatter(x, y, c=c.tolist(), linewidth=0, s=s) if self._with_lines: line_data = (pd.Series(y[idx], index=x[idx])
Allow automatic determination of figsize from dot radius in pts
User should be able to specify a dot radius and the figure size is calculated from there...
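For context, the arithmetic the request implies and the patch above implements: the element size given in points is converted to inches (72 pt per inch) and multiplied by the number of matrix columns and rows. A simplified standalone sketch; the label-width term is a placeholder for the text extent the real code measures from the renderer.

```python
# Simplified figure-size calculation from a per-element size in points.
def figsize_from_element_size(n_intersections, n_sets, element_size_pt=32,
                              intersection_plot_elements=6,
                              totals_plot_elements=2, label_width_in=1.5):
    col_in = element_size_pt / 72.0  # matplotlib uses 72 points per inch
    width_in = col_in * (n_intersections + totals_plot_elements) + label_width_in
    height_in = col_in * (n_sets + intersection_plot_elements)
    return width_in, height_in


print(figsize_from_element_size(n_intersections=8, n_sets=3))
```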
jnothman/UpSetPlot
diff --git a/upsetplot/tests/test_upsetplot.py b/upsetplot/tests/test_upsetplot.py index f95cbb9..1dcfcab 100644 --- a/upsetplot/tests/test_upsetplot.py +++ b/upsetplot/tests/test_upsetplot.py @@ -72,7 +72,7 @@ def test_param_validation(kw): UpSet(X, **kw) [email protected]('kw', [{}]) [email protected]('kw', [{}, {'element_size': None}]) def test_plot_smoke_test(kw): fig = matplotlib.figure.Figure() X = generate_data(n_samples=100) @@ -81,6 +81,32 @@ def test_plot_smoke_test(kw): # Also check fig is optional n_nums = len(plt.get_fignums()) - plot(X) + plot(X, **kw) assert len(plt.get_fignums()) - n_nums == 1 assert plt.gcf().axes + + +def test_element_size(): + X = generate_data(n_samples=100) + figsizes = [] + for element_size in range(10, 50, 5): + fig = matplotlib.figure.Figure() + UpSet(X, element_size=element_size).make_grid(fig) + figsizes.append((fig.get_figwidth(), fig.get_figheight())) + + figwidths, figheights = zip(*figsizes) + # Absolute width increases + assert np.all(np.diff(figwidths) > 0) + aspect = np.divide(figwidths, figheights) + # Font size stays constant, so aspect ratio decreases + assert np.all(np.diff(aspect) < 0) + # But doesn't decrease by much + assert np.all(aspect[:-1] / aspect[1:] < 1.1) + + fig = matplotlib.figure.Figure() + figsize_before = fig.get_figwidth(), fig.get_figheight() + UpSet(X, element_size=None).make_grid(fig) + figsize_after = fig.get_figwidth(), fig.get_figheight() + assert figsize_before == figsize_after + + # TODO: make sure axes are all within figure
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "doc/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cycler==0.11.0 docutils==0.18.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 kiwisolver==1.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 numpy==1.19.5 numpydoc==1.1.0 packaging==21.3 pandas==1.1.5 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-gallery==0.10.0 sphinx-issues==3.0.1 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 -e git+https://github.com/jnothman/UpSetPlot.git@a272986182e3f1de64ab7d20fd44c59f8b693017#egg=UpSetPlot urllib3==1.26.20 zipp==3.6.0
name: UpSetPlot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - cycler==0.11.0 - docutils==0.18.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - numpy==1.19.5 - numpydoc==1.1.0 - packaging==21.3 - pandas==1.1.5 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-gallery==0.10.0 - sphinx-issues==3.0.1 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/UpSetPlot
[ "upsetplot/tests/test_upsetplot.py::test_plot_smoke_test[kw1]", "upsetplot/tests/test_upsetplot.py::test_element_size" ]
[ "upsetplot/tests/test_upsetplot.py::test_process_data[None-degree-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-degree-X1]" ]
[ "upsetplot/tests/test_upsetplot.py::test_process_data[None-cardinality-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[None-cardinality-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[None-degree-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-cardinality-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-cardinality-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-degree-X0]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[None-cardinality]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[None-degree]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[cardinality-cardinality]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[cardinality-degree]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw0]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw1]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw2]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw3]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw4]", "upsetplot/tests/test_upsetplot.py::test_plot_smoke_test[kw0]" ]
[]
New BSD License
2,194
[ "upsetplot/plotting.py" ]
[ "upsetplot/plotting.py" ]
jnothman__UpSetPlot-30
f6b066fe0e1a9b7ae06bc770b9a2ca69ce6468d8
2018-02-22 10:37:08
f6b066fe0e1a9b7ae06bc770b9a2ca69ce6468d8
diff --git a/examples/plot_vertical.py b/examples/plot_vertical.py new file mode 100644 index 0000000..6709bc3 --- /dev/null +++ b/examples/plot_vertical.py @@ -0,0 +1,14 @@ +""" +==================== +Vertical orientation +==================== + +This illustrates the effect of orientation='vertical'. +""" + +from matplotlib import pyplot as plt +from upsetplot import generate_data, plot + +example = generate_data(aggregated=True) +plot(example, orientation='vertical') +plt.show() diff --git a/upsetplot/plotting.py b/upsetplot/plotting.py index 5203bf3..1e635a9 100644 --- a/upsetplot/plotting.py +++ b/upsetplot/plotting.py @@ -52,6 +52,60 @@ def _process_data(data, sort_by, sort_sets_by): return data, totals +class _Transposed: + """Wrap an object in order to transpose some plotting operations + + Attributes of obj will be mapped. + Keyword arguments when calling obj will be mapped. + + The mapping is not recursive: callable attributes need to be _Transposed + again. + """ + + def __init__(self, obj): + self.__obj = obj + + def __getattr__(self, key): + return getattr(self.__obj, self._NAME_TRANSPOSE.get(key, key)) + + def __call__(self, *args, **kwargs): + return self.__obj(*args, **{self._NAME_TRANSPOSE.get(k, k): v + for k, v in kwargs.items()}) + + _NAME_TRANSPOSE = { + 'width': 'height', + 'height': 'width', + 'hspace': 'wspace', + 'wspace': 'hspace', + 'hlines': 'vlines', + 'vlines': 'hlines', + 'bar': 'barh', + 'barh': 'bar', + 'xaxis': 'yaxis', + 'yaxis': 'xaxis', + 'left': 'bottom', + 'right': 'top', + 'top': 'right', + 'bottom': 'left', + 'sharex': 'sharey', + 'sharey': 'sharex', + 'get_figwidth': 'get_figheight', + 'get_figheight': 'get_figwidth', + 'set_figwidth': 'set_figheight', + 'set_figheight': 'set_figwidth', + } + + +def _transpose(obj): + if isinstance(obj, str): + return _Transposed._NAME_TRANSPOSE.get(obj, obj) + return _Transposed(obj) + + +def _identity(obj): + return obj + + class UpSet: """Manage the data and drawing for a basic UpSet plot @@ -63,9 +117,8 @@ class UpSet: Values for each set to plot. Should have multi-index where each level is binary, corresponding to set membership. - vert : bool - If True, the primary plot (bar chart of intersections) will - be vertical. + orientation : {'horizontal' (default), 'vertical'} + If horizontal, intersections are listed from left to right. sort_by : {'cardinality', 'degree'} If 'cardinality', set intersections are listed from largest to smallest value. @@ -86,17 +139,16 @@ class UpSet: elements. totals_plot_elements : int The totals plot should be large enough to fit this many matrix - dots. + elements. 
""" - def __init__(self, data, vert=True, sort_by='degree', + def __init__(self, data, orientation='horizontal', sort_by='degree', sort_sets_by='cardinality', forecolor='black', with_lines=True, element_size=32, intersection_plot_elements=6, totals_plot_elements=2): - self._vert = vert - if not vert: - raise NotImplementedError() + self._horizontal = orientation == 'horizontal' + self._reorient = _identity if self._horizontal else _transpose self._forecolor = forecolor self._with_lines = with_lines self._element_size = element_size @@ -107,6 +159,13 @@ class UpSet: self.totals) = _process_data(data, sort_by=sort_by, sort_sets_by=sort_sets_by) + if not self._horizontal: + self.intersections = self.intersections[::-1] + + def _swapaxes(self, x, y): + if self._horizontal: + return x, y + return y, x def make_grid(self, fig=None): """Get a SubplotSpec for each Axes, accounting for label text width @@ -124,11 +183,12 @@ class UpSet: t.remove() MAGIC_MARGIN = 10 # FIXME - figw = fig.get_window_extent(renderer=r).width + figw = self._reorient(fig.get_window_extent(renderer=r)).width if self._element_size is None: colw = (figw - textw - MAGIC_MARGIN) / (len(self.intersections) + self._totals_plot_elements) else: + fig = self._reorient(fig) render_ratio = figw / fig.get_figwidth() colw = self._element_size / 72 * render_ratio figw = (colw * (len(self.intersections) + @@ -142,26 +202,35 @@ class UpSet: text_nelems = int(np.ceil(figw / colw - (len(self.intersections) + self._totals_plot_elements))) - GS = matplotlib.gridspec.GridSpec - gridspec = GS(n_cats + self._intersection_plot_elements, - n_inters + text_nelems + self._totals_plot_elements, + GS = self._reorient(matplotlib.gridspec.GridSpec) + gridspec = GS(*self._swapaxes(n_cats + + self._intersection_plot_elements, + n_inters + text_nelems + + self._totals_plot_elements), hspace=1) - return {'intersections': gridspec[:-n_cats, -n_inters:], - 'matrix': gridspec[-n_cats:, -n_inters:], - 'totals': gridspec[-n_cats:, :self._totals_plot_elements], - 'gs': gridspec} + if self._horizontal: + return {'intersections': gridspec[:-n_cats, -n_inters:], + 'matrix': gridspec[-n_cats:, -n_inters:], + 'totals': gridspec[-n_cats:, :self._totals_plot_elements], + 'gs': gridspec} + else: + return {'intersections': gridspec[-n_inters:, n_cats:], + 'matrix': gridspec[-n_inters:, :n_cats], + 'totals': gridspec[:self._totals_plot_elements, :n_cats], + 'gs': gridspec} def plot_matrix(self, ax): """Plot the matrix of intersection indicators onto ax """ + ax = self._reorient(ax) data = self.intersections n_sets = data.index.nlevels # alternating row shading (XXX: use add_patch(Rectangle)?) 
alternating = np.arange(0, n_sets, 2) ax.barh(alternating, np.full(len(alternating), len(data) + 1), - left=-1, color='#f5f5f5', zorder=0, linewidth=0, - align='center') + color='#f5f5f5', zorder=0, linewidth=0, align='center', + **{'left' if self._horizontal else 'bottom': -1}) idx = np.flatnonzero(data.index.to_frame()[data.index.names].values) c = np.array(['lightgrey'] * len(data) * n_sets, dtype='O') @@ -173,7 +242,7 @@ class UpSet: else: # TODO: make s relative to colw s = 200 - ax.scatter(x, y, c=c.tolist(), linewidth=0, s=s) + ax.scatter(*self._swapaxes(x, y), c=c.tolist(), linewidth=0, s=s) if self._with_lines: line_data = (pd.Series(y[idx], index=x[idx]) @@ -183,36 +252,45 @@ class UpSet: line_data['min'], line_data['max'], lw=2, colors=self._forecolor) - ax.set_yticks(np.arange(n_sets)) - ax.set_yticklabels(data.index.names) + tick_axis = ax.yaxis + tick_axis.set_ticks(np.arange(n_sets)) + tick_axis.set_ticklabels(data.index.names, + rotation=0 if self._horizontal else -90) ax.xaxis.set_visible(False) ax.tick_params(axis='both', which='both', length=0) + if not self._horizontal: + ax.yaxis.set_ticks_position('top') ax.set_frame_on(False) def plot_intersections(self, ax): """Plot bars indicating intersection size """ + ax = self._reorient(ax) ax.bar(np.arange(len(self.intersections)), self.intersections, - width=.5, color=self._forecolor, zorder=10, align='center') + .5, color=self._forecolor, zorder=10, align='center') ax.xaxis.set_visible(False) for x in ['top', 'bottom', 'right']: - ax.spines[x].set_visible(False) - ax.yaxis.grid(True) - ax.set_ylabel('Intersection size') - # ax.get_yaxis().set_tick_params(direction='in') + ax.spines[self._reorient(x)].set_visible(False) + + tick_axis = ax.yaxis + tick_axis.grid(True) + tick_axis.set_label('Intersection size') + # tick_axis.set_tick_params(direction='in') def plot_totals(self, ax): """Plot bars indicating total set size """ + orig_ax = ax + ax = self._reorient(ax) ax.barh(np.arange(len(self.totals.index.values)), self.totals, - height=.5, color=self._forecolor, align='center') + .5, color=self._forecolor, align='center') max_total = self.totals.max() - ax.set_xlim(max_total, 0) + if self._horizontal: + orig_ax.set_xlim(max_total, 0) for x in ['top', 'left', 'right']: - ax.spines[x].set_visible(False) + ax.spines[self._reorient(x)].set_visible(False) ax.yaxis.set_visible(False) ax.xaxis.grid(True) - ax.ticklabel_format(axis='x') def plot(self, fig=None): """Draw all parts of the plot onto fig or a new figure @@ -232,9 +310,11 @@ class UpSet: specs = self.make_grid(fig) matrix_ax = fig.add_subplot(specs['matrix']) self.plot_matrix(matrix_ax) - inters_ax = fig.add_subplot(specs['intersections'], sharex=matrix_ax) + inters_ax = self._reorient(fig.add_subplot)(specs['intersections'], + sharex=matrix_ax) self.plot_intersections(inters_ax, ) - totals_ax = fig.add_subplot(specs['totals'], sharey=matrix_ax) + totals_ax = self._reorient(fig.add_subplot)(specs['totals'], + sharey=matrix_ax) self.plot_totals(totals_ax) return {'matrix': matrix_ax, 'intersections': inters_ax,
Support transposed plot Really, according to the original UpSet work, the current display is transposed. We should support the original format of having the intersection size bars horizontal.
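For reference, the patch above adds an `examples/plot_vertical.py` exercising the new option. A minimal sketch of the same usage, taken almost verbatim from that example file (nothing here is assumed beyond what the patch shows):

```python
# Minimal sketch of the new orientation option, mirroring the
# examples/plot_vertical.py file added in the patch above.
from matplotlib import pyplot as plt
from upsetplot import generate_data, plot

example = generate_data(aggregated=True)

# 'horizontal' stays the default; 'vertical' restores the layout of the
# original UpSet paper, with intersection-size bars drawn horizontally.
plot(example, orientation='vertical')
plt.show()
```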
jnothman/UpSetPlot
diff --git a/upsetplot/tests/test_upsetplot.py b/upsetplot/tests/test_upsetplot.py index 1dcfcab..4419b06 100644 --- a/upsetplot/tests/test_upsetplot.py +++ b/upsetplot/tests/test_upsetplot.py @@ -72,7 +72,9 @@ def test_param_validation(kw): UpSet(X, **kw) [email protected]('kw', [{}, {'element_size': None}]) [email protected]('kw', [{}, + {'element_size': None}, + {'orientation': 'vertical'}]) def test_plot_smoke_test(kw): fig = matplotlib.figure.Figure() X = generate_data(n_samples=100) @@ -86,6 +88,25 @@ def test_plot_smoke_test(kw): assert plt.gcf().axes +def test_vertical(): + X = generate_data(n_samples=100) + + fig = matplotlib.figure.Figure() + UpSet(X, orientation='horizontal').make_grid(fig) + horz_height = fig.get_figheight() + horz_width = fig.get_figwidth() + assert horz_height < horz_width + + fig = matplotlib.figure.Figure() + UpSet(X, orientation='vertical').make_grid(fig) + vert_height = fig.get_figheight() + vert_width = fig.get_figwidth() + assert horz_width / horz_height > vert_width / vert_height + + # TODO: test axes positions, plot order, bar orientation + pass + + def test_element_size(): X = generate_data(n_samples=100) figsizes = []
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "doc/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cycler==0.11.0 docutils==0.18.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 kiwisolver==1.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 numpy==1.19.5 numpydoc==1.1.0 packaging==21.3 pandas==1.1.5 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-gallery==0.10.0 sphinx-issues==3.0.1 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 -e git+https://github.com/jnothman/UpSetPlot.git@f6b066fe0e1a9b7ae06bc770b9a2ca69ce6468d8#egg=UpSetPlot urllib3==1.26.20 zipp==3.6.0
name: UpSetPlot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - cycler==0.11.0 - docutils==0.18.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - numpy==1.19.5 - numpydoc==1.1.0 - packaging==21.3 - pandas==1.1.5 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-gallery==0.10.0 - sphinx-issues==3.0.1 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/UpSetPlot
[ "upsetplot/tests/test_upsetplot.py::test_plot_smoke_test[kw2]", "upsetplot/tests/test_upsetplot.py::test_vertical" ]
[ "upsetplot/tests/test_upsetplot.py::test_process_data[None-degree-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-degree-X1]" ]
[ "upsetplot/tests/test_upsetplot.py::test_process_data[None-cardinality-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[None-cardinality-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[None-degree-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-cardinality-X0]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-cardinality-X1]", "upsetplot/tests/test_upsetplot.py::test_process_data[cardinality-degree-X0]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[None-cardinality]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[None-degree]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[cardinality-cardinality]", "upsetplot/tests/test_upsetplot.py::test_not_aggregated[cardinality-degree]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw0]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw1]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw2]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw3]", "upsetplot/tests/test_upsetplot.py::test_param_validation[kw4]", "upsetplot/tests/test_upsetplot.py::test_plot_smoke_test[kw0]", "upsetplot/tests/test_upsetplot.py::test_plot_smoke_test[kw1]", "upsetplot/tests/test_upsetplot.py::test_element_size" ]
[]
New BSD License
2,195
[ "upsetplot/plotting.py", "examples/plot_vertical.py" ]
[ "upsetplot/plotting.py", "examples/plot_vertical.py" ]
grabbles__grabbit-52
5847d795acbebed43d2d89b84a9dccee545ee2b2
2018-02-23 02:24:52
5a588731d1a4a42a6b67f09ede110d7770845ed0
diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index a72e893..0000000 --- a/.coveragerc +++ /dev/null @@ -1,5 +0,0 @@ -[run] -source = - grabbit/ -omit = - */tests/* diff --git a/.travis.yml b/.travis.yml index 57590d2..5e2016c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,17 +4,21 @@ sudo: false python: - "2.7" - "3.5" - - "3.6" install: - - pip install --upgrade pip pytest - - pip install runipy coveralls pytest-cov + - pip install pip --upgrade + - pip install pytest --upgrade + - pip install codecov runipy - pip install -e '.' + - pip install flake8 script: - PYTHONPATH=$PWD coverage run `which py.test` grabbit - - py.test --cov-report term-missing --cov=grabbit + - python setup.py install # test installation - cd examples; PYTHONPATH=.. runipy *.ipynb # test example notebooks to run and not fail + # for now flaking only the stub.py + # - flake8 grabbit after_success: - - coveralls + - codecov + diff --git a/README.md b/README.md index aee4d0b..e528243 100644 --- a/README.md +++ b/README.md @@ -5,10 +5,6 @@ Get grabby with file trees Grabbit is a lightweight Python 3 package for simple queries over filenames within a project. It's geared towards projects or applications with highly structured filenames that allow useful queries to be performed without having to inspect the file metadata or contents. -## Status -* [![Build Status](https://travis-ci.org/grabbles/grabbit.svg?branch=master)](https://travis-ci.org/grabbles/grabbit) -* [![Coverage Status](https://coveralls.io/repos/github/grabbles/grabbit/badge.svg?branch=master)](https://coveralls.io/github/grabbles/grabbit?branch=master) - ## Installation ``` diff --git a/grabbit/core.py b/grabbit/core.py index a84092b..9cb60dc 100644 --- a/grabbit/core.py +++ b/grabbit/core.py @@ -154,7 +154,7 @@ Tag = namedtuple('Tag', ['entity', 'value']) class Entity(object): def __init__(self, name, pattern=None, domain=None, mandatory=False, - directory=None, map_func=None, **kwargs): + directory=None, map_func=None, dtype=None, **kwargs): """ Represents a single entity defined in the JSON config. @@ -171,6 +171,10 @@ class Entity(object): defined .pattern). domain (Domain): The Domain the Entity belongs to. kwargs (dict): Additional keyword arguments. + dtype (str): The optional data type of the Entity values. Must be + one of 'int', 'float', 'bool', or 'str'. If None, no type + enforcement will be attempted, which means the dtype of the + value may be unpredictable. """ if pattern is None and map_func is None: raise ValueError("Invalid specification for Entity '%s'; no " @@ -183,9 +187,17 @@ class Entity(object): self.mandatory = mandatory self.directory = directory self.map_func = map_func + self.kwargs = kwargs + + if isinstance(dtype, six.string_types): + dtype = eval(dtype) + if dtype not in [str, float, int, bool, None]: + raise ValueError("Invalid dtype '%s'. Must be one of int, float, " + "bool, or str." 
% dtype) + self.dtype = dtype + self.files = {} self.regex = re.compile(pattern) if pattern is not None else None - self.kwargs = kwargs self.id = '.'.join([getattr(domain, 'name', ''), name]) def __iter__(self): @@ -223,6 +235,8 @@ class Entity(object): return False if update_file: + if self.dtype is not None: + val = self.dtype(val) f.tags[self.name] = Tag(self, val) return True @@ -254,7 +268,8 @@ class LayoutMetaclass(type): paths = listify(path) if len(paths) == 1: - return super(LayoutMetaclass, cls).__call__(path, *args, **kwargs) + return super(LayoutMetaclass, cls).__call__(paths[0], *args, + **kwargs) layouts = [] for p in paths: layout = super(LayoutMetaclass, cls).__call__(p, *args, **kwargs) @@ -355,9 +370,11 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): "the config file for this domain includes a " "'root' key." % config['name']) config['root'] = self.root + elif config['root'] == '.': + config['root'] = self.root elif not isabs(config['root']): _root = config['root'] - config['root'] = abspath(join(self.root, config['root'])) + config['root'] = join(self.root, config['root']) if not exists(config['root']): msg = ("Relative path '%s' for domain '%s' interpreted as '%s'" ", but this directory doesn't exist. Either specify the" @@ -458,7 +475,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): return f.domains return [d.name for d in self.domains.values() if f.startswith(d.root)] - def _index_file(self, root, f, domains=None): + def _index_file(self, root, f, domains=None, update_layout=True): # If domains aren't explicitly passed, figure out what applies if domains is None: @@ -485,13 +502,16 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): # Only keep Files that match at least one Entity, and all # mandatory Entities - if file_ents and not (self.mandatory - set(file_ents)): + if update_layout and file_ents and not (self.mandatory + - set(file_ents)): self.files[f.path] = f # Bind the File to all of the matching entities for name, tag in f.tags.items(): ent_id = tag.entity.id self.entities[ent_id].add_file(f.path, tag.value) + return f + def _find_entity(self, entity): ''' Find an Entity instance by name. Checks both name and id fields.''' if entity in self.entities: @@ -851,6 +871,17 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): def clone(self): return deepcopy(self) + def parse_file_entities(self, filename, domains=None): + root, f = dirname(filename), basename(filename) + if not root and domains is None: + raise ValueError("If a relative path is provided as the filename " + "argument, you *must* specify the names of the " + "domains whose entities are to be extracted. " + "Available domains for the current layout are: %s" + % list(self.domains.keys())) + result = self._index_file(root, f, domains, update_layout=False) + return result.entities + def build_path(self, source, path_patterns=None, strict=False): ''' Constructs a target filename for a file or dictionary of entities.
Add type declaration to entity specification This is an oversight on my part, but there should really be a field for type declarations in the entity specification. Those can then be enforced when entity values are first read in, ensuring that we avoid ambiguous cases (e.g., should '1' be read in as a str or int?). For example: ```json { "entities": [ { "name": "subject", "pattern": ".*sub-([a-zA-Z0-9]+)", "directory": "{{root}}/{subject}", "type": "str" } ] } ```
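Note that the merged patch spells the new key `dtype` rather than `type` (see the spec change in the test patch below). A small, self-contained sketch of the enforcement logic added to `Entity`; `coerce` and `_ALLOWED` are illustrative names for this sketch, not grabbit's API:

```python
# Illustrative coercion logic: a declared dtype string is mapped to a builtin
# type and applied the first time an entity value is matched from a filename.
_ALLOWED = {'str': str, 'int': int, 'float': float, 'bool': bool}

def coerce(value, dtype=None):
    """Return value converted to the declared dtype (or unchanged if None)."""
    if dtype is None:
        return value  # no declaration: the resulting type stays unpredictable
    if dtype not in _ALLOWED:
        raise ValueError("Invalid dtype '%s'. Must be one of int, float, "
                         "bool, or str." % dtype)
    return _ALLOWED[dtype](value)

# With the test spec below: subject stays a string, run becomes an int.
assert coerce('03', 'str') == '03'
assert coerce('4', 'int') == 4
```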
grabbles/grabbit
diff --git a/grabbit/tests/specs/test.json b/grabbit/tests/specs/test.json index d93c986..e0c6630 100644 --- a/grabbit/tests/specs/test.json +++ b/grabbit/tests/specs/test.json @@ -7,7 +7,8 @@ { "name": "subject", "pattern": "sub-(\\d+)", - "directory": "{{root}}/{subject}" + "directory": "{{root}}/{subject}", + "dtype": "str" }, { "name": "session", @@ -18,7 +19,8 @@ }, { "name": "run", - "pattern": "run-0*(\\d+)" + "pattern": "run-(\\d+)", + "dtype": "int" }, { "name": "type", diff --git a/grabbit/tests/test_core.py b/grabbit/tests/test_core.py index 02eb754..c0087ee 100644 --- a/grabbit/tests/test_core.py +++ b/grabbit/tests/test_core.py @@ -200,7 +200,7 @@ class TestLayout: assert len(result) == 1 assert 'phasediff.json' in result[0].filename assert hasattr(result[0], 'run') - assert result[0].run == '1' + assert result[0].run == 1 # With exact matching... result = bids_layout.get(subject='1', run=1, session=1, @@ -368,6 +368,14 @@ class TestLayout: files = stamp_layout.get(value='35', regex_search=True) assert len(files) == 2 + def test_parse_file_entities(self, bids_layout): + filename = 'sub-03_ses-07_run-4_sekret.nii.gz' + with pytest.raises(ValueError): + bids_layout.parse_file_entities(filename) + ents = bids_layout.parse_file_entities(filename, domains=['test']) + assert ents == {'subject': '03', 'session': '7', 'run': 4, + 'type': 'sekret'} + def test_merge_layouts(bids_layout, stamp_layout): layout = merge_layouts([bids_layout, stamp_layout])
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_removed_files", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 3 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "runipy" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirement.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
async-generator==1.10 attrs==22.2.0 backcall==0.2.0 bleach==4.1.0 certifi==2021.5.30 decorator==5.1.1 defusedxml==0.7.1 entrypoints==0.4 -e git+https://github.com/grabbles/grabbit.git@5847d795acbebed43d2d89b84a9dccee545ee2b2#egg=grabbit importlib-metadata==4.8.3 iniconfig==1.1.1 ipykernel==5.5.6 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nest-asyncio==1.6.0 packaging==21.3 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 python-dateutil==2.9.0.post0 pyzmq==25.1.2 runipy==0.1.5 six==1.17.0 testpath==0.6.0 tomli==1.2.3 tornado==6.1 traitlets==4.3.3 typing_extensions==4.1.1 wcwidth==0.2.13 webencodings==0.5.1 zipp==3.6.0
name: grabbit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - async-generator==1.10 - attrs==22.2.0 - backcall==0.2.0 - bleach==4.1.0 - decorator==5.1.1 - defusedxml==0.7.1 - entrypoints==0.4 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - ipykernel==5.5.6 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nest-asyncio==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pyzmq==25.1.2 - runipy==0.1.5 - six==1.17.0 - testpath==0.6.0 - tomli==1.2.3 - tornado==6.1 - traitlets==4.3.3 - typing-extensions==4.1.1 - wcwidth==0.2.13 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/grabbit
[ "grabbit/tests/test_core.py::TestLayout::test_querying[local]", "grabbit/tests/test_core.py::TestLayout::test_parse_file_entities[local]" ]
[]
[ "grabbit/tests/test_core.py::TestFile::test_init", "grabbit/tests/test_core.py::TestFile::test_matches", "grabbit/tests/test_core.py::TestFile::test_named_tuple", "grabbit/tests/test_core.py::TestEntity::test_init", "grabbit/tests/test_core.py::TestEntity::test_matches", "grabbit/tests/test_core.py::TestEntity::test_unique_and_count", "grabbit/tests/test_core.py::TestEntity::test_add_file", "grabbit/tests/test_core.py::TestLayout::test_init[local]", "grabbit/tests/test_core.py::TestLayout::test_absolute_paths[local]", "grabbit/tests/test_core.py::TestLayout::test_natsort[local]", "grabbit/tests/test_core.py::TestLayout::test_unique_and_count[local]", "grabbit/tests/test_core.py::TestLayout::test_get_nearest[local]", "grabbit/tests/test_core.py::TestLayout::test_index_regex[local]", "grabbit/tests/test_core.py::TestLayout::test_save_index[local]", "grabbit/tests/test_core.py::TestLayout::test_load_index[local]", "grabbit/tests/test_core.py::TestLayout::test_clone[local]", "grabbit/tests/test_core.py::test_merge_layouts[local]", "grabbit/tests/test_core.py::TestLayout::test_dynamic_getters[/grabbit/grabbit/tests/data/7t_trt-/grabbit/grabbit/tests/specs/test.json]", "grabbit/tests/test_core.py::TestLayout::test_entity_mapper", "grabbit/tests/test_core.py::TestLayout::test_excludes", "grabbit/tests/test_core.py::TestLayout::test_multiple_domains", "grabbit/tests/test_core.py::TestLayout::test_get_by_domain" ]
[]
MIT License
2,197
[ "grabbit/core.py", ".travis.yml", "README.md", ".coveragerc" ]
[ "grabbit/core.py", ".travis.yml", "README.md", ".coveragerc" ]
iotile__python_iotile_cloud-29
4cd309944c3702a252e953cfcf199994c80aeab1
2018-02-23 19:33:02
438b95a980f78f0d6ab847640583ef7af814b9a9
diff --git a/CHANGELOG.md b/CHANGELOG.md index fd79bda..8a6b6a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,8 @@ -### 0.8.4 +### v0.8.5 (2018-02-23) + + * Fix get stream acknowledgment to get the right dictionary field (Issue #28) + +### v0.8.4 * Add support for PATCH in API diff --git a/iotile_cloud/utils/mock_cloud.py b/iotile_cloud/utils/mock_cloud.py index 7196cb9..24be7f1 100644 --- a/iotile_cloud/utils/mock_cloud.py +++ b/iotile_cloud/utils/mock_cloud.py @@ -206,26 +206,56 @@ class MockIOTileCloud(object): raise JSONErrorCode({'error': 'missing timestamp argument'}, 400) infile = request.files['file'] + self.logger.info("Received uploaded report, filename: %s", infile.filename) + indata = infile.read() - if len(indata) < (20 + 16): - print("Invalid input data length, too short, length=%d" % len(indata)) - raise ErrorCode({'error': 'invalid file length'}, 400) + # Quick code to support both message pack reports and SignedListReport without depending on ReportParser + if infile.filename.endswith(".mp"): + try: + import msgpack + except ImportError: + self.logger.error("python-msgpack not installed, cannot parse message pack report") + return {'count': 0} + + report = msgpack.unpackb(indata) + + fmt = report.get('format') + + lowest_id = report.get('lowest_id', 0) + highest_id = report.get('highest_id', 0) + device_id = report.get('device', 0) + origin_streamer = report.get('streamer_index') + streamer_selector = report.get('streamer_selector') + sent_timestamp = report.get('device_sent_timestamp') + report_id = report.get('incremental_id') + count = len(report.get('data', [])) + + if fmt != 'v100': + self.logger.error("Invalid format given in message packed report: %s", fmt) + raise ErrorCode(400) + else: + if len(indata) < (20 + 16): + print("Invalid input data length, too short, length=%d" % len(indata)) + raise ErrorCode({'error': 'invalid file length'}, 400) - inheader = indata[:20] - infooter = indata[-24:] + inheader = indata[:20] + infooter = indata[-24:] - lowest_id, highest_id = struct.unpack_from("<LL", infooter) - fmt, len_low, len_high, device_id, report_id, sent_timestamp, _signature_flags, origin_streamer, streamer_selector = struct.unpack("<BBHLLLBBH", inheader) + lowest_id, highest_id = struct.unpack_from("<LL", infooter) + fmt, len_low, len_high, device_id, report_id, sent_timestamp, _signature_flags, origin_streamer, streamer_selector = struct.unpack("<BBHLLLBBH", inheader) - length = (len_high << 8) | len_low - if length != len(indata): - print("Invalid input data length, did not match, expected: %d, found: %d" % (length, len(indata))) - raise ErrorCode(500) + length = (len_high << 8) | len_low + if length != len(indata): + print("Invalid input data length, did not match, expected: %d, found: %d" % (length, len(indata))) + raise ErrorCode(500) + + if fmt != 1: + print("Invalid report format code, expected: %d, found: %d" % (1, fmt)) + raise ErrorCode(500) + + count = (length - 24 - 20) // 16 - if fmt != 1: - print("Invalid report format code, expected: %d, found: %d" % (1, fmt)) - raise ErrorCode(500) old_highest = self._get_streamer_ack(device_id, origin_streamer) if highest_id > old_highest: @@ -237,7 +267,7 @@ class MockIOTileCloud(object): act_first = None act_last = None elif act_first <= highest_id: - act_first = highest_id + 1 + act_first = highest_id + 1 report_record = { "id": str(uuid.uuid4()), @@ -257,13 +287,13 @@ class MockIOTileCloud(object): self.reports[report_record['id']] = report_record self.raw_report_files[report_record['id']] = 
indata - return {'count': (length - 24 - 20) // 16} + return {'count': count} def _get_streamer_ack(self, device_id, index): streamer = str(gid.IOTileStreamerSlug(device_id, index)) if streamer in self.streamers: - return self.streamers[streamer]['highest_id'] + return self.streamers[streamer]['last_id'] return 0 @@ -799,6 +829,9 @@ class MockIOTileCloud(object): selector (int): Optional selector criteria used by this streamer. If this is specified it is used as is. If not, the default selector for each index is used. + + Returns: + str: The slug of the streamer that was just created """ default_selectors = { @@ -832,6 +865,8 @@ class MockIOTileCloud(object): self.streamers[streamer_slug] = streamer_data + return streamer_slug + def quick_add_fleet(self, devices, is_network=False, fleet_slug=None, org_slug=None): """Quickly add a fleet. diff --git a/version.py b/version.py index 63fa052..ef88a31 100644 --- a/version.py +++ b/version.py @@ -1,1 +1,1 @@ -version = '0.8.4' +version = '0.8.5'
KeyError on streamer It seems this should be `last_id` instead of `highest_id`. @timburke can you confirm? https://github.com/iotile/python_iotile_cloud/blob/4cd309944c3702a252e953cfcf199994c80aeab1/iotile_cloud/utils/mock_cloud.py#L266
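A minimal illustration of the failure mode and the one-line fix; the record shape and the slug string are simplified stand-ins for what `quick_add_streamer` actually stores in the mock cloud:

```python
# The mock cloud keeps streamer records keyed by 'last_id'; reading
# 'highest_id' from such a record is what raised the KeyError.
streamers = {
    'streamer-slug': {'device': 'd--0001', 'index': 0, 'last_id': 10},
}

def get_streamer_ack(slug):
    record = streamers.get(slug)
    if record is None:
        return 0
    return record['last_id']  # pre-fix code read record['highest_id'] instead

print(get_streamer_ack('streamer-slug'))  # -> 10, matching the new test
```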
iotile/python_iotile_cloud
diff --git a/tests/test_mock_cloud.py b/tests/test_mock_cloud.py index f537d5c..4297095 100644 --- a/tests/test_mock_cloud.py +++ b/tests/test_mock_cloud.py @@ -138,6 +138,12 @@ def test_quick_add_functionality(mock_cloud_private_nossl): org_data = api.org(proj_data['org']).get() assert org_data['slug'] == "quick-test-org" + # Make sure quick add streamer works (Issue 28) + slug = cloud.quick_add_streamer(1, 0, 10) + resp = api.streamer(slug).get() + print(resp) + assert resp['last_id'] == 10 + def test_quick_add_device(mock_cloud_private_nossl): """Make sure quick_add_device works."""
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 3 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest_localserver", "numpy>=1.16.0", "pandas>=1.0.0" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 -e git+https://github.com/iotile/python_iotile_cloud.git@4cd309944c3702a252e953cfcf199994c80aeab1#egg=iotile_cloud MarkupSafe==3.0.2 numpy==2.0.2 packaging==24.2 pandas==2.2.3 pluggy==1.5.0 pytest==8.3.5 pytest-localserver==0.9.0.post0 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.32.3 six==1.17.0 tomli==2.2.1 tzdata==2025.2 urllib3==2.3.0 Werkzeug==3.1.3
name: python_iotile_cloud channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - markupsafe==3.0.2 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - pluggy==1.5.0 - pytest==8.3.5 - pytest-localserver==0.9.0.post0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.32.3 - six==1.17.0 - tomli==2.2.1 - tzdata==2025.2 - urllib3==2.3.0 - werkzeug==3.1.3 prefix: /opt/conda/envs/python_iotile_cloud
[ "tests/test_mock_cloud.py::test_quick_add_functionality" ]
[]
[ "tests/test_mock_cloud.py::test_http_support", "tests/test_mock_cloud.py::test_quick_add_device", "tests/test_mock_cloud.py::test_list_devices", "tests/test_mock_cloud.py::test_quick_add_fleet", "tests/test_mock_cloud.py::test_upload_report", "tests/test_mock_cloud.py::test_device_patch", "tests/test_mock_cloud.py::test_get_sg_dt" ]
[]
MIT License
2,198
[ "iotile_cloud/utils/mock_cloud.py", "CHANGELOG.md", "version.py" ]
[ "iotile_cloud/utils/mock_cloud.py", "CHANGELOG.md", "version.py" ]
s-kostyuk__everpl-69
5361453ab3edd27c3e8ec037c688b96fa92acd4c
2018-02-23 19:53:25
f04b80d01a26cf2de97ea311625fcdb88c128c5f
diff --git a/dpl/auth/auth_aspect.py b/dpl/auth/auth_aspect.py new file mode 100644 index 0000000..a861d9a --- /dev/null +++ b/dpl/auth/auth_aspect.py @@ -0,0 +1,94 @@ +""" +This module contains a definition of authorization aspect - +a factory of interceptor methods which will intercept original +calls, extract an access token and other contextual information, +check user authentication using AuthService.check_permission +method and only then will pass execution to the wrapped method +""" +import inspect +import functools +from typing import Callable + +from .exceptions import AuthMissingTokenError +from .abs_auth_service import AbsAuthService +from .auth_context import AuthContext + + +class AuthAspect(object): + """ + A callable factory class that contains a definition of + an authorization advice. Such advice contains a logic + of extracting of contextual information (like an access + token, name of the domain / requested service and + requested action / service method) and checking it with + AuthService.check_permission functionality. + """ + def __init__(self, auth_service: AbsAuthService, auth_context: AuthContext): + """ + Constructor. Accepts an instance of AuthService used + for permission checking and an instance of AuthContext + for extraction of an access token + + :param auth_service: an instance of AuthService, + an object that will be used for permission + (authorization) checking + :param auth_context: an instance of AuthContext, an + object that will be used for extraction of + contextual information and access token in + particular + """ + self._auth_service = auth_service + self._auth_context = auth_context + + def __call__(self, wrapped_f: Callable) -> Callable: + """ + Returns a new callable which wraps the specified + wrapped_f callable with authorization logic + + :param wrapped_f: a callable to be wrapped + :return: a new callable which wraps the specified one + """ + original_wrapped = inspect.unwrap(wrapped_f) + qualname = original_wrapped.__qualname__ # type: str + + domain, method_name = qualname.rsplit(sep='.', maxsplit=1) + + @functools.wraps(wrapped_f) + def _auth_advice(*args, **kwargs): + """ + An authorization advice. Checks if the access token + associated with a current authorization context permits + execution of the requested action in the specified domain + with the specified arguments and, if everything is OK, + passes control to the wrapped (requested) method. + + :param args: positional arguments to be passed to the + wrapped callable + :param kwargs: keyword arguments to be passed to the + wrapped callable + :return: the same value as was returned by the wrapped + callable + :raises: the same exceptions as was raised by the wrapped + callable + :raises AuthMissingTokenError: if there is no access token + saved in the current context + """ + token = self._auth_context.current_token + + if token is None: + raise AuthMissingTokenError( + "Denied access to a protected service: Method was " + "called outside of an Authorization Context?" 
+ ) + + self._auth_service.check_permission( + access_token=token, + in_domain=domain, + to_execute=method_name, + args=args, + kwargs=kwargs + ) + + return wrapped_f(*args, **kwargs) + + return _auth_advice diff --git a/dpl/auth/auth_context.py b/dpl/auth/auth_context.py new file mode 100644 index 0000000..edb7ae8 --- /dev/null +++ b/dpl/auth/auth_context.py @@ -0,0 +1,81 @@ +""" +This module contains a definition of AuthContext - an +implementation of Ambient Context pattern based on a Python's +context manager +""" +import contextlib +from typing import Optional + +from dpl.utils.get_concurrent_identity import get_concurrent_identity + + +class AuthContext(object): + """ + AuthContext is an implementation of Ambient Context + pattern which: + + - will save current authentication token in a temporary + storage; + - will allow to read currently stored token; + - will remove a token from a temporary storage on an exit + from a context; + - will be used by AuthAspect for authorization and + authentication checking. + + All received access tokens are bound to the current thread (for + multi-threaded environments) or task (for asynchronous + environments). + """ + def __init__(self): + """ + Constructor. Initializes an internal storage of tokens + """ + # key: a thread- or task-unique identifier + # value: a token itself + self._tokens = dict() + + def __call__(self, token: str): + """ + Constructs and returns a Python context manager. Adds auth + token to the temporary storage + + :param token: an auth token to be used for the current + Thread or Task + :return: an instance of a Python context manager + """ + return self._context_manager(token) + + @contextlib.contextmanager + def _context_manager(self, token: str): + """ + An implementation of a Python context manager. Saves the + specified token to the temporary storage, allows for a + code in the context to execute and finally removes an + access token from a temporary storage + + For more information about the structure of context + managers see Python documentation at https://goo.gl/6g7VES + + :param token: an auth token to be used for the current + Thread or Task + :return: None + """ + concurrent_id = get_concurrent_identity() + self._tokens[concurrent_id] = token + + yield + + self._tokens.pop(concurrent_id) + + @property + def current_token(self) -> Optional[str]: + """ + Returns an access token bound to the current Thread + or Task. Returns None if no access token was bound + + :return: an access token which is currently bound to + the current Thread or Task; returns None if + no access token was bound + """ + concurrent_id = get_concurrent_identity() + return self._tokens.get(concurrent_id, None) diff --git a/dpl/auth/exceptions.py b/dpl/auth/exceptions.py index 102c872..6b8ae35 100644 --- a/dpl/auth/exceptions.py +++ b/dpl/auth/exceptions.py @@ -30,3 +30,11 @@ class AuthInvalidTokenError(ServiceValidationError): was revoked or was not existing at all """ pass + + +class AuthMissingTokenError(ServiceValidationError): + """ + Exception to be raised if an attempt to extract an + access token out of a current context was failed + """ + pass
Implement Authentication aspect (in the form of a context manager) Implement an Aspect that will control Authorization in the system, i.e. check whether the current user is able (is authorized) to execute a requested action. Use Python's Context Managers and the concept of an Ambient Context for passing authentication tokens, and use the Interceptor pattern as an implementation detail for controlling calls to Services. Blocked by #63
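A usage sketch assembled from the tests added for this issue; the imports and class names are taken from that test file, and the mocked `AbsAuthService` stands in for a real permission checker:

```python
from unittest.mock import Mock

from dpl.auth.abs_auth_service import AbsAuthService
from dpl.auth.auth_aspect import AuthAspect, AuthMissingTokenError
from dpl.auth.auth_context import AuthContext
from dpl.repo_impls.in_memory.placement_repository import PlacementRepository
from dpl.service_impls.placement_service import PlacementService
from dpl.utils.simple_interceptor import SimpleInterceptor

auth_service = Mock(spec=AbsAuthService)   # permission checks are mocked out here
auth_context = AuthContext()
aspect = AuthAspect(auth_service=auth_service, auth_context=auth_context)

# Wrap a real service so every call goes through the authorization advice.
protected = SimpleInterceptor(wrapped=PlacementService(PlacementRepository()),
                              aspect=aspect)

with auth_context(token='sample-token'):
    protected.view_all()   # token found -> check_permission(...) -> call proceeds

try:
    protected.view_all()   # no ambient token bound to this thread/task
except AuthMissingTokenError:
    print('denied outside of an authorization context')
```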
s-kostyuk/everpl
diff --git a/tests/auth/test_auth_aspect.py b/tests/auth/test_auth_aspect.py new file mode 100644 index 0000000..068ae82 --- /dev/null +++ b/tests/auth/test_auth_aspect.py @@ -0,0 +1,87 @@ +import unittest +from unittest.mock import Mock + +from dpl.auth.auth_context import AuthContext +from dpl.auth.abs_auth_service import AbsAuthService +from dpl.repo_impls.in_memory.placement_repository import PlacementRepository +from dpl.service_impls.placement_service import PlacementService +from dpl.auth.auth_aspect import AuthAspect, AuthMissingTokenError +from dpl.utils.simple_interceptor import SimpleInterceptor +from dpl.utils.generate_token import generate_token + + +class TestAuthAspect(unittest.TestCase): + def setUp(self): + self.auth_service = Mock(spec=AbsAuthService) # type: AbsAuthService + self.auth_service.check_permission = Mock() + + self.auth_context = AuthContext() + self.auth_aspect = AuthAspect( + auth_service=self.auth_service, + auth_context=self.auth_context + ) + self.placement_repo = PlacementRepository() + self.placement_service = PlacementService(self.placement_repo) + self.protected_service = SimpleInterceptor( + wrapped=self.placement_service, + aspect=self.auth_aspect + ) # type: PlacementService + + self.sample_token = generate_token() + + def test_permission_checked(self): + """ + Tests that a random method of a protected service (or + some another class) was correctly intercepted, access + token with other contextual information was extracted + and passed to the AuthService's check_permission method + + :return: None + """ + with self.auth_context(token=self.sample_token): + self.protected_service.view_all() + + mocked_method = self.auth_service.check_permission # type: Mock + mocked_method.assert_called_with( + access_token=self.sample_token, + in_domain='PlacementService', + to_execute='view_all', + args=(), + kwargs=dict() + ) + + def test_call_arguments_passed(self): + """ + Tests that on permission checking all call arguments + was intercepted and passed to the authorization + checker + + :return: None + """ + placement_name = "Some Placement" + placement_image = None + + with self.auth_context(token=self.sample_token): + self.protected_service.create_placement( + placement_name, + image_url=placement_image + ) + + mocked_method = self.auth_service.check_permission # type: Mock + mocked_method.assert_called_with( + access_token=self.sample_token, + in_domain='PlacementService', + to_execute='create_placement', + args=(placement_name,), + kwargs={'image_url': placement_image} + ) + + def test_out_of_context_error(self): + """ + Tests that if an access token is missing in an + AuthContext, then an FIXME error will be raised + + :return: None + """ + with self.assertRaises(AuthMissingTokenError): + self.protected_service.view_all() diff --git a/tests/auth/test_auth_context.py b/tests/auth/test_auth_context.py new file mode 100644 index 0000000..aea5001 --- /dev/null +++ b/tests/auth/test_auth_context.py @@ -0,0 +1,105 @@ +import unittest +import uuid +import threading +import asyncio + +from dpl.auth.auth_context import AuthContext + + +class TestAuthContext(unittest.TestCase): + def test_same_thread(self): + ac = AuthContext() + token = uuid.uuid4().hex + + self.assertIsNone(ac.current_token) + + with ac(token=token): + self.assertEqual(ac.current_token, token) + + self.assertIsNone(ac.current_token) + + def test_thread_separation(self): + def _check_token_unavailable(): + self.assertIsNone(ac.current_token) + + ac = AuthContext() + token = uuid.uuid4().hex + + 
thread = threading.Thread(target=_check_token_unavailable) + thread2 = threading.Thread(target=_check_token_unavailable) + + with ac(token=token): + thread.start() + thread2.start() + + thread.join() + thread2.join() + + def test_thread_unique_tokens(self): + def _enter_context_and_save_token(): + token = uuid.uuid4().hex + + with ac(token=token): + tokens.append(token) + + tokens = list() + ac = AuthContext() + + thread = threading.Thread(target=_enter_context_and_save_token) + thread2 = threading.Thread(target=_enter_context_and_save_token) + + thread.start() + thread2.start() + + thread.join() + thread2.join() + + self.assertTrue( + len(tokens) == 2 + ) + + self.assertNotEqual( + tokens[0], tokens[1] + ) + + def test_task_separation(self): + async def _check_token_unavailable(): + self.assertIsNone(ac.current_token) + + ac = AuthContext() + token = uuid.uuid4().hex + loop = asyncio.get_event_loop() + + with ac(token=token): + loop.run_until_complete(_check_token_unavailable()) + loop.run_until_complete(_check_token_unavailable()) + + def test_task_unique_tokens(self): + async def _enter_context_and_save_token(): + token = uuid.uuid4().hex + + with ac(token=token): + tokens.append(token) + await asyncio.sleep(0.02) + + self.assertTrue( + len(tokens) == 2 + ) + + tokens = list() + ac = AuthContext() + loop = asyncio.get_event_loop() + + t1 = asyncio.ensure_future(_enter_context_and_save_token()) + t2 = asyncio.ensure_future(_enter_context_and_save_token()) + + loop.run_until_complete(t1) + loop.run_until_complete(t2) + + self.assertTrue( + len(tokens) == 2 + ) + + self.assertNotEqual( + tokens[0], tokens[1] + )
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference", "has_added_files", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 1 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aiohttp==3.8.6 aiosignal==1.2.0 appdirs==1.4.4 async-timeout==4.0.2 asynctest==0.13.0 attrs==22.2.0 certifi==2021.5.30 charset-normalizer==3.0.1 -e git+https://github.com/s-kostyuk/everpl.git@5361453ab3edd27c3e8ec037c688b96fa92acd4c#egg=everpl frozenlist==1.2.0 greenlet==2.0.2 idna==3.10 idna-ssl==1.1.0 importlib-metadata==4.8.3 iniconfig==1.1.1 multidict==5.2.0 packaging==21.3 passlib==1.7.4 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 PyYAML==6.0.1 SQLAlchemy==1.4.54 tomli==1.2.3 typing_extensions==4.1.1 yarl==1.7.2 zipp==3.6.0
name: everpl channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - aiohttp==3.8.6 - aiosignal==1.2.0 - appdirs==1.4.4 - async-timeout==4.0.2 - asynctest==0.13.0 - attrs==22.2.0 - charset-normalizer==3.0.1 - frozenlist==1.2.0 - greenlet==2.0.2 - idna==3.10 - idna-ssl==1.1.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - multidict==5.2.0 - packaging==21.3 - passlib==1.7.4 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==6.0.1 - sqlalchemy==1.4.54 - tomli==1.2.3 - typing-extensions==4.1.1 - yarl==1.7.2 - zipp==3.6.0 prefix: /opt/conda/envs/everpl
[ "tests/auth/test_auth_aspect.py::TestAuthAspect::test_call_arguments_passed", "tests/auth/test_auth_aspect.py::TestAuthAspect::test_out_of_context_error", "tests/auth/test_auth_aspect.py::TestAuthAspect::test_permission_checked", "tests/auth/test_auth_context.py::TestAuthContext::test_same_thread", "tests/auth/test_auth_context.py::TestAuthContext::test_task_separation", "tests/auth/test_auth_context.py::TestAuthContext::test_task_unique_tokens", "tests/auth/test_auth_context.py::TestAuthContext::test_thread_separation", "tests/auth/test_auth_context.py::TestAuthContext::test_thread_unique_tokens" ]
[]
[]
[]
MIT License
2,199
[ "dpl/auth/auth_aspect.py", "dpl/auth/auth_context.py", "dpl/auth/exceptions.py" ]
[ "dpl/auth/auth_aspect.py", "dpl/auth/auth_context.py", "dpl/auth/exceptions.py" ]
jmwri__ioccontainer-4
9155dbf9030df7bd911a5bc93feb397d1f545feb
2018-02-23 20:50:01
9155dbf9030df7bd911a5bc93feb397d1f545feb
diff --git a/ioccontainer/inject.py b/ioccontainer/inject.py index 162f6e1..98ba622 100644 --- a/ioccontainer/inject.py +++ b/ioccontainer/inject.py @@ -50,7 +50,11 @@ def inject_decorator(container: 'Container'): service = container.get(cls) if _is_positional_argument(position, parameter, new_args): - new_args.append(service) + if len(new_args) >= position + 1: + new_args[position] = service + else: + new_args.append(service) + elif _is_keyword_argument(parameter): kwargs[parameter.name] = service else: @@ -96,7 +100,9 @@ def _default_parameter_provided(parameter: inspect.Parameter) -> bool: def _argument_provided(position: int, parameter: inspect.Parameter, args: typing.List, kwargs: typing.Dict) -> bool: - return position < len(args) or parameter.name in kwargs.keys() + if position < len(args) and args[position] is not None: + return True + return kwargs.get(parameter.name) is not None def _is_positional_argument( @@ -106,7 +112,9 @@ def _is_positional_argument( inspect.Parameter.POSITIONAL_OR_KEYWORD) if parameter.kind not in positional_types: return False - return position == len(args) + if position == len(args): + return True + return position + 1 == len(args) and args[position] is None def _is_keyword_argument(parameter: inspect.Parameter) -> bool:
Inject should override value if None When using the `@inject` decorator, the provided parameter should be overridden if it is `None`. At the moment the provided parameter takes precedence 100% of the time, even when it is `None`.
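The expected behaviour after the fix, lifted from the tests added in this change; the exact import path for `inject` and `provider` is assumed here:

```python
from ioccontainer import inject, provider  # import path assumed for this sketch

@provider('str_service')
def provide_str():
    return 'string service'

@inject(string='str_service')
def my_fn(string):
    return string

assert my_fn() == 'string service'           # nothing passed -> injected
assert my_fn('overridden') == 'overridden'   # real value passed -> kept
assert my_fn(None) == 'string service'       # None passed -> injected (the fix)
```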
jmwri/ioccontainer
diff --git a/tests/test_container.py b/tests/test_container.py index d76d2e1..f338f75 100644 --- a/tests/test_container.py +++ b/tests/test_container.py @@ -148,3 +148,29 @@ def test_injection_to_constructor(): my_class = MyClass('my_test_string') assert my_class.some_str is 'my_test_string' assert my_class.get_val() is 1 + + +@provider('str_service') +def provide_str(): + return 'string service' + + +def test_param_overriding(): + @inject(string='str_service') + def my_fn(string): + return string + + assert my_fn() == 'string service' + assert my_fn('overridden') == 'overridden' + assert my_fn(None) == 'string service' + + +def test_multiple_param_overriding(): + @inject(s1='str_service', s2='str_service') + def my_fn(s1, s2): + return s1, s2 + + assert my_fn() == ('string service', 'string service') + assert my_fn('overridden') == ('overridden', 'string service') + assert my_fn(None) == ('string service', 'string service') + assert my_fn('overridden', None) == ('overridden', 'string service')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
cachetools==5.5.2 chardet==5.2.0 colorama==0.4.6 coverage==7.8.0 distlib==0.3.9 docopt==0.6.2 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work filelock==3.18.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work -e git+https://github.com/jmwri/ioccontainer.git@9155dbf9030df7bd911a5bc93feb397d1f545feb#egg=ioccontainer packaging @ file:///croot/packaging_1734472117206/work platformdirs==4.3.7 pluggy @ file:///croot/pluggy_1733169602837/work pyproject-api==1.9.0 pytest @ file:///croot/pytest_1738938843180/work pytest-watch==4.2.0 tomli==2.2.1 tox==4.25.0 typing_extensions==4.13.0 virtualenv==20.29.3 watchdog==6.0.0
name: ioccontainer channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - cachetools==5.5.2 - chardet==5.2.0 - colorama==0.4.6 - coverage==7.8.0 - distlib==0.3.9 - docopt==0.6.2 - filelock==3.18.0 - platformdirs==4.3.7 - pyproject-api==1.9.0 - pytest-watch==4.2.0 - tomli==2.2.1 - tox==4.25.0 - typing-extensions==4.13.0 - virtualenv==20.29.3 - watchdog==6.0.0 prefix: /opt/conda/envs/ioccontainer
[ "tests/test_container.py::test_param_overriding", "tests/test_container.py::test_multiple_param_overriding" ]
[]
[ "tests/test_container.py::test_injection", "tests/test_container.py::test_singleton", "tests/test_container.py::test_invalid_scope", "tests/test_container.py::test_duplicate_provider", "tests/test_container.py::test_invalid_provider", "tests/test_container.py::test_invalid_service_name", "tests/test_container.py::test_no_provider_specified", "tests/test_container.py::test_multiple_params", "tests/test_container.py::test_default_provided", "tests/test_container.py::test_argument_provided", "tests/test_container.py::test_threaded_provider", "tests/test_container.py::test_injection_to_constructor" ]
[]
MIT License
2,200
[ "ioccontainer/inject.py" ]
[ "ioccontainer/inject.py" ]
missionpinball__mpf-1104
04f9b7a8615af0c81701ddd066e77188be706c4a
2018-02-24 12:53:23
2c1bb3aa1e25674916bc4e0d17ccb6c3c87bd01b
diff --git a/mpf/core/service_controller.py b/mpf/core/service_controller.py index 694780a75..f4e53b5d3 100644 --- a/mpf/core/service_controller.py +++ b/mpf/core/service_controller.py @@ -105,7 +105,7 @@ class ServiceController(MpfController): raise AssertionError("Not in service mode!") light_map = [] for light in self.machine.lights.values(): - light_map.append(LightMap(next(iter(light.hw_drivers.values())).get_board_name(), light)) + light_map.append(LightMap(next(iter(light.hw_drivers.values()))[0].get_board_name(), light)) # sort by board + driver number light_map.sort(key=lambda x: (self._natural_key_sort(x[0]), self._natural_key_sort(str(x[1].config['number'])))) diff --git a/mpf/devices/light.py b/mpf/devices/light.py index 3795b0a07..a2c5835aa 100644 --- a/mpf/devices/light.py +++ b/mpf/devices/light.py @@ -111,14 +111,15 @@ class Light(SystemWideDevice): del kwargs check_set = set() for light in machine.lights: - for driver in light.hw_drivers.values(): - key = (light.platform, driver.number, type(driver)) - if key in check_set: - raise AssertionError( - "Duplicate light number {} {} for light {}".format( - type(driver), driver.number, light)) + for drivers in light.hw_drivers.values(): + for driver in drivers: + key = (light.platform, driver.number, type(driver)) + if key in check_set: + raise AssertionError( + "Duplicate light number {} {} for light {}".format( + type(driver), driver.number, light)) - check_set.add(key) + check_set.add(key) def _map_channels_to_colors(self, channel_list) -> dict: if self.config['type']: @@ -143,20 +144,24 @@ class Light(SystemWideDevice): for color_name in color_channels: # red channel if color_name == 'r': - channels["red"] = channel_list.pop(0) + full_color_name = "red" # green channel elif color_name == 'g': - channels["green"] = channel_list.pop(0) + full_color_name = "green" # blue channel elif color_name == 'b': - channels["blue"] = channel_list.pop(0) + full_color_name = "blue" # simple white channel elif color_name == 'w': - channels["white"] = channel_list.pop(0) + full_color_name = "white" else: raise AssertionError("Invalid element {} in type {} of light {}".format( color_name, self.config['type'], self.name)) + if full_color_name not in channels: + channels[full_color_name] = [] + channels[full_color_name].append(channel_list.pop(0)) + return channels def _load_hw_drivers(self): @@ -192,13 +197,19 @@ class Light(SystemWideDevice): format(self.name)) # alternatively use channels from config channels = self.config['channels'] + # ensure that we got lists + for channel in channels: + if not isinstance(channels[channel], list): + channels[channel] = [channels[channel]] if not channels: raise AssertionError("Light {} has no channels.".format(self.name)) - for color, channel in channels.items(): - channel = self.machine.config_validator.validate_config("light_channels", channel) - self.hw_drivers[color] = self._load_hw_driver(channel) + for color, channel_list in channels.items(): + self.hw_drivers[color] = [] + for channel in channel_list: + channel = self.machine.config_validator.validate_config("light_channels", channel) + self.hw_drivers[color].append(self._load_hw_driver(channel)) def _load_hw_driver(self, channel): """Load one channel.""" @@ -462,8 +473,9 @@ class Light(SystemWideDevice): self.stack[:] = [x for x in self.stack if x['key'] != key] def _schedule_update(self): - for color, hw_driver in self.hw_drivers.items(): - hw_driver.set_fade(partial(self._get_brightness_and_fade, color=color)) + for color, hw_drivers in 
self.hw_drivers.items(): + for hw_driver in hw_drivers: + hw_driver.set_fade(partial(self._get_brightness_and_fade, color=color)) for platform in self.platforms: platform.light_sync()
Support multiple channels for the same color in lights

This will only use the last channel since there can be only one per color:

```
l_white_led:
  number: 6-8
  type: www
```
missionpinball/mpf
diff --git a/mpf/tests/MpfTestCase.py b/mpf/tests/MpfTestCase.py index 7165cd661..8cb97203d 100644 --- a/mpf/tests/MpfTestCase.py +++ b/mpf/tests/MpfTestCase.py @@ -533,11 +533,11 @@ class MpfTestCase(unittest.TestCase): self.assertEqual(state, self.machine.switch_controller.is_active(name)) def assertLightChannel(self, light_name, brightness, channel="white"): - self.assertAlmostEqual(brightness / 255.0, self.machine.lights[light_name].hw_drivers[channel]. + self.assertAlmostEqual(brightness / 255.0, self.machine.lights[light_name].hw_drivers[channel][0]. current_brightness) def assertNotLightChannel(self, light_name, brightness, channel="white"): - self.assertNotEqual(brightness, self.machine.lights[light_name].hw_drivers[channel]. + self.assertNotEqual(brightness, self.machine.lights[light_name].hw_drivers[channel][0]. current_brightness) def assertLightColor(self, light_name, color): diff --git a/mpf/tests/machine_files/light/config/light.yaml b/mpf/tests/machine_files/light/config/light.yaml index 8bb72e4b0..28bea61e7 100644 --- a/mpf/tests/machine_files/light/config/light.yaml +++ b/mpf/tests/machine_files/light/config/light.yaml @@ -32,13 +32,13 @@ lights: led3: channels: red: - number: 7 + - number: 7 green: - number: 8 + - number: 8 blue: - number: 9 + - number: 9 white: - number: 10 + - number: 10 debug: True led4: number: 11 @@ -46,3 +46,7 @@ lights: led_corrected: number: color_correction_profile: correction_profile_1 + led_www: + number: 23 + type: www + debug: True diff --git a/mpf/tests/test_DeviceLight.py b/mpf/tests/test_DeviceLight.py index 1476a3b5b..bc3d3246a 100644 --- a/mpf/tests/test_DeviceLight.py +++ b/mpf/tests/test_DeviceLight.py @@ -42,25 +42,25 @@ class TestDeviceLight(MpfTestCase): # corrected color self.assertEqual(RGBColor([210, 184, 159]), led.color_correct(led.get_color())) # check hardware - self.assertEqual(210 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(184 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(159 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(210 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(184 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(159 / 255.0, led.hw_drivers["blue"][0].current_brightness) led.color(RGBColor([128, 128, 128])) self.advance_time_and_run() self.assertLightColor("led1", [128, 128, 128]) self.assertEqual(RGBColor([96, 83, 70]), led.color_correct(led.get_color())) - self.assertEqual(96 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(83 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(70 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(96 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(83 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(70 / 255.0, led.hw_drivers["blue"][0].current_brightness) led.color(RGBColor("black")) self.advance_time_and_run() self.assertLightColor("led1", [0, 0, 0]) self.assertEqual(RGBColor([0, 0, 0]), led.color_correct(led.get_color())) - self.assertEqual(0 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(0 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(0 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(0 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(0 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(0 / 255.0, 
led.hw_drivers["blue"][0].current_brightness) def test_consecutive_fades(self): self.assertLightColor("led1", [0, 0, 0]) @@ -340,25 +340,25 @@ class TestDeviceLight(MpfTestCase): # corrected color self.assertEqual(RGBColor([210, 184, 159]), led.color_correct(led.get_color())) # check hardware - self.assertEqual(210 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(184 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(159 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(210 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(184 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(159 / 255.0, led.hw_drivers["blue"][0].current_brightness) led.color(RGBColor([128, 128, 128])) self.advance_time_and_run() self.assertLightColor("led_corrected", [128, 128, 128]) self.assertEqual(RGBColor([96, 83, 70]), led.color_correct(led.get_color())) - self.assertEqual(96 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(83 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(70 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(96 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(83 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(70 / 255.0, led.hw_drivers["blue"][0].current_brightness) led.color(RGBColor("black")) self.advance_time_and_run() self.assertLightColor("led_corrected", [0, 0, 0]) self.assertEqual(RGBColor([0, 0, 0]), led.color_correct(led.get_color())) - self.assertEqual(0 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(0 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(0 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(0 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(0 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(0 / 255.0, led.hw_drivers["blue"][0].current_brightness) def test_non_rgb_leds(self): # test bgr @@ -366,22 +366,22 @@ class TestDeviceLight(MpfTestCase): led.color(RGBColor((11, 23, 42))) self.advance_time_and_run(1) - self.assertEqual(42 / 255, led.hw_drivers["blue"].current_brightness) - self.assertEqual('led-2', led.hw_drivers["blue"].number) - self.assertEqual(23 / 255, led.hw_drivers["green"].current_brightness) - self.assertEqual('led-3', led.hw_drivers["green"].number) - self.assertEqual(11 / 255, led.hw_drivers["red"].current_brightness) - self.assertEqual('led-4', led.hw_drivers["red"].number) + self.assertEqual(42 / 255, led.hw_drivers["blue"][0].current_brightness) + self.assertEqual('led-2', led.hw_drivers["blue"][0].number) + self.assertEqual(23 / 255, led.hw_drivers["green"][0].current_brightness) + self.assertEqual('led-3', led.hw_drivers["green"][0].number) + self.assertEqual(11 / 255, led.hw_drivers["red"][0].current_brightness) + self.assertEqual('led-4', led.hw_drivers["red"][0].number) led = self.machine.lights.led_bgr_2 led.color(RGBColor((11, 23, 42))) self.advance_time_and_run(1) - self.assertEqual(42 / 255, led.hw_drivers["blue"].current_brightness) - self.assertEqual('led-42-r', led.hw_drivers["blue"].number) - self.assertEqual(23 / 255, led.hw_drivers["green"].current_brightness) - self.assertEqual('led-42-g', led.hw_drivers["green"].number) - self.assertEqual(11 / 255, led.hw_drivers["red"].current_brightness) - self.assertEqual('led-42-b', led.hw_drivers["red"].number) + self.assertEqual(42 / 255, 
led.hw_drivers["blue"][0].current_brightness) + self.assertEqual('led-42-r', led.hw_drivers["blue"][0].number) + self.assertEqual(23 / 255, led.hw_drivers["green"][0].current_brightness) + self.assertEqual('led-42-g', led.hw_drivers["green"][0].number) + self.assertEqual(11 / 255, led.hw_drivers["red"][0].current_brightness) + self.assertEqual('led-42-b', led.hw_drivers["red"][0].number) # test rgbw led = self.machine.lights.led3 @@ -389,8 +389,20 @@ class TestDeviceLight(MpfTestCase): led.color(RGBColor((11, 23, 42))) self.advance_time_and_run(1) self.assertLightColor("led2", [11, 23, 42]) - self.assertEqual(11 / 255, led.hw_drivers["white"].current_brightness) - self.assertEqual('led-10', led.hw_drivers["white"].number) + self.assertEqual(11 / 255, led.hw_drivers["white"][0].current_brightness) + self.assertEqual('led-10', led.hw_drivers["white"][0].number) + + # test www light + led = self.machine.lights.led_www + led.on(128) + self.advance_time_and_run(1) + self.assertLightColor("led_www", [128, 128, 128]) + self.assertEqual(128 / 255, led.hw_drivers["white"][0].current_brightness) + self.assertEqual('led-23-r', led.hw_drivers["white"][0].number) + self.assertEqual(128 / 255, led.hw_drivers["white"][1].current_brightness) + self.assertEqual('led-23-g', led.hw_drivers["white"][1].number) + self.assertEqual(128 / 255, led.hw_drivers["white"][2].current_brightness) + self.assertEqual('led-23-b', led.hw_drivers["white"][2].number) def test_brightness_correction(self): led = self.machine.lights.led1 @@ -398,15 +410,15 @@ class TestDeviceLight(MpfTestCase): led.color(RGBColor((100, 100, 100))) self.advance_time_and_run(1) self.assertLightColor("led1", [100, 100, 100]) - self.assertEqual(100 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(100 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(100 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(100 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(100 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(100 / 255.0, led.hw_drivers["blue"][0].current_brightness) self.machine.set_machine_var("brightness", 0.8) led.color(RGBColor((100, 100, 100))) self.advance_time_and_run(1) self.assertLightColor("led1", [100, 100, 100]) - self.assertEqual(80 / 255.0, led.hw_drivers["red"].current_brightness) - self.assertEqual(80 / 255.0, led.hw_drivers["green"].current_brightness) - self.assertEqual(80 / 255.0, led.hw_drivers["blue"].current_brightness) + self.assertEqual(80 / 255.0, led.hw_drivers["red"][0].current_brightness) + self.assertEqual(80 / 255.0, led.hw_drivers["green"][0].current_brightness) + self.assertEqual(80 / 255.0, led.hw_drivers["blue"][0].current_brightness) diff --git a/mpf/tests/test_LightGroups.py b/mpf/tests/test_LightGroups.py index 1a1649c8b..4d8d99404 100644 --- a/mpf/tests/test_LightGroups.py +++ b/mpf/tests/test_LightGroups.py @@ -22,28 +22,28 @@ class TestLightGroups(MpfTestCase): def test_config(self): # stripe 1 - self.assertEqual("led-10-r", self.machine.lights["stripe1_light_10"].hw_drivers["red"].number) + self.assertEqual("led-10-r", self.machine.lights["stripe1_light_10"].hw_drivers["red"][0].number) self.assertListEqual(["test", "stripe1"], self.machine.lights["stripe1_light_10"].config['tags']) - self.assertEqual("led-11-r", self.machine.lights["stripe1_light_11"].hw_drivers["red"].number) - self.assertEqual("led-12-r", self.machine.lights["stripe1_light_12"].hw_drivers["red"].number) - 
self.assertEqual("led-13-r", self.machine.lights["stripe1_light_13"].hw_drivers["red"].number) - self.assertEqual("led-14-r", self.machine.lights["stripe1_light_14"].hw_drivers["red"].number) + self.assertEqual("led-11-r", self.machine.lights["stripe1_light_11"].hw_drivers["red"][0].number) + self.assertEqual("led-12-r", self.machine.lights["stripe1_light_12"].hw_drivers["red"][0].number) + self.assertEqual("led-13-r", self.machine.lights["stripe1_light_13"].hw_drivers["red"][0].number) + self.assertEqual("led-14-r", self.machine.lights["stripe1_light_14"].hw_drivers["red"][0].number) self.assertListEqual(["test", "stripe1"], self.machine.lights["stripe1_light_14"].config['tags']) # stripe 2 - self.assertEqual("led-7-200-r", self.machine.lights["stripe2_light_200"].hw_drivers["red"].number) + self.assertEqual("led-7-200-r", self.machine.lights["stripe2_light_200"].hw_drivers["red"][0].number) self.assertEqual(10, self.machine.lights["stripe2_light_200"].config['x']) self.assertEqual(20, self.machine.lights["stripe2_light_200"].config['y']) - self.assertEqual("led-7-201-r", self.machine.lights["stripe2_light_201"].hw_drivers["red"].number) + self.assertEqual("led-7-201-r", self.machine.lights["stripe2_light_201"].hw_drivers["red"][0].number) self.assertEqual(15, self.machine.lights["stripe2_light_201"].config['x']) self.assertEqual(20, self.machine.lights["stripe2_light_201"].config['y']) # ring 1 - self.assertEqual("led-20-r", self.machine.lights["ring1_light_20"].hw_drivers["red"].number) - self.assertEqual("led-21-r", self.machine.lights["ring1_light_21"].hw_drivers["red"].number) - self.assertEqual("led-22-r", self.machine.lights["ring1_light_22"].hw_drivers["red"].number) - self.assertEqual("led-23-r", self.machine.lights["ring1_light_23"].hw_drivers["red"].number) - self.assertEqual("led-24-r", self.machine.lights["ring1_light_24"].hw_drivers["red"].number) + self.assertEqual("led-20-r", self.machine.lights["ring1_light_20"].hw_drivers["red"][0].number) + self.assertEqual("led-21-r", self.machine.lights["ring1_light_21"].hw_drivers["red"][0].number) + self.assertEqual("led-22-r", self.machine.lights["ring1_light_22"].hw_drivers["red"][0].number) + self.assertEqual("led-23-r", self.machine.lights["ring1_light_23"].hw_drivers["red"][0].number) + self.assertEqual("led-24-r", self.machine.lights["ring1_light_24"].hw_drivers["red"][0].number) # 90 degree self.assertEqual(103, self.machine.lights["ring1_light_20"].config['x']) self.assertEqual(50, self.machine.lights["ring1_light_20"].config['y']) diff --git a/mpf/tests/test_P3_Roc.py b/mpf/tests/test_P3_Roc.py index 9b8b0bf93..30cec4ec7 100644 --- a/mpf/tests/test_P3_Roc.py +++ b/mpf/tests/test_P3_Roc.py @@ -739,78 +739,78 @@ class TestP3Roc(MpfTestCase): ) # test enable of matrix light - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter.called - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.called self.machine.lights.test_pdb_light.on() self.advance_time_and_run(.02) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.assert_called_with( cycle_seconds=0, schedule=4294967295, now=True, number=32 ) - 
self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter = MagicMock() + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter = MagicMock() self.machine.lights.test_pdb_light.on(brightness=128) self.advance_time_and_run(.02) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter.assert_called_with( 32, 1, 1, 0, True ) # test disable of matrix light - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable.called self.machine.lights.test_pdb_light.off() self.advance_time_and_run(.02) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable.assert_called_with(32) + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable.assert_called_with(32) def _test_pdb_gi_light(self): # test gi on device = self.machine.lights.test_gi num = self.machine.coils.test_gi.hw_driver.number - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() device.color("white") self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule.assert_has_calls([ call(now=True, number=num, cycle_seconds=0, schedule=4294967295)]) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() device.color([128, 128, 128]) self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter.assert_has_calls([ call(num, 1, 1, 0, True)]) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() device.color([245, 245, 245]) self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter.assert_has_calls([ call(num, 19, 1, 0, True)]) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() # test gi off - device.hw_drivers["white"].driver.hw_driver.proc.driver_disable = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_disable = MagicMock() device.color("off") self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_disable.assert_has_calls([ + 
device.hw_drivers["white"][0].driver.hw_driver.proc.driver_disable.assert_has_calls([ call(num)]) def _test_leds(self): device = self.machine.lights.test_led - device.hw_drivers['red'].proc.led_color = MagicMock() + device.hw_drivers['red'][0].proc.led_color = MagicMock() # test led on device.on() self.advance_time_and_run(1) - device.hw_drivers['red'].proc.led_color.assert_has_calls([ + device.hw_drivers['red'][0].proc.led_color.assert_has_calls([ call(2, 1, 255), call(2, 2, 255), call(2, 3, 255)], True) - device.hw_drivers['red'].proc.proc.led_color = MagicMock() + device.hw_drivers['red'][0].proc.proc.led_color = MagicMock() # test led off device.off() self.advance_time_and_run(1) - device.hw_drivers['red'].proc.led_color.assert_has_calls([ + device.hw_drivers['red'][0].proc.led_color.assert_has_calls([ call(2, 1, 0), call(2, 2, 0), call(2, 3, 0)], True) @@ -818,27 +818,27 @@ class TestP3Roc(MpfTestCase): # test led color device.color(RGBColor((2, 23, 42))) self.advance_time_and_run(1) - device.hw_drivers['red'].proc.led_color.assert_has_calls([ + device.hw_drivers['red'][0].proc.led_color.assert_has_calls([ call(2, 1, 2), call(2, 2, 23), call(2, 3, 42)], True) def _test_leds_inverted(self): device = self.machine.lights.test_led_inverted - device.hw_drivers['red'].proc.led_color = MagicMock() + device.hw_drivers['red'][0].proc.led_color = MagicMock() # test led on device.on() self.advance_time_and_run(1) - device.hw_drivers['red'].proc.led_color.assert_has_calls([ + device.hw_drivers['red'][0].proc.led_color.assert_has_calls([ call(2, 4, 0), call(2, 5, 0), call(2, 6, 0)], True) - device.hw_drivers['red'].proc.led_color = MagicMock() + device.hw_drivers['red'][0].proc.led_color = MagicMock() # test led off device.color("off") self.advance_time_and_run(1) - device.hw_drivers['red'].proc.led_color.assert_has_calls([ + device.hw_drivers['red'][0].proc.led_color.assert_has_calls([ call(2, 4, 255), call(2, 5, 255), call(2, 6, 255)], True) @@ -846,7 +846,7 @@ class TestP3Roc(MpfTestCase): # test led color device.color(RGBColor((2, 23, 42))) self.advance_time_and_run(1) - device.hw_drivers['red'].proc.led_color.assert_has_calls([ + device.hw_drivers['red'][0].proc.led_color.assert_has_calls([ call(2, 4, 255 - 2), call(2, 5, 255 -23), call(2, 6, 255 - 42)], True) diff --git a/mpf/tests/test_P_Roc.py b/mpf/tests/test_P_Roc.py index 10be6e70f..a55016d2f 100644 --- a/mpf/tests/test_P_Roc.py +++ b/mpf/tests/test_P_Roc.py @@ -327,87 +327,87 @@ class TestPRoc(MpfTestCase): ) # test enable of matrix light - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter.called - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.called self.machine.lights.test_pdb_light.on() self.advance_time_and_run(.02) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.assert_called_with( cycle_seconds=0, schedule=4294967295, now=True, number=32 ) self.machine.lights.test_pdb_light.on(brightness=128) self.advance_time_and_run(.02) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter.assert_called_with( 32, 1, 1, 0, True ) 
# test disable of matrix light - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable.called self.machine.lights.test_pdb_light.off() self.advance_time_and_run(.1) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable.assert_called_with(32) + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable.assert_called_with(32) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter = MagicMock() - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule = MagicMock() - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable = MagicMock() + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter = MagicMock() + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule = MagicMock() + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable = MagicMock() self.post_event("play_test_show") - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.assert_called_with( cycle_seconds=0, schedule=4294967295, now=True, number=32 ) self.advance_time_and_run(1) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_patter.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_patter.assert_called_with( 32, 3, 1, 0, True ) self.advance_time_and_run(1) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable.assert_called_with(32) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable = MagicMock() + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable.assert_called_with(32) + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable = MagicMock() self.advance_time_and_run(1) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.assert_called_with( cycle_seconds=0, schedule=4294967295, now=True, number=32 ) self.advance_time_and_run(10) - self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_schedule.assert_called_with( + self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_schedule.assert_called_with( cycle_seconds=0, schedule=4294967295, now=True, number=32 ) - assert not self.machine.lights.test_pdb_light.hw_drivers["white"].proc.driver_disable.called + assert not self.machine.lights.test_pdb_light.hw_drivers["white"][0].proc.driver_disable.called def _test_pdb_gi_light(self): # test gi on device = self.machine.lights.test_gi num = self.machine.coils.test_gi.hw_driver.number - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() device.color("white") self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule.assert_has_calls([ call(now=True, number=num, cycle_seconds=0, schedule=4294967295)]) - 
device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() device.color([128, 128, 128]) self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter.assert_has_calls([ call(num, 1, 1, 0, True)]) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() device.color([245, 245, 245]) self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter.assert_has_calls([ call(num, 19, 1, 0, True)]) - device.hw_drivers["white"].driver.hw_driver.proc.driver_patter = MagicMock() - device.hw_drivers["white"].driver.hw_driver.proc.driver_schedule = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_patter = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_schedule = MagicMock() # test gi off - device.hw_drivers["white"].driver.hw_driver.proc.driver_disable = MagicMock() + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_disable = MagicMock() device.color("off") self.advance_time_and_run(.1) - device.hw_drivers["white"].driver.hw_driver.proc.driver_disable.assert_has_calls([ + device.hw_drivers["white"][0].driver.hw_driver.proc.driver_disable.assert_has_calls([ call(num)]) def test_load_wpc(self):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.33
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asciimatics==1.14.0 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 future==1.0.0 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/missionpinball/mpf.git@04f9b7a8615af0c81701ddd066e77188be706c4a#egg=mpf packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work Pillow==8.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyfiglet==0.8.post1 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyserial==3.5 pyserial-asyncio==0.6 pytest==6.2.4 ruamel.base==1.0.0 ruamel.yaml==0.10.23 terminaltables==3.1.10 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing==3.7.4.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work wcwidth==0.2.13 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: mpf channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - asciimatics==1.14.0 - future==1.0.0 - pillow==8.4.0 - psutil==7.0.0 - pyfiglet==0.8.post1 - pyserial==3.5 - pyserial-asyncio==0.6 - ruamel-base==1.0.0 - ruamel-yaml==0.10.23 - terminaltables==3.1.10 - typing==3.7.4.3 - wcwidth==0.2.13 prefix: /opt/conda/envs/mpf
[ "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_brightness_correction", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_color_and_stack", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_color_correction", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_consecutive_fades", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_default_color_correction", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_default_on_color", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_fades", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_multiple_concurrent_fades", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_named_colors", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_non_rgb_leds", "mpf/tests/test_DeviceLight.py::TestDeviceLight::test_restore_to_fade_in_progress", "mpf/tests/test_LightGroups.py::TestLightGroups::test_config", "mpf/tests/test_P3_Roc.py::TestP3Roc::test_platform", "mpf/tests/test_P_Roc.py::TestPRoc::test_platform" ]
[]
[ "mpf/tests/test_LightGroups.py::TestLightGroups::test_color", "mpf/tests/test_P_Roc.py::TestPRoc::test_load_snux", "mpf/tests/test_P_Roc.py::TestPRoc::test_load_wpc" ]
[]
MIT License
2,201
[ "mpf/core/service_controller.py", "mpf/devices/light.py" ]
[ "mpf/core/service_controller.py", "mpf/devices/light.py" ]
jaywink__federation-117
aa8e8a79607f305f33baab76cb77669f27cc6861
2018-02-24 13:05:07
aa8e8a79607f305f33baab76cb77669f27cc6861
diff --git a/CHANGELOG.md b/CHANGELOG.md index 54f2b3f..4510564 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,10 @@ Parameter `to_user` is now `to_user_key` and thus instead of an object containing the `key` attribute it should now be an RSA public key object instance. This simplifies things since we only need the key from the user, nothing else. +* Switch Diaspora protocol to send new style entities ([related issue](https://github.com/jaywink/federation/issues/59)) + + We've already accepted these on incoming payloads for a long time and so do all the other platforms now, so now we always send out entities with the new property names. This can break federation with really old servers that don't understand these keys yet. + ### Fixed * Change unquote method used when preparing Diaspora XML payloads for verification ([related issue](https://github.com/jaywink/federation/issues/115)) diff --git a/federation/entities/diaspora/entities.py b/federation/entities/diaspora/entities.py index 8dd47d7..691e1c1 100644 --- a/federation/entities/diaspora/entities.py +++ b/federation/entities/diaspora/entities.py @@ -117,7 +117,7 @@ class DiasporaComment(DiasporaRelayableMixin, Comment): {"author_signature": self.signature}, {"parent_author_signature": self.parent_signature}, {"text": self.raw_content}, - {"diaspora_handle": self.handle}, + {"author": self.handle}, {"created_at": format_dt(self.created_at)}, ]) return element @@ -131,9 +131,9 @@ class DiasporaPost(DiasporaEntityMixin, Post): """Convert to XML message.""" element = etree.Element(self._tag_name) struct_to_xml(element, [ - {"raw_message": self.raw_content}, + {"text": self.raw_content}, {"guid": self.guid}, - {"diaspora_handle": self.handle}, + {"author": self.handle}, {"public": "true" if self.public else "false"}, {"created_at": format_dt(self.created_at)}, {"provider_display_name": self.provider_display_name}, @@ -150,13 +150,13 @@ class DiasporaLike(DiasporaRelayableMixin, Reaction): """Convert to XML message.""" element = etree.Element(self._tag_name) struct_to_xml(element, [ - {"target_type": "Post"}, + {"parent_type": "Post"}, {"guid": self.guid}, {"parent_guid": self.target_guid}, {"author_signature": self.signature}, {"parent_author_signature": self.parent_signature}, {"positive": "true"}, - {"diaspora_handle": self.handle}, + {"author": self.handle}, ]) return element @@ -203,7 +203,7 @@ class DiasporaProfile(DiasporaEntityMixin, Profile): """Convert to XML message.""" element = etree.Element(self._tag_name) struct_to_xml(element, [ - {"diaspora_handle": self.handle}, + {"author": self.handle}, {"first_name": self.name}, {"last_name": ""}, # Not used in Diaspora modern profiles {"image_url": self.image_urls["large"]}, @@ -289,7 +289,7 @@ class DiasporaReshare(DiasporaEntityMixin, Share): {"provider_display_name": self.provider_display_name}, {"public": "true" if self.public else "false"}, # Some of our own not in Diaspora protocol - {"raw_content": self.raw_content}, + {"text": self.raw_content}, {"entity_type": self.entity_type}, ]) return element
Fix outgoing Diaspora entity names according to new spec

Some of the entities we build for Diaspora XML messages are based on the old protocol version which we don't intend to fully support. Since Diaspora 0.6 already understands these new names, we can safely send them out by breaking only pre-0.6 compatibility.

See these changes from old to new: diaspora/diaspora_federation#29 + documentation of entities: http://diaspora.github.io/diaspora_federation/entities/
jaywink/federation
diff --git a/federation/tests/entities/diaspora/test_entities.py b/federation/tests/entities/diaspora/test_entities.py index 0d4e3d1..2d6d733 100644 --- a/federation/tests/entities/diaspora/test_entities.py +++ b/federation/tests/entities/diaspora/test_entities.py @@ -24,8 +24,8 @@ class TestEntitiesConvertToXML: assert result.tag == "status_message" assert len(result.find("created_at").text) > 0 result.find("created_at").text = "" # timestamp makes testing painful - converted = b"<status_message><raw_message>raw_content</raw_message><guid>guid</guid>" \ - b"<diaspora_handle>handle</diaspora_handle><public>true</public><created_at>" \ + converted = b"<status_message><text>raw_content</text><guid>guid</guid>" \ + b"<author>handle</author><public>true</public><created_at>" \ b"</created_at><provider_display_name>Socialhome</provider_display_name></status_message>" assert etree.tostring(result) == converted @@ -40,7 +40,7 @@ class TestEntitiesConvertToXML: result.find("created_at").text = "" # timestamp makes testing painful converted = b"<comment><guid>guid</guid><parent_guid>target_guid</parent_guid>" \ b"<author_signature>signature</author_signature><parent_author_signature>" \ - b"</parent_author_signature><text>raw_content</text><diaspora_handle>handle</diaspora_handle>" \ + b"</parent_author_signature><text>raw_content</text><author>handle</author>" \ b"<created_at></created_at></comment>" assert etree.tostring(result) == converted @@ -48,9 +48,9 @@ class TestEntitiesConvertToXML: entity = DiasporaLike(guid="guid", target_guid="target_guid", handle="handle", signature="signature") result = entity.to_xml() assert result.tag == "like" - converted = b"<like><target_type>Post</target_type><guid>guid</guid><parent_guid>target_guid</parent_guid>" \ + converted = b"<like><parent_type>Post</parent_type><guid>guid</guid><parent_guid>target_guid</parent_guid>" \ b"<author_signature>signature</author_signature><parent_author_signature>" \ - b"</parent_author_signature><positive>true</positive><diaspora_handle>handle</diaspora_handle>" \ + b"</parent_author_signature><positive>true</positive><author>handle</author>" \ b"</like>" assert etree.tostring(result) == converted @@ -71,7 +71,7 @@ class TestEntitiesConvertToXML: ) result = entity.to_xml() assert result.tag == "profile" - converted = b"<profile><diaspora_handle>[email protected]</diaspora_handle>" \ + converted = b"<profile><author>[email protected]</author>" \ b"<first_name>Bob Bobertson</first_name><last_name></last_name><image_url>urllarge</image_url>" \ b"<image_url_small>urlsmall</image_url_small><image_url_medium>urlmedium</image_url_medium>" \ b"<gender></gender><bio>foobar</bio><location></location><searchable>true</searchable>" \ @@ -102,7 +102,7 @@ class TestEntitiesConvertToXML: result.find("created_at").text = "" # timestamp makes testing painful converted = "<reshare><author>%s</author><guid>%s</guid><created_at></created_at><root_author>%s" \ "</root_author><root_guid>%s</root_guid><provider_display_name>%s</provider_display_name>" \ - "<public>%s</public><raw_content>%s</raw_content><entity_type>%s</entity_type></reshare>" % ( + "<public>%s</public><text>%s</text><entity_type>%s</entity_type></reshare>" % ( entity.handle, entity.guid, entity.target_handle, entity.target_guid, entity.provider_display_name, "true" if entity.public else "false", entity.raw_content, entity.entity_type, diff --git a/federation/tests/entities/diaspora/test_utils.py b/federation/tests/entities/diaspora/test_utils.py index 8794b03..e4e9b38 100644 --- 
a/federation/tests/entities/diaspora/test_utils.py +++ b/federation/tests/entities/diaspora/test_utils.py @@ -29,8 +29,8 @@ class TestGetFullXMLRepresentation: entity = Post() document = get_full_xml_representation(entity, "") document = re.sub(r"<created_at>.*</created_at>", "", document) # Dates are annoying to compare - assert document == "<XML><post><status_message><raw_message></raw_message><guid></guid>" \ - "<diaspora_handle></diaspora_handle><public>false</public>" \ + assert document == "<XML><post><status_message><text></text><guid></guid>" \ + "<author></author><public>false</public>" \ "<provider_display_name></provider_display_name></status_message></post></XML>"
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.15
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-warnings" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "dev-requirements.txt" ], "test_cmd": "py.test --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 arrow==1.2.3 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 colorama==0.4.5 commonmark==0.9.1 coverage==6.2 cssselect==1.1.0 dirty-validators==0.5.4 Django==2.0.13 docutils==0.18.1 factory-boy==3.2.1 Faker==14.2.1 -e git+https://github.com/jaywink/federation.git@aa8e8a79607f305f33baab76cb77669f27cc6861#egg=federation freezegun==1.2.2 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 isodate==0.6.1 Jinja2==3.0.3 jsonschema==3.2.0 livereload==2.6.3 lxml==5.3.1 MarkupSafe==2.0.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycrypto==2.6.1 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 pytest-cov==4.0.0 pytest-django==4.5.2 pytest-warnings==0.3.1 python-dateutil==2.9.0.post0 python-xrd==0.1 pytz==2025.2 recommonmark==0.7.1 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-autobuild==2021.3.14 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 tornado==6.1 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: federation channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - arrow==1.2.3 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - colorama==0.4.5 - commonmark==0.9.1 - coverage==6.2 - cssselect==1.1.0 - dirty-validators==0.5.4 - django==2.0.13 - docutils==0.18.1 - factory-boy==3.2.1 - faker==14.2.1 - freezegun==1.2.2 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isodate==0.6.1 - jinja2==3.0.3 - jsonschema==3.2.0 - livereload==2.6.3 - lxml==5.3.1 - markupsafe==2.0.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycrypto==2.6.1 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-django==4.5.2 - pytest-warnings==0.3.1 - python-dateutil==2.9.0.post0 - python-xrd==0.1 - pytz==2025.2 - recommonmark==0.7.1 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-autobuild==2021.3.14 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - tornado==6.1 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/federation
[ "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_post_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_comment_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_like_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_profile_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_reshare_to_xml", "federation/tests/entities/diaspora/test_utils.py::TestGetFullXMLRepresentation::test_returns_xml_document" ]
[]
[ "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_request_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_retraction_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntitiesConvertToXML::test_contact_to_xml", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_comment_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_contact_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_like_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_post_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_profile_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_request_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_reshare_ids", "federation/tests/entities/diaspora/test_entities.py::TestEntityAttributes::test_retraction_ids", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaProfileFillExtraAttributes::test_raises_if_no_handle", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaProfileFillExtraAttributes::test_calls_retrieve_and_parse_profile", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRetractionEntityConverters::test_entity_type_from_remote", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRetractionEntityConverters::test_entity_type_to_remote", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRelayableMixin::test_signing_comment_works", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRelayableMixin::test_signing_like_works", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRelayableMixin::test_sign_with_parent", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRelayableEntityValidate::test_raises_if_no_sender_key", "federation/tests/entities/diaspora/test_entities.py::TestDiasporaRelayableEntityValidate::test_calls_verify_signature", "federation/tests/entities/diaspora/test_utils.py::TestGetBaseAttributes::test_get_base_attributes_returns_only_intended_attributes", "federation/tests/entities/diaspora/test_utils.py::TestFormatDt::test_formatted_string_returned_from_tz_aware_datetime", "federation/tests/entities/diaspora/test_utils.py::test_add_element_to_doc" ]
[]
BSD 3-Clause "New" or "Revised" License
2,202
[ "federation/entities/diaspora/entities.py", "CHANGELOG.md" ]
[ "federation/entities/diaspora/entities.py", "CHANGELOG.md" ]
conda__conda-verify-36
0d50ea4da07553380b9077495f6ec863d062385c
2018-02-24 18:29:57
bdb3e6bf43174e08244f4323392297a5c87df0a1
diff --git a/conda_verify/checks.py b/conda_verify/checks.py index 1d8c170..a059c14 100644 --- a/conda_verify/checks.py +++ b/conda_verify/checks.py @@ -21,6 +21,8 @@ from conda_verify.utilities import (all_ascii, get_bad_seq, get_object_type, ensure_list, fullmatch) +ver_spec_pat = '^(?:[><=]{0,2}(?:(?:[\d\*]+[!\._]?){1,})[+\w\*]*[|,]?){1,}' + class CondaPackageCheck(object): """Create checks in order to validate conda package tarballs.""" @@ -40,7 +42,6 @@ class CondaPackageCheck(object): self.name_pat = re.compile(r'[a-z0-9_][a-z0-9_\-\.]*$') self.hash_pat = re.compile(r'[gh][0-9a-f]{5,}', re.I) self.version_pat = re.compile(r'[\w\.]+$') - self.ver_spec_pat = '^((==|>=|<=|<|>)(\d+(\.\d+)*(\.)*\*?)\,(==|>=|<=|<|>)(\d+(\.\d+)*(\.)*\*?)|(==|>=|<=|<|>)(\d+(\.\d+)*(\.)*\*?)|(\d+(\.\d+)*(\.)*\*?)\|(\d+(\.\d+)*(\.)*\*?)|(\d+(\.\d+)*(\.)*\*?))' @staticmethod def retrieve_package_name(path): @@ -115,7 +116,8 @@ class CondaPackageCheck(object): dependency_parts = dependency.split() if len(dependency_parts) == 0: return Error(self.path, 'C1113', 'Found empty dependencies in info/index.json') - elif len(dependency_parts) == 2 and not fullmatch(self.ver_spec_pat, dependency_parts[1]) or len(dependency_parts) > 3: + elif len(dependency_parts) == 2 and not fullmatch(ver_spec_pat, + dependency_parts[1]) or len(dependency_parts) > 3: return Error(self.path, 'C1114', 'Found invalid dependency "{}" in info/index.json' .format(dependency)) def check_license_family(self): @@ -376,7 +378,6 @@ class CondaRecipeCheck(object): self.recipe_dir = recipe_dir self.name_pat = re.compile(r'[a-z0-9_][a-z0-9_\-\.]*$') self.version_pat = re.compile(r'[\w\.]+$') - self.ver_spec_pat = '^((==|>=|<=|<|>)(\d+(\.\d+)*(\.)*\*?)\,(==|>=|<=|<|>)(\d+(\.\d+)*(\.)*\*?)|(==|>=|<=|<|>)(\d+(\.\d+)*(\.)*\*?)|(\d+(\.\d+)*(\.)*\*?)\|(\d+(\.\d+)*(\.)*\*?)|(\d+(\.\d+)*(\.)*\*?))' self.url_pat = re.compile(r'(ftp|http(s)?)://') self.hash_pat = {'md5': re.compile(r'[a-f0-9]{32}$'), 'sha1': re.compile(r'[a-f0-9]{40}$'), @@ -461,7 +462,7 @@ class CondaRecipeCheck(object): if len(requirement_parts) == 0: return Error(self.recipe_dir, 'C2113', 'Found empty dependencies in meta.yaml') - elif len(requirement_parts) >= 2 and not fullmatch(self.ver_spec_pat, requirement_parts[1]): + elif len(requirement_parts) >= 2 and not fullmatch(ver_spec_pat, requirement_parts[1]): return Error(self.recipe_dir, 'C2114', u'Found invalid dependency "{}" in meta.yaml' .format(requirement)) if len(build_requirements) != len(set(build_requirements)):
Pre-release versioning throws C1114

Pre-release versioning such as 2.0.0a0 will throw C1114 even though it's an acceptable versioning format.
conda/conda-verify
diff --git a/tests/unit_tests/test_regex.py b/tests/unit_tests/test_regex.py index 5f47f3c..bdfba76 100644 --- a/tests/unit_tests/test_regex.py +++ b/tests/unit_tests/test_regex.py @@ -16,19 +16,16 @@ def recipe_dir(): def test_ver_spec_pat(package_dir, recipe_dir): - package = os.path.join(package_dir, 'testfile-0.0.30-py27_0.tar.bz2') - recipe = os.path.join(recipe_dir, 'valid_test_file') - metadata = utilities.render_metadata(recipe, None) - package_check = checks.CondaPackageCheck(package) - recipe_check = checks.CondaRecipeCheck(metadata, recipe) - package_ver_spec_pat = package_check.ver_spec_pat - recipe_ver_spec_pat = recipe_check.ver_spec_pat + package_ver_spec_pat = checks.ver_spec_pat + recipe_ver_spec_pat = checks.ver_spec_pat extra_spec = '>===3.5' ge_version = '>=1.2' eq_version = '==1.2.2' pin_version = '>=2,<3' pin_version_long = '<=2.0.0*,<3.0.0*' + prerelease_version = '2.0rc1' + pin_prerelease_version = '>=1.9.3,<2.0.0a0' or_version = '1.0|1.2.*' regex_version = '3.6*' python_version = '3.6.*' @@ -38,6 +35,8 @@ def test_ver_spec_pat(package_dir, recipe_dir): assert utilities.fullmatch(package_ver_spec_pat, eq_version) assert utilities.fullmatch(package_ver_spec_pat, pin_version) assert utilities.fullmatch(package_ver_spec_pat, pin_version_long) + assert utilities.fullmatch(package_ver_spec_pat, prerelease_version) + assert utilities.fullmatch(package_ver_spec_pat, pin_prerelease_version) assert utilities.fullmatch(package_ver_spec_pat, or_version) assert utilities.fullmatch(package_ver_spec_pat, regex_version) assert utilities.fullmatch(package_ver_spec_pat, python_version) @@ -47,6 +46,8 @@ def test_ver_spec_pat(package_dir, recipe_dir): assert utilities.fullmatch(recipe_ver_spec_pat, eq_version) assert utilities.fullmatch(recipe_ver_spec_pat, pin_version) assert utilities.fullmatch(recipe_ver_spec_pat, pin_version_long) + assert utilities.fullmatch(recipe_ver_spec_pat, prerelease_version) + assert utilities.fullmatch(recipe_ver_spec_pat, pin_prerelease_version) assert utilities.fullmatch(recipe_ver_spec_pat, or_version) assert utilities.fullmatch(recipe_ver_spec_pat, regex_version) assert utilities.fullmatch(recipe_ver_spec_pat, python_version)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 1 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-mock", "pytest-catchlog" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 click==8.0.4 -e git+https://github.com/conda/conda-verify.git@0d50ea4da07553380b9077495f6ec863d062385c#egg=conda_verify coverage==6.2 future==1.0.0 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==3.0.3 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-catchlog==1.2.2 pytest-cov==4.0.0 pytest-mock==3.6.1 PyYAML==6.0.1 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: conda-verify channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - click==8.0.4 - coverage==6.2 - future==1.0.0 - jinja2==3.0.3 - markupsafe==2.0.1 - pytest-catchlog==1.2.2 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pyyaml==6.0.1 - tomli==1.2.3 prefix: /opt/conda/envs/conda-verify
[ "tests/unit_tests/test_regex.py::test_ver_spec_pat" ]
[]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,203
[ "conda_verify/checks.py" ]
[ "conda_verify/checks.py" ]
marshmallow-code__marshmallow-744
642d18b58d9b42d16b450a30b42aa57ea3859192
2018-02-24 22:05:11
8e217c8d6fefb7049ab3389f31a8d35824fa2d96
diff --git a/marshmallow/schema.py b/marshmallow/schema.py index 2327081a..79bb8ee1 100644 --- a/marshmallow/schema.py +++ b/marshmallow/schema.py @@ -836,11 +836,11 @@ class BaseSchema(base.SchemaABC): if pass_many: validator = functools.partial(validator, many=many) if many and not pass_many: - for idx, item in enumerate(data): + for idx, (item, orig) in enumerate(zip(data, original_data)): try: - self._unmarshal.run_validator(validator, - item, original_data, self.fields, many=many, - index=idx, pass_original=pass_original) + self._unmarshal.run_validator( + validator, item, orig, self.fields, many=many, + index=idx, pass_original=pass_original) except ValidationError as err: errors.update(err.messages) else:
post_dump is passing a list of objects as original object

Hi, I think post_dump with pass_original=True should pass the original object related to the serialized data, not the list of objects that the object belongs to.

```python
from marshmallow import fields, post_dump, Schema

class DeviceSchema(Schema):
    id = fields.String()

    @post_dump(pass_original=True)
    def __post_dump(self, data, obj):
        print(obj)  # <-- this is a list

devices = [dict(id=1), dict(id=2)]
DeviceSchema().dump(devices, many=True)
```

In the above example, the parameter `obj` is a list of devices rather than the device object itself. What do you think?
marshmallow-code/marshmallow
diff --git a/tests/test_decorators.py b/tests/test_decorators.py index 3fb3f1bb..226e550f 100644 --- a/tests/test_decorators.py +++ b/tests/test_decorators.py @@ -365,6 +365,35 @@ class TestValidatesSchemaDecorator: assert '_schema' in errors['nested'] assert 'foo' not in errors['nested'] + @pytest.mark.parametrize("data", ([{"foo": 1, "bar": 2}],)) + @pytest.mark.parametrize( + "pass_many,expected_data,expected_original_data", + ( + [True, [{"foo": 1}], [{"foo": 1, "bar": 2}]], + [False, {"foo": 1}, {"foo": 1, "bar": 2}], + ), + ) + def test_validator_nested_many_pass_original_and_pass_many( + self, pass_many, data, expected_data, expected_original_data): + + class NestedSchema(Schema): + foo = fields.Int(required=True) + + @validates_schema(pass_many=pass_many, pass_original=True) + def validate_schema(self, data, original_data, many=False): + assert data == expected_data + assert original_data == expected_original_data + assert many is pass_many + raise ValidationError("Method called") + + class MySchema(Schema): + nested = fields.Nested(NestedSchema, required=True, many=True) + + schema = MySchema() + errors = schema.validate({"nested": data}) + error = errors["nested"] if pass_many else errors["nested"][0] + assert error["_schema"][0] == "Method called" + def test_decorated_validators(self): class MySchema(Schema):
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 1 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[reco]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio", "pytest" ], "pre_install": [], "python": "3.9", "reqs_path": [ "dev-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 execnet==2.1.1 filelock==3.18.0 flake8==3.5.0 iniconfig==2.1.0 invoke==0.22.0 -e git+https://github.com/marshmallow-code/marshmallow.git@642d18b58d9b42d16b450a30b42aa57ea3859192#egg=marshmallow mccabe==0.6.1 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 py==1.11.0 pycodestyle==2.3.1 pyflakes==1.6.0 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 python-dateutil==2.6.1 pytz==2017.3 simplejson==3.13.2 six==1.17.0 toml==0.10.2 tomli==2.2.1 tox==3.12.1 typing_extensions==4.13.0 virtualenv==20.29.3
name: marshmallow channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - execnet==2.1.1 - filelock==3.18.0 - flake8==3.5.0 - iniconfig==2.1.0 - invoke==0.22.0 - mccabe==0.6.1 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - py==1.11.0 - pycodestyle==2.3.1 - pyflakes==1.6.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - python-dateutil==2.6.1 - pytz==2017.3 - simplejson==3.13.2 - six==1.17.0 - toml==0.10.2 - tomli==2.2.1 - tox==3.12.1 - typing-extensions==4.13.0 - virtualenv==20.29.3 prefix: /opt/conda/envs/marshmallow
[ "tests/test_decorators.py::TestValidatesSchemaDecorator::test_validator_nested_many_pass_original_and_pass_many[False-expected_data1-expected_original_data1-data0]" ]
[]
[ "tests/test_decorators.py::test_decorated_processors", "tests/test_decorators.py::TestPassOriginal::test_pass_original_single_no_mutation", "tests/test_decorators.py::TestPassOriginal::test_pass_original_single_with_mutation", "tests/test_decorators.py::TestPassOriginal::test_pass_original_many", "tests/test_decorators.py::test_decorated_processor_inheritance", "tests/test_decorators.py::test_pre_dump_is_invoked_before_implicit_field_generation", "tests/test_decorators.py::TestValidatesDecorator::test_validates", "tests/test_decorators.py::TestValidatesDecorator::test_validates_with_attribute", "tests/test_decorators.py::TestValidatesDecorator::test_validates_decorator", "tests/test_decorators.py::TestValidatesDecorator::test_field_not_present", "tests/test_decorators.py::TestValidatesDecorator::test_precedence", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_validator_nested_many", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_validator_nested_many_pass_original_and_pass_many[True-expected_data0-expected_original_data0-data0]", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_decorated_validators", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_multiple_validators", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_passing_original_data", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_allow_arbitrary_field_names_in_error", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_skip_on_field_errors", "tests/test_decorators.py::test_decorator_error_handling", "tests/test_decorators.py::test_decorator_error_handling_with_load[pre_load]", "tests/test_decorators.py::test_decorator_error_handling_with_load[post_load]", "tests/test_decorators.py::test_decorator_error_handling_with_dump[pre_dump]", "tests/test_decorators.py::test_decorator_error_handling_with_dump[post_dump]" ]
[]
MIT License
2,205
[ "marshmallow/schema.py" ]
[ "marshmallow/schema.py" ]
pre-commit__pre-commit-711
29715c9268dc866facf0b8a9cbe21d218d948a7b
2018-02-24 22:53:14
ac3a37d1a0e3575bddf23fd9babf6e56202b2988
diff --git a/pre_commit/commands/autoupdate.py b/pre_commit/commands/autoupdate.py index ca83a58..666cd11 100644 --- a/pre_commit/commands/autoupdate.py +++ b/pre_commit/commands/autoupdate.py @@ -33,9 +33,9 @@ def _update_repo(repo_config, runner, tags_only): Args: repo_config - A config for a repository """ - repo = Repository.create(repo_config, runner.store) + repo_path = runner.store.clone(repo_config['repo'], repo_config['sha']) - with cwd(repo._repo_path): + with cwd(repo_path): cmd_output('git', 'fetch') tag_cmd = ('git', 'describe', 'origin/master', '--tags') if tags_only: @@ -57,7 +57,7 @@ def _update_repo(repo_config, runner, tags_only): new_repo = Repository.create(new_config, runner.store) # See if any of our hooks were deleted with the new commits - hooks = {hook['id'] for hook in repo.repo_config['hooks']} + hooks = {hook['id'] for hook in repo_config['hooks']} hooks_missing = hooks - (hooks & set(new_repo.manifest_hooks)) if hooks_missing: raise RepositoryCannotBeUpdatedError( diff --git a/pre_commit/repository.py b/pre_commit/repository.py index 3ed160a..624ccd0 100644 --- a/pre_commit/repository.py +++ b/pre_commit/repository.py @@ -7,7 +7,6 @@ import os import pipes import shutil import sys -from collections import defaultdict import pkg_resources from cached_property import cached_property @@ -149,22 +148,11 @@ class Repository(object): else: return cls(config, store) - @cached_property - def _repo_path(self): - return self.store.clone( - self.repo_config['repo'], self.repo_config['sha'], - ) - - @cached_property - def _prefix(self): - return Prefix(self._repo_path) - - def _prefix_from_deps(self, language_name, deps): - return self._prefix - @cached_property def manifest_hooks(self): - manifest_path = os.path.join(self._repo_path, C.MANIFEST_FILE) + repo, sha = self.repo_config['repo'], self.repo_config['sha'] + repo_path = self.store.clone(repo, sha) + manifest_path = os.path.join(repo_path, C.MANIFEST_FILE) return {hook['id']: hook for hook in load_manifest(manifest_path)} @cached_property @@ -185,21 +173,25 @@ class Repository(object): for hook in self.repo_config['hooks'] ) - @cached_property + def _prefix_from_deps(self, language_name, deps): + repo, sha = self.repo_config['repo'], self.repo_config['sha'] + return Prefix(self.store.clone(repo, sha, deps)) + def _venvs(self): - deps_dict = defaultdict(_UniqueList) - for _, hook in self.hooks: - deps_dict[(hook['language'], hook['language_version'])].update( - hook['additional_dependencies'], - ) ret = [] - for (language, version), deps in deps_dict.items(): - ret.append((self._prefix, language, version, deps)) + for _, hook in self.hooks: + language = hook['language'] + version = hook['language_version'] + deps = hook['additional_dependencies'] + ret.append(( + self._prefix_from_deps(language, deps), + language, version, deps, + )) return tuple(ret) def require_installed(self): if not self.__installed: - _install_all(self._venvs, self.repo_config['repo'], self.store) + _install_all(self._venvs(), self.repo_config['repo'], self.store) self.__installed = True def run_hook(self, hook, file_args): @@ -237,19 +229,6 @@ class LocalRepository(Repository): for hook in self.repo_config['hooks'] ) - @cached_property - def _venvs(self): - ret = [] - for _, hook in self.hooks: - language = hook['language'] - version = hook['language_version'] - deps = hook['additional_dependencies'] - ret.append(( - self._prefix_from_deps(language, deps), - language, version, deps, - )) - return tuple(ret) - class 
MetaRepository(LocalRepository): @cached_property @@ -303,14 +282,3 @@ class MetaRepository(LocalRepository): (hook['id'], _hook(self.manifest_hooks[hook['id']], hook)) for hook in self.repo_config['hooks'] ) - - -class _UniqueList(list): - def __init__(self): - self._set = set() - - def update(self, obj): - for item in obj: - if item not in self._set: - self._set.add(item) - self.append(item) diff --git a/pre_commit/store.py b/pre_commit/store.py index 1311984..7e49c8f 100644 --- a/pre_commit/store.py +++ b/pre_commit/store.py @@ -72,9 +72,9 @@ class Store(object): with contextlib.closing(sqlite3.connect(tmpfile)) as db: db.executescript( 'CREATE TABLE repos (' - ' repo CHAR(255) NOT NULL,' - ' ref CHAR(255) NOT NULL,' - ' path CHAR(255) NOT NULL,' + ' repo TEXT NOT NULL,' + ' ref TEXT NOT NULL,' + ' path TEXT NOT NULL,' ' PRIMARY KEY (repo, ref)' ');', ) @@ -101,15 +101,17 @@ class Store(object): self._create() self.__created = True - def _new_repo(self, repo, ref, make_strategy): + def _new_repo(self, repo, ref, deps, make_strategy): self.require_created() + if deps: + repo = '{}:{}'.format(repo, ','.join(sorted(deps))) def _get_result(): # Check if we already exist with sqlite3.connect(self.db_path) as db: result = db.execute( 'SELECT path FROM repos WHERE repo = ? AND ref = ?', - [repo, ref], + (repo, ref), ).fetchone() if result: return result[0] @@ -137,7 +139,7 @@ class Store(object): ) return directory - def clone(self, repo, ref): + def clone(self, repo, ref, deps=()): """Clone the given url and checkout the specific ref.""" def clone_strategy(directory): cmd_output( @@ -151,7 +153,7 @@ class Store(object): env=no_git_env(), ) - return self._new_repo(repo, ref, clone_strategy) + return self._new_repo(repo, ref, deps, clone_strategy) def make_local(self, deps): def make_local_strategy(directory): @@ -172,8 +174,7 @@ class Store(object): _git_cmd('commit', '--no-edit', '--no-gpg-sign', '-n', '-minit') return self._new_repo( - 'local:{}'.format(','.join(sorted(deps))), C.LOCAL_REPO_VERSION, - make_local_strategy, + 'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy, ) @cached_property
Same repo but different additional dependencies
I have two Git projects `A` and `B`, and both use the same [pre-commit repo](https://github.com/coldnight/pre-commit-pylint), but with different additional dependencies:

`.pre-commit-config.yaml` in `A`:

```yaml
- repo: [email protected]:coldnight/pre-commit-pylint.git
  sha: 630e2662aabf3236fc62460b163d613c4bd1cfbc
  hooks:
  - id: pylint-py3k
  - id: pylint-score-limit
    args:
    - --limit=8.5
    - --rcfile=./.pylintrc
    additional_dependencies:
    - enum34; python_version<='3.4'
    - mock
```

`.pre-commit-config.yaml` in `B`:

```yaml
- repo: [email protected]:coldnight/pre-commit-pylint.git
  sha: 630e2662aabf3236fc62460b163d613c4bd1cfbc
  hooks:
  - id: pylint-py3k
  - id: pylint-score-limit
    args:
    - --limit=8.5
    - --rcfile=./.pylintrc
    additional_dependencies:
    - enum34; python_version<='3.4'
    - requests
```

Here is my problem:
1. First I run `pre-commit` in project `A`, and the environment gets installed.
2. Then I run `pre-commit` in project `B`, and the environment gets installed again.
3. When I go back to project `A` and run `pre-commit`, the environment installed in step 1 has been removed by step 2 and needs to be installed again (hugely slow!).

Any ideas for this? Perhaps support different projects using different home directories?
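A minimal standalone sketch of the cache-keying idea the patch above applies (the hashing here is purely illustrative; pre-commit itself stores `(repo, ref)` rows in sqlite rather than hashes): folding the sorted `additional_dependencies` into the repo key gives each dependency set its own environment, so project `A` and project `B` stop evicting each other.

```python
import hashlib


def env_cache_key(repo, ref, deps=()):
    """Build a stable key: the same repo/ref with different
    additional_dependencies maps to a different environment."""
    if deps:
        repo = '{}:{}'.format(repo, ','.join(sorted(deps)))
    return hashlib.sha256('{}@{}'.format(repo, ref).encode()).hexdigest()


key_a = env_cache_key('[email protected]:coldnight/pre-commit-pylint.git',
                      '630e2662aabf3236fc62460b163d613c4bd1cfbc',
                      ("enum34; python_version<='3.4'", 'mock'))
key_b = env_cache_key('[email protected]:coldnight/pre-commit-pylint.git',
                      '630e2662aabf3236fc62460b163d613c4bd1cfbc',
                      ("enum34; python_version<='3.4'", 'requests'))
assert key_a != key_b  # A and B now get separate cached environments
```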
pre-commit/pre-commit
diff --git a/tests/conftest.py b/tests/conftest.py index fd3784d..246820e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -165,12 +165,6 @@ def log_info_mock(): yield mck [email protected] -def log_warning_mock(): - with mock.patch.object(logging.getLogger('pre_commit'), 'warning') as mck: - yield mck - - class FakeStream(object): def __init__(self): self.data = io.BytesIO() diff --git a/tests/repository_test.py b/tests/repository_test.py index 0123ce4..dea387f 100644 --- a/tests/repository_test.py +++ b/tests/repository_test.py @@ -433,7 +433,7 @@ def test_venvs(tempdir_factory, store): path = make_repo(tempdir_factory, 'python_hooks_repo') config = make_config_from_repo(path) repo = Repository.create(config, store) - venv, = repo._venvs + venv, = repo._venvs() assert venv == (mock.ANY, 'python', python.get_default_version(), []) @@ -443,50 +443,33 @@ def test_additional_dependencies(tempdir_factory, store): config = make_config_from_repo(path) config['hooks'][0]['additional_dependencies'] = ['pep8'] repo = Repository.create(config, store) - venv, = repo._venvs + venv, = repo._venvs() assert venv == (mock.ANY, 'python', python.get_default_version(), ['pep8']) @pytest.mark.integration -def test_additional_dependencies_duplicated( - tempdir_factory, store, log_warning_mock, -): - path = make_repo(tempdir_factory, 'ruby_hooks_repo') - config = make_config_from_repo(path) - deps = ['thread_safe', 'tins', 'thread_safe'] - config['hooks'][0]['additional_dependencies'] = deps - repo = Repository.create(config, store) - venv, = repo._venvs - assert venv == (mock.ANY, 'ruby', 'default', ['thread_safe', 'tins']) - - [email protected] -def test_additional_python_dependencies_installed(tempdir_factory, store): +def test_additional_dependencies_roll_forward(tempdir_factory, store): path = make_repo(tempdir_factory, 'python_hooks_repo') - config = make_config_from_repo(path) - config['hooks'][0]['additional_dependencies'] = ['mccabe'] - repo = Repository.create(config, store) - repo.require_installed() - with python.in_env(repo._prefix, 'default'): - output = cmd_output('pip', 'freeze', '-l')[1] - assert 'mccabe' in output + config1 = make_config_from_repo(path) + repo1 = Repository.create(config1, store) + repo1.require_installed() + (prefix1, _, version1, _), = repo1._venvs() + with python.in_env(prefix1, version1): + assert 'mccabe' not in cmd_output('pip', 'freeze', '-l')[1] [email protected] -def test_additional_dependencies_roll_forward(tempdir_factory, store): - path = make_repo(tempdir_factory, 'python_hooks_repo') - config = make_config_from_repo(path) - # Run the repo once without additional_dependencies - repo = Repository.create(config, store) - repo.require_installed() - # Now run it with additional_dependencies - config['hooks'][0]['additional_dependencies'] = ['mccabe'] - repo = Repository.create(config, store) - repo.require_installed() - # We should see our additional dependency installed - with python.in_env(repo._prefix, 'default'): - output = cmd_output('pip', 'freeze', '-l')[1] - assert 'mccabe' in output + # Make another repo with additional dependencies + config2 = make_config_from_repo(path) + config2['hooks'][0]['additional_dependencies'] = ['mccabe'] + repo2 = Repository.create(config2, store) + repo2.require_installed() + (prefix2, _, version2, _), = repo2._venvs() + with python.in_env(prefix2, version2): + assert 'mccabe' in cmd_output('pip', 'freeze', '-l')[1] + + # should not have affected original + with python.in_env(prefix1, version1): + assert 'mccabe' not in 
cmd_output('pip', 'freeze', '-l')[1] @xfailif_windows_no_ruby @@ -499,7 +482,8 @@ def test_additional_ruby_dependencies_installed( config['hooks'][0]['additional_dependencies'] = ['thread_safe', 'tins'] repo = Repository.create(config, store) repo.require_installed() - with ruby.in_env(repo._prefix, 'default'): + (prefix, _, version, _), = repo._venvs() + with ruby.in_env(prefix, version): output = cmd_output('gem', 'list', '--local')[1] assert 'thread_safe' in output assert 'tins' in output @@ -516,7 +500,8 @@ def test_additional_node_dependencies_installed( config['hooks'][0]['additional_dependencies'] = ['lodash'] repo = Repository.create(config, store) repo.require_installed() - with node.in_env(repo._prefix, 'default'): + (prefix, _, version, _), = repo._venvs() + with node.in_env(prefix, version): output = cmd_output('npm', 'ls', '-g')[1] assert 'lodash' in output @@ -532,7 +517,8 @@ def test_additional_golang_dependencies_installed( config['hooks'][0]['additional_dependencies'] = deps repo = Repository.create(config, store) repo.require_installed() - binaries = os.listdir(repo._prefix.path( + (prefix, _, _, _), = repo._venvs() + binaries = os.listdir(prefix.path( helpers.environment_dir(golang.ENVIRONMENT_DIR, 'default'), 'bin', )) # normalize for windows @@ -598,8 +584,9 @@ def test_control_c_control_c_on_install(tempdir_factory, store): repo.run_hook(hook, []) # Should have made an environment, however this environment is broken! - envdir = 'py_env-{}'.format(python.get_default_version()) - assert repo._prefix.exists(envdir) + (prefix, _, version, _), = repo._venvs() + envdir = 'py_env-{}'.format(version) + assert prefix.exists(envdir) # However, it should be perfectly runnable (reinstall after botched # install) @@ -616,8 +603,8 @@ def test_invalidated_virtualenv(tempdir_factory, store): # Simulate breaking of the virtualenv repo.require_installed() - version = python.get_default_version() - libdir = repo._prefix.path('py_env-{}'.format(version), 'lib', version) + (prefix, _, version, _), = repo._venvs() + libdir = prefix.path('py_env-{}'.format(version), 'lib', version) paths = [ os.path.join(libdir, p) for p in ('site.py', 'site.pyc', '__pycache__') ]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_git_commit_hash", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 3 }
1.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aspy.yaml==1.3.0 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 cfgv==3.3.1 coverage==6.2 distlib==0.3.9 filelock==3.4.1 flake8==5.0.4 identify==2.4.4 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 mccabe==0.7.0 mock==5.2.0 nodeenv==1.6.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 -e git+https://github.com/pre-commit/pre-commit.git@29715c9268dc866facf0b8a9cbe21d218d948a7b#egg=pre_commit py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-env==0.6.2 PyYAML==6.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 virtualenv==20.17.1 zipp==3.6.0
name: pre-commit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - aspy-yaml==1.3.0 - attrs==22.2.0 - cached-property==1.5.2 - cfgv==3.3.1 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - flake8==5.0.4 - identify==2.4.4 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - mccabe==0.7.0 - mock==5.2.0 - nodeenv==1.6.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-env==0.6.2 - pyyaml==6.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - virtualenv==20.17.1 - zipp==3.6.0 prefix: /opt/conda/envs/pre-commit
[ "tests/repository_test.py::test_venvs", "tests/repository_test.py::test_additional_dependencies", "tests/repository_test.py::test_additional_dependencies_roll_forward", "tests/repository_test.py::test_additional_node_dependencies_installed", "tests/repository_test.py::test_control_c_control_c_on_install", "tests/repository_test.py::test_invalidated_virtualenv" ]
[ "tests/repository_test.py::test_switch_language_versions_doesnt_clobber", "tests/repository_test.py::test_run_a_ruby_hook", "tests/repository_test.py::test_run_versioned_ruby_hook", "tests/repository_test.py::test_run_ruby_hook_with_disable_shared_gems", "tests/repository_test.py::test_golang_hook", "tests/repository_test.py::test_additional_ruby_dependencies_installed", "tests/repository_test.py::test_additional_golang_dependencies_installed", "tests/repository_test.py::test_local_golang_additional_dependencies" ]
[ "tests/repository_test.py::test_python_hook", "tests/repository_test.py::test_python_hook_default_version", "tests/repository_test.py::test_python_hook_args_with_spaces", "tests/repository_test.py::test_python_hook_weird_setup_cfg", "tests/repository_test.py::test_versioned_python_hook", "tests/repository_test.py::test_run_a_node_hook", "tests/repository_test.py::test_run_versioned_node_hook", "tests/repository_test.py::test_system_hook_with_spaces", "tests/repository_test.py::test_missing_executable", "tests/repository_test.py::test_run_a_script_hook", "tests/repository_test.py::test_run_hook_with_spaced_args", "tests/repository_test.py::test_run_hook_with_curly_braced_arguments", "tests/repository_test.py::TestPygrep::test_grep_hook_matching", "tests/repository_test.py::TestPygrep::test_grep_hook_case_insensitive", "tests/repository_test.py::TestPygrep::test_grep_hook_not_matching[nope]", "tests/repository_test.py::TestPygrep::test_grep_hook_not_matching[foo'bar]", "tests/repository_test.py::TestPygrep::test_grep_hook_not_matching[^\\\\[INFO\\\\]]", "tests/repository_test.py::TestPCRE::test_grep_hook_matching", "tests/repository_test.py::TestPCRE::test_grep_hook_case_insensitive", "tests/repository_test.py::TestPCRE::test_grep_hook_not_matching[nope]", "tests/repository_test.py::TestPCRE::test_grep_hook_not_matching[foo'bar]", "tests/repository_test.py::TestPCRE::test_grep_hook_not_matching[^\\\\[INFO\\\\]]", "tests/repository_test.py::TestPCRE::test_pcre_hook_many_files", "tests/repository_test.py::TestPCRE::test_missing_pcre_support", "tests/repository_test.py::test_cwd_of_hook", "tests/repository_test.py::test_lots_of_files", "tests/repository_test.py::test_reinstall", "tests/repository_test.py::test_really_long_file_paths", "tests/repository_test.py::test_config_overrides_repo_specifics", "tests/repository_test.py::test_tags_on_repositories", "tests/repository_test.py::test_local_repository", "tests/repository_test.py::test_local_python_repo", "tests/repository_test.py::test_hook_id_not_present", "tests/repository_test.py::test_meta_hook_not_present", "tests/repository_test.py::test_too_new_version", "tests/repository_test.py::test_versions_ok[0.1.0]", "tests/repository_test.py::test_versions_ok[1.6.0]", "tests/repository_test.py::test_manifest_hooks" ]
[]
MIT License
2,207
[ "pre_commit/store.py", "pre_commit/commands/autoupdate.py", "pre_commit/repository.py" ]
[ "pre_commit/store.py", "pre_commit/commands/autoupdate.py", "pre_commit/repository.py" ]
pre-commit__pre-commit-713
8abfb37fdf8eac15940cfdeccb0a39f47b53df62
2018-02-24 23:43:02
ac3a37d1a0e3575bddf23fd9babf6e56202b2988
diff --git a/pre_commit/commands/autoupdate.py b/pre_commit/commands/autoupdate.py index ca83a58..f375913 100644 --- a/pre_commit/commands/autoupdate.py +++ b/pre_commit/commands/autoupdate.py @@ -106,7 +106,7 @@ def _write_new_config_file(path, output): f.write(to_write) -def autoupdate(runner, tags_only, repo=None): +def autoupdate(runner, tags_only, repos=()): """Auto-update the pre-commit config to the latest versions of repos.""" migrate_config(runner, quiet=True) retv = 0 @@ -120,7 +120,7 @@ def autoupdate(runner, tags_only, repo=None): is_local_repo(repo_config) or is_meta_repo(repo_config) or # Skip updating any repo_configs that aren't for the specified repo - repo and repo != repo_config['repo'] + repos and repo_config['repo'] not in repos ): output_repos.append(repo_config) continue diff --git a/pre_commit/main.py b/pre_commit/main.py index 16b6c3b..e2f48ed 100644 --- a/pre_commit/main.py +++ b/pre_commit/main.py @@ -168,7 +168,8 @@ def main(argv=None): ), ) autoupdate_parser.add_argument( - '--repo', help='Only update this repository.', + '--repo', dest='repos', action='append', metavar='REPO', + help='Only update this repository -- may be specified multiple times.', ) migrate_config_parser = subparsers.add_parser( @@ -251,7 +252,7 @@ def main(argv=None): return autoupdate( runner, tags_only=not args.bleeding_edge, - repo=args.repo, + repos=args.repos, ) elif args.command == 'migrate-config': return migrate_config(runner)
Autoupdate: select multiple repositories
Brought up from #657. As noted by @asottile, we could do `pre-commit autoupdate --repo repo1 --repo repo2 --repo repo3` to update multiple repositories. That can be implemented pretty easily with `action='append'`, changing the same parts as in #657.
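A minimal argparse sketch of the `action='append'` approach (standalone illustration; the flag and dest names mirror the patch, but this is not pre-commit's actual CLI wiring):

```python
import argparse

cli = argparse.ArgumentParser(prog='pre-commit autoupdate')
cli.add_argument(
    '--repo', dest='repos', action='append', metavar='REPO', default=[],
    help='Only update this repository -- may be specified multiple times.',
)

args = cli.parse_args(['--repo', 'repo1', '--repo', 'repo2'])
print(args.repos)  # ['repo1', 'repo2']

# During autoupdate, repos not named on the command line are skipped:
config_repos = ['repo1', 'repo2', 'repo3']
to_update = [r for r in config_repos if not args.repos or r in args.repos]
print(to_update)  # ['repo1', 'repo2']
```

One design note: when the flag is never given, the appended list stays empty, which the filtering step treats as "update everything", preserving the old default behaviour.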
pre-commit/pre-commit
diff --git a/tests/commands/autoupdate_test.py b/tests/commands/autoupdate_test.py index 91e7733..8fe4583 100644 --- a/tests/commands/autoupdate_test.py +++ b/tests/commands/autoupdate_test.py @@ -138,7 +138,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name( runner = Runner('.', C.CONFIG_FILE) before = open(C.CONFIG_FILE).read() repo_name = 'file://{}'.format(out_of_date_repo.path) - ret = autoupdate(runner, tags_only=False, repo=repo_name) + ret = autoupdate(runner, tags_only=False, repos=(repo_name,)) after = open(C.CONFIG_FILE).read() assert ret == 0 assert before != after @@ -158,7 +158,7 @@ def test_autoupdate_out_of_date_repo_with_wrong_repo_name( runner = Runner('.', C.CONFIG_FILE) before = open(C.CONFIG_FILE).read() # It will not update it, because the name doesn't match - ret = autoupdate(runner, tags_only=False, repo='wrong_repo_name') + ret = autoupdate(runner, tags_only=False, repos=('wrong_repo_name',)) after = open(C.CONFIG_FILE).read() assert ret == 0 assert before == after diff --git a/tests/commands/install_uninstall_test.py b/tests/commands/install_uninstall_test.py index 00d5eff..ea6727e 100644 --- a/tests/commands/install_uninstall_test.py +++ b/tests/commands/install_uninstall_test.py @@ -187,14 +187,11 @@ def test_unicode_merge_commit_message(tempdir_factory): with cwd(path): assert install(Runner(path, C.CONFIG_FILE)) == 0 cmd_output('git', 'checkout', 'master', '-b', 'foo') - cmd_output('git', 'commit', '--allow-empty', '-n', '-m', 'branch2') + cmd_output('git', 'commit', '--allow-empty', '-m', 'branch2') cmd_output('git', 'checkout', 'master') cmd_output('git', 'merge', 'foo', '--no-ff', '--no-commit', '-m', '☃') # Used to crash - cmd_output_mocked_pre_commit_home( - 'git', 'commit', '--no-edit', - tempdir_factory=tempdir_factory, - ) + cmd_output('git', 'commit', '--no-edit') def test_install_idempotent(tempdir_factory):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
1.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aspy.yaml==1.3.0 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 cfgv==3.3.1 coverage==6.2 distlib==0.3.9 filelock==3.4.1 flake8==5.0.4 identify==2.4.4 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 mccabe==0.7.0 mock==5.2.0 nodeenv==1.6.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 -e git+https://github.com/pre-commit/pre-commit.git@8abfb37fdf8eac15940cfdeccb0a39f47b53df62#egg=pre_commit py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-env==0.6.2 PyYAML==6.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 virtualenv==20.17.1 zipp==3.6.0
name: pre-commit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - aspy-yaml==1.3.0 - attrs==22.2.0 - cached-property==1.5.2 - cfgv==3.3.1 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - flake8==5.0.4 - identify==2.4.4 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - mccabe==0.7.0 - mock==5.2.0 - nodeenv==1.6.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-env==0.6.2 - pyyaml==6.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - virtualenv==20.17.1 - zipp==3.6.0 prefix: /opt/conda/envs/pre-commit
[ "tests/commands/autoupdate_test.py::test_autoupdate_out_of_date_repo_with_correct_repo_name", "tests/commands/autoupdate_test.py::test_autoupdate_out_of_date_repo_with_wrong_repo_name" ]
[ "tests/commands/autoupdate_test.py::test_updates_old_format_to_new_format", "tests/commands/install_uninstall_test.py::test_install_in_submodule_and_run", "tests/commands/install_uninstall_test.py::test_environment_not_sourced" ]
[ "tests/commands/autoupdate_test.py::test_up_to_date_repo", "tests/commands/autoupdate_test.py::test_autoupdate_up_to_date_repo", "tests/commands/autoupdate_test.py::test_autoupdate_old_revision_broken", "tests/commands/autoupdate_test.py::test_out_of_date_repo", "tests/commands/autoupdate_test.py::test_autoupdate_out_of_date_repo", "tests/commands/autoupdate_test.py::test_does_not_reformat", "tests/commands/autoupdate_test.py::test_loses_formatting_when_not_detectable", "tests/commands/autoupdate_test.py::test_autoupdate_tagged_repo", "tests/commands/autoupdate_test.py::test_autoupdate_tags_only", "tests/commands/autoupdate_test.py::test_hook_disppearing_repo_raises", "tests/commands/autoupdate_test.py::test_autoupdate_hook_disappearing_repo", "tests/commands/autoupdate_test.py::test_autoupdate_local_hooks", "tests/commands/autoupdate_test.py::test_autoupdate_local_hooks_with_out_of_date_repo", "tests/commands/autoupdate_test.py::test_autoupdate_meta_hooks", "tests/commands/install_uninstall_test.py::test_is_not_script", "tests/commands/install_uninstall_test.py::test_is_script", "tests/commands/install_uninstall_test.py::test_is_previous_pre_commit", "tests/commands/install_uninstall_test.py::test_install_pre_commit", "tests/commands/install_uninstall_test.py::test_install_hooks_directory_not_present", "tests/commands/install_uninstall_test.py::test_install_hooks_dead_symlink", "tests/commands/install_uninstall_test.py::test_uninstall_does_not_blow_up_when_not_there", "tests/commands/install_uninstall_test.py::test_uninstall", "tests/commands/install_uninstall_test.py::test_install_pre_commit_and_run", "tests/commands/install_uninstall_test.py::test_install_pre_commit_and_run_custom_path", "tests/commands/install_uninstall_test.py::test_commit_am", "tests/commands/install_uninstall_test.py::test_unicode_merge_commit_message", "tests/commands/install_uninstall_test.py::test_install_idempotent", "tests/commands/install_uninstall_test.py::test_failing_hooks_returns_nonzero", "tests/commands/install_uninstall_test.py::test_install_existing_hooks_no_overwrite", "tests/commands/install_uninstall_test.py::test_install_existing_hook_no_overwrite_idempotent", "tests/commands/install_uninstall_test.py::test_failing_existing_hook_returns_1", "tests/commands/install_uninstall_test.py::test_install_overwrite_no_existing_hooks", "tests/commands/install_uninstall_test.py::test_install_overwrite", "tests/commands/install_uninstall_test.py::test_uninstall_restores_legacy_hooks", "tests/commands/install_uninstall_test.py::test_replace_old_commit_script", "tests/commands/install_uninstall_test.py::test_uninstall_doesnt_remove_not_our_hooks", "tests/commands/install_uninstall_test.py::test_installs_hooks_with_hooks_True", "tests/commands/install_uninstall_test.py::test_install_hooks_command", "tests/commands/install_uninstall_test.py::test_installed_from_venv", "tests/commands/install_uninstall_test.py::test_pre_push_integration_failing", "tests/commands/install_uninstall_test.py::test_pre_push_integration_accepted", "tests/commands/install_uninstall_test.py::test_pre_push_new_upstream", "tests/commands/install_uninstall_test.py::test_pre_push_integration_empty_push", "tests/commands/install_uninstall_test.py::test_pre_push_legacy", "tests/commands/install_uninstall_test.py::test_commit_msg_integration_failing", "tests/commands/install_uninstall_test.py::test_commit_msg_integration_passing", "tests/commands/install_uninstall_test.py::test_commit_msg_legacy", 
"tests/commands/install_uninstall_test.py::test_install_disallow_mising_config", "tests/commands/install_uninstall_test.py::test_install_allow_mising_config", "tests/commands/install_uninstall_test.py::test_install_temporarily_allow_mising_config" ]
[]
MIT License
2,208
[ "pre_commit/commands/autoupdate.py", "pre_commit/main.py" ]
[ "pre_commit/commands/autoupdate.py", "pre_commit/main.py" ]
akolar__ogn-lib-9
a78134bedadbeba05ef3c4a514dd205fcd80300a
2018-02-26 12:49:07
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
codecov-io: # [Codecov](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=h1) Report > Merging [#9](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=desc) into [develop](https://codecov.io/gh/akolar/ogn-lib/commit/a78134bedadbeba05ef3c4a514dd205fcd80300a?src=pr&el=desc) will **decrease** coverage by `0.57%`. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/akolar/ogn-lib/pull/9/graphs/tree.svg?height=150&width=650&src=pr&token=h0RKAqOWA9)](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## develop #9 +/- ## =========================================== - Coverage 98.78% 98.21% -0.58% =========================================== Files 4 4 Lines 413 392 -21 =========================================== - Hits 408 385 -23 - Misses 5 7 +2 ``` | [Impacted Files](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [ogn\_lib/parser.py](https://codecov.io/gh/akolar/ogn-lib/pull/9/diff?src=pr&el=tree#diff-b2duX2xpYi9wYXJzZXIucHk=) | `99.17% <100%> (-0.83%)` | :arrow_down: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=footer). Last update [a78134b...b6490ea](https://codecov.io/gh/akolar/ogn-lib/pull/9?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py index 4373711..f43f6cb 100644 --- a/ogn_lib/parser.py +++ b/ogn_lib/parser.py @@ -1,6 +1,7 @@ import collections import functools import logging +import re from datetime import datetime, time, timedelta from ogn_lib import constants, exceptions @@ -43,6 +44,8 @@ class ParserBase(type): for c in callsigns: logger.debug('Setting %s as a parser for %s messages', name, c) meta.parsers[c] = class_ + elif callsigns is None: + pass else: raise TypeError('instance of __destto__ should be either a sequence' 'or a string; is {}'.format(type(callsigns))) @@ -69,7 +72,7 @@ class ParserBase(type): _, body = raw_message.split('>', 1) destto, *_ = body.split(',', 1) - if 'TCPIP*' in body or ':>' in body or 'qAC' in body: # server message + if 'TCPIP*' in body: return ServerParser.parse_message(raw_message) try: @@ -102,33 +105,75 @@ class Parser(metaclass=ParserBase): Base class for all parser classes. Implements parsing of APRS message header and calls the populates the data - with the values returned by the _parse_comment(comment) of the extending - class. + with the values returned by the _parse_protocol_specific(comment) of the + extending class. """ __default__ = True + # TNC-2 formatted header (p. 84) + PATTERN_HEADER = re.compile('(?P<source>.{1,9})' + '>(?P<destination>.{1,9}?)' + '(,(?P<digipeaters>.{0,80}))' + ':(?P<data>.*?)$') + + # Lat/Long Position Report Format - with Timestamp (p. 32) + PATTERN_LOCATION = re.compile('(@|/)' + '(?P<time>\d{6}(z|h))' + '(?P<latitude>\d{4}\.\d{2}(N|S))' + '(/|\\\\|I)(?P<longitude>\d{5}\.\d{2}(E|W))') + + PATTERN_COMMENT_COMON = re.compile('((?P<heading>\d{3})/(?P<speed>\d{3}))?' + '(/A=(?P<altitude>\d{6}))?' + '( (?P<protocol_specific>.*?))?$') + + # Merged header and position + PATTERN_ALL = re.compile('(?P<source>.{1,9})>' + '(?P<destination>.{1,9}?)' + '(,(?P<digipeaters>.{0,81})):' + '(@|/)' + '(?P<time>\d{6}(z|h))' + '(?P<latitude>\d{4}\.\d{2}(N|S))' + '.(?P<longitude>\d{5}\.\d{2}(E|W))' + '.((?P<heading>\d{3})/(?P<speed>\d{3}))?' + '(/A=(?P<altitude>\d{6}))?' + '( (?P<protocol_specific>.*?))?$') + @classmethod def parse_message(cls, raw_message): """ - Parses the fields of a raw APRS message to a dictionary. + Parses the fields of a raw APRS message to a dictionary. Returns + none if message could not be parsed. 
:param str raw_message: raw APRS message - :return: parsed message - :rtype: dict + :return: parsed message or None if the message failed to parse + :rtype: dict or None + :raises ogn_lib.exceptions.ParseError: if message cannot be parsed + using Parser.PATTERN_ALL """ - from_, body = raw_message.split('>', 1) - header, *comment = body.split(' ', 1) + match = cls.PATTERN_ALL.match(raw_message) + + if not match: + raise exceptions.ParseError('Message {} did not match {}' + .format(raw_message, cls.PATTERN_ALL)) data = { - 'from': from_, - 'beacon_type': constants.BeaconType.aircraft_beacon + 'from': match.group('source'), + 'destto': match.group('destination'), + 'beacon_type': constants.BeaconType.aircraft_beacon, + 'timestamp': Parser._parse_timestamp(match.group('time')), + 'latitude': Parser._parse_location(match.group('latitude')), + 'longitude': Parser._parse_location(match.group('longitude')), + 'altitude': Parser._parse_altitude(match.group('altitude')) } - data.update(Parser._parse_header(header)) + data.update(Parser._parse_digipeaters(match.group('digipeaters'))) + data.update(Parser._parse_heading_speed(match.group('heading'), + match.group('speed'))) - if comment: - comment_data = cls._parse_comment(comment[0]) + protocol_specific = match.group('protocol_specific') + if protocol_specific: + comment_data = cls._parse_protocol_specific(protocol_specific) try: cls._update_data(data, comment_data['_update']) @@ -142,117 +187,77 @@ class Parser(metaclass=ParserBase): return data @staticmethod - def _parse_header(header): + def _parse_digipeaters(digipeaters): """ - Parses the APRS message header. - - :param str header: APRS message between the '[callsign]>' and comment - field - :param str pos_separator: separator for latitude and longitude - :param str attrs_separator: separator for attributes substring - :return: parsed header - :rtype: dict - """ - - col_idx = header.find(':') - origin = header[:col_idx] - position = header[col_idx + 2:] + Parses the content of the APRS digipeaters field and extracts the + information about the receiver and the relayer. - data = Parser._parse_origin(origin) - data.update(Parser._parse_position(position)) - - return data - - @staticmethod - def _parse_origin(header): - """ - Parses the destto, receiver and relayer field of the APRS message. - :param str header: APRS message between the '[callsign]>' and position - information - :return: parsed origin part of the APRS message + :param str digipeaters: digipeaters string from the original message + :return: dictionary with the receiver and the relayer :rtype: dict + :raises ValueError: if digipeaters string is in invalid format """ - fields = header.split(',') + fields = digipeaters.split(',') - if len(fields) == 3: # standard message + if len(fields) == 2: # standard message relayer = None - elif len(fields) == 4: # relayed message - relayer = fields[1].strip('*') + elif len(fields) == 3: # relayed message + relayer = fields[0].strip('*') else: - raise ValueError('Unknown header format: {}'.format(header)) - - data = {'destto': fields[0], 'receiver': fields[-1], 'relayer': relayer} + raise ValueError('Unknown digipeaters format: {}' + .format(digipeaters)) - return data + return {'receiver': fields[-1], 'relayer': relayer} @staticmethod - def _parse_position(pos_header): + def _parse_heading_speed(heading, speed): """ - Parses the position information, timestamp, altitude, heading and - ground speed from an APRS message. 
+ Parses and converts the heading and speed from the original message + to the appropriate units. - :param str pos_header: position part of the APRS header - :param str pos_separator: separator for latitude and longitude - :param str attrs_separator: separator for attributes substring - :return: parsed position part of the APRS message + :param str heading: heading string + :param str speed: speed string + :return: dictionary containing the heading and ground speed in m/s :rtype: dict """ - timestamp = pos_header[0:7] - lat = pos_header[7:15] - lon = pos_header[16:25] - attrs = pos_header[26:] + if not heading or not speed: + return {} - data = { - 'timestamp': Parser._parse_timestamp(timestamp), - 'latitude': Parser._parse_location(lat), - 'longitude': Parser._parse_location(lon), - } - data.update(Parser._parse_attributes(attrs)) - - return data - - @staticmethod - def _parse_attributes(attributes): - """ - Parses the APRS attributes for heading, ground speed and altitude. + hdg = int(heading) + gsp = int(speed) - :param str attributes: attributes part of the APRS message - :return: parsed attributes - :rtype: dict - """ - - attrs = attributes.split('/') data = {} - if attrs[-1].startswith('A='): # has altitude - data['altitude'] = int(attrs[-1][2:]) * FEET_TO_METERS - else: - data['altitude'] = None - - if len(attrs) > 1 and attributes[0] != '/': # i.e., format is hdg/gsp/? - heading = int(attrs[0]) - speed = int(attrs[1]) - else: - heading = None - speed = None - - if heading or speed: - data['heading'] = heading - data['ground_speed'] = speed * KNOTS_TO_MS + if hdg or gsp: + data['heading'] = hdg + data['ground_speed'] = gsp * KNOTS_TO_MS else: data['heading'] = None data['ground_speed'] = None return data + @staticmethod + def _parse_altitude(altitude_string): + """ + Parses the altitude string and converts it from feet to meters. + + :param altitude_string: the altitude string + :type altitude_string: str or None + :return: altitude in meters or None if altitude_str is not given + :rtype: float or None + """ + + return int(altitude_string) * FEET_TO_METERS if altitude_string else None + @staticmethod def _parse_timestamp(timestamp_str): """ - Parses the UTC timestamp of an APRS package. + Parses the timestamp of an APRS package. - :param timestamp_str: utc timestamp string in %H%M%S or %d%H%M format + :param str timestamp_str: timestamp string in %H%M%S or %d%H%M format :return: parsed timestamp :rtype: datetime.datetime """ @@ -318,6 +323,9 @@ class Parser(metaclass=ParserBase): :rtype: float """ + if not location_str: + return None + sphere = location_str[-1] offset = 2 if sphere in ('N', 'S') else 3 @@ -330,7 +338,7 @@ class Parser(metaclass=ParserBase): return location @staticmethod - def _parse_comment(comment): + def _parse_protocol_specific(comment): """ Parses the comment string from APRS messages. @@ -339,7 +347,7 @@ class Parser(metaclass=ParserBase): :rtype: dict """ - logger.warn('Parser._parse_comment method not overriden') + logger.warn('Parser._parse_protocol_specific method not overriden') return {} @staticmethod @@ -411,7 +419,7 @@ class APRS(Parser): FLAGS_ADDRESS_TYPE = 0b11 @staticmethod - def _parse_comment(comment): + def _parse_protocol_specific(comment): """ Parses the comment string from APRS messages. @@ -510,7 +518,7 @@ class Naviter(Parser): FLAGS_ADDRESS_TYPE = 0b111111 << 4 @staticmethod - def _parse_comment(comment): + def _parse_protocol_specific(comment): """ Parses the comment string from Naviter's APRS messages. 
@@ -570,77 +578,52 @@ class Naviter(Parser): } -class ServerParser: +class ServerParser(Parser): """ Parser for server messages. """ - @staticmethod - def parse_message(raw_message): - """ - Passes a server message to an appropriate parser. - - :param raw_message: APRS message - :return: parsed data - :rtype: dict - """ - - if 'CPU' in raw_message or ':>' in raw_message: - data = ServerParser.parse_status(raw_message) - else: - data = ServerParser.parse_beacon(raw_message) + __destto__ = None - data['raw'] = raw_message - return data + PATTERN_ALL = re.compile('(?P<source>.{1,9})>' + '(?P<destination>.{1,9}?)' + '(,(?P<digipeaters>.{0,81})):' + '(@|/|>)' + '(?P<time>\d{6}(z|h))' + '((?P<latitude>\d{4}\.\d{2}(N|S))' + '.(?P<longitude>\d{5}\.\d{2}(E|W)))?' + '(.((?P<heading>\d{3})/(?P<speed>\d{3}))?' + '(/A=(?P<altitude>\d{6}))?)?' + '( (?P<protocol_specific>.*?))?$') - @staticmethod - def parse_beacon(raw_message): + @classmethod + def parse_message(cls, raw_message): """ - Parses server beacon messages. + Parses the server message using the Parser.parse_message. - :param raw_message: APRS message - :return: parsed data + :param str raw_message: raw APRS message + :return: parsed message :rtype: dict """ - from_, header = raw_message.split('>', 1) - position, *comment = header.split(' ', 1) - - data = { - 'from': from_, - 'beacon_type': constants.BeaconType.server_beacon - } + data = super().parse_message(raw_message) - data.update(Parser._parse_header(position)) - - data['comment'] = comment[0] if comment else None + if 'comment' in data: + data['beacon_type'] = constants.BeaconType.server_status + else: + data['beacon_type'] = constants.BeaconType.server_beacon return data @staticmethod - def parse_status(raw_message): + def _parse_protocol_specific(comment): """ - Parses server status messages. + Converts the comment field from the server status message to a format + expected by Parser.parse_message - :param raw_message: APRS message - :return: parsed data + :param str comment: status comment + :return: dictionary with the comment :rtype: dict """ - from_, body = raw_message.split('>', 1) - header, comment = body.split(' ', 1) - - sep_idx = header.find(':') - origin = header[:sep_idx] - timestamp = header[sep_idx + 2:sep_idx + 9] - - data = { - 'from': from_, - 'beacon_type': constants.BeaconType.server_status - } - - data.update(Parser._parse_origin(origin)) - data['timestamp'] = Parser._parse_timestamp(timestamp) - data['comment'] = comment - - return data + return {'comment': comment}
Rewrite parser engine using regex
Rewrite all parser functions that currently rely on string manipulation for parsing messages and replace them with regular expressions. Advantages include:
- more robust and extensible
- faster than the current implementation
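A minimal sketch of the named-group approach (the pattern is a simplified copy of the PATTERN_HEADER added in the patch above, and the sample message comes from the test data; this is not the full parser):

```python
import re

# Simplified TNC-2 header: source>destination,digipeaters:data
PATTERN_HEADER = re.compile(
    r'(?P<source>.{1,9})'
    r'>(?P<destination>.{1,9}?)'
    r'(,(?P<digipeaters>.{0,80}))'
    r':(?P<data>.*?)$'
)

msg = ("FLRDDA5BA>APRS,qAS,LFMX:/165829h4415.41N/00600.03E'342/049/A=005524 "
       "id0ADDA5BA -454fpm -1.1rot 8.8dB 0e+51.2kHz gps4x5")
match = PATTERN_HEADER.match(msg)
print(match.group('source'))       # FLRDDA5BA
print(match.group('destination'))  # APRS
print(match.group('digipeaters'))  # qAS,LFMX
print(match.group('data')[:20])    # start of the position/comment payload
```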
akolar/ogn-lib
diff --git a/tests/messages.txt b/tests/messages.txt new file mode 100644 index 0000000..72b0c93 --- /dev/null +++ b/tests/messages.txt @@ -0,0 +1,82 @@ +FLRDDA5BA>APRS,qAS,LFMX:/165829h4415.41N/00600.03E'342/049/A=005524 id0ADDA5BA -454fpm -1.1rot 8.8dB 0e +51.2kHz gps4x5 +ICA4B0E3A>APRS,qAS,Letzi:/165319h4711.75N\00802.59E^327/149/A=006498 id154B0E3A -3959fpm +0.5rot 9.0dB 0e -6.3kHz gps1x3 +FLRDDB091>APRS,qAS,Letzi:/165831h4740.04N/00806.01EX152/124/A=004881 id06DD8E80 +198fpm +0.0rot 6.5dB 13e +4.0kHz gps3x4 +FLRDDDD33>APRS,qAS,LFNF:/165341h4344.27N/00547.41E'/A=000886 id06DDDD33 +020fpm +0.0rot 20.8dB 0e -14.3kHz gps3x4 +FLRDDE026>APRS,qAS,LFNF:/165341h4358.58N/00553.89E'204/055/A=005048 id06DDE026 +257fpm +0.1rot 7.2dB 0e -0.8kHz gps4x7 +ICA484A9C>APRS,qAS,LFMX:/165341h4403.50N/00559.67E'/A=001460 id05484A9C +000fpm +0.0rot 18.0dB 0e +3.5kHz gps4x7 +OGNE95A16>APRS,qAS,Sylwek:/165641h5001.94N/01956.91E'270/004/A=000000 id07E95A16 +000fpm +0.1rot 37.8dB 0e -0.4kHz +ZK-GSC>APRS,qAS,Omarama:/165202h4429.25S/16959.33E'/A=001407 id05C821EA +020fpm +0.0rot 16.8dB 0e -3.1kHz gps1x3 hear1084 hearB597 hearB598 +ICA3ECE59>APRS,qAS,GLDRTR:/171254h5144.78N/00616.67E'263/000/A=000075 id093D0930 +000fpm +0.0rot +ICA3ECE59>APRS,qAS,GLDRTR:/171254h5144.78N/00616.67E'263/000/A=000075 id053ECE59 +Lachens>APRS,TCPIP*,qAC,GLIDERN2:/165334h4344.70NI00639.19E&/A=005435 v0.2.1 CPU:0.3 RAM:1764.4/2121.4MB NTP:2.8ms/+4.9ppm +47.0C RF:+0.70dB +LFGU>APRS,TCPIP*,qAC,GLIDERN2:/165556h4907.63NI00706.41E&/A=000833 v0.2.0 CPU:0.9 RAM:281.3/458.9MB NTP:0.5ms/-19.1ppm +53.0C RF:+0.70dB +LSGS>APRS,TCPIP*,qAC,GLIDERN1:/165345h4613.25NI00719.68E&/A=001581 CPU:0.7 RAM:247.9/456.4MB NTP:0.7ms/-11.4ppm +44.4C RF:+53+71.9ppm/+0.4dB +WolvesSW>APRS,TCPIP*,qAC,GLIDERN2:/165343h5232.23NI00210.91W&/A=000377 CPU:1.5 RAM:159.9/458.7MB NTP:6.6ms/-36.7ppm +45.5C RF:+130-0.4ppm/-0.1dB +Oxford>APRS,TCPIP*,qAC,GLIDERN1:/165533h5142.96NI00109.68W&/A=000380 v0.1.3 CPU:0.9 RAM:268.8/458.6MB NTP:0.5ms/-45.9ppm +60.5C RF:+55+2.9ppm/+1.54dB +Salland>APRS,TCPIP*,qAC,GLIDERN2:/165426h5227.93NI00620.03E&/A=000049 v0.2.2 CPU:0.7 RAM:659.3/916.9MB NTP:2.5ms/-75.0ppm RF:+0.41dB +LSGS>APRS,TCPIP*,qAC,GLIDERN1:/165345h4613.25NI00719.68E&/A=001581 CPU:0.7 RAM:247.9/456.4MB NTP:0.7ms/-11.4ppm +44.4C RF:+53+71.9ppm/+0.4dB +Drenstein>APRS,TCPIP*,qAC,GLIDERN1:/165011h5147.51NI00744.45E&/A=000213 v0.2.2 CPU:0.8 RAM:695.7/4025.5MB NTP:16000.0ms/+0.0ppm +63.0C +VITACURA1>APRS,TCPIP*,qAC,GLIDERN3:/042149h3322.81SI07034.95W&/A=002345 v0.2.5.ARM CPU:0.6 RAM:694.4/970.5MB NTP:0.8ms/-7.5ppm +54.8C RF:+0-0.2ppm/+3.81dB +Arnsberg>APRS,TCPIP*,qAC,GLIDERN1:/042146h5123.04NI00803.77E&/A=000623 v0.2.5.ARM CPU:0.4 RAM:765.1/970.8MB NTP:0.4ms/-1.7ppm +62.3C RF:+27+1.1ppm/+3.17dB +CNF3a>APRS,TCPIP*,qAC,GLIDERN3:/042143h4529.25NI07505.65W&/A=000259 v0.2.5.ARM CPU:0.6 RAM:514.6/970.8MB NTP:4.5ms/-1.5ppm +27.2C RF:+0-0.4ppm/+18.69dB +VITACURA2>APRS,TCPIP*,qAC,GLIDERN3:/042136h3322.81SI07034.95W&/A=002345 v0.2.5.ARM CPU:0.3 RAM:695.0/970.5MB NTP:0.6ms/-5.7ppm +51.5C RF:+0-0.0ppm/+1.32dB +ROBLE3>APRS,TCPIP*,qAC,GLIDERN4:/200022h3258.58SI07100.78W&/A=007229 Contact: [email protected], [email protected] +ALFALFAL>APRS,TCPIP*,qAC,GLIDERN4:/221830h3330.40SI07007.88W&/A=008659 Alfalfal Hidroelectric Plant, Club de Planeadores Vitacurs +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/062744h4845.03N/00230.46E'000/000 +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/064243h4839.64N/00236.78E'000/085/A=000410 +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/064548h4838.87N/00234.03E'000/042/A=000377 
+FLRDDEEF1>OGCAPT,qAS,CAPTURS:/064847h4837.95N/00234.36E'000/000 +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065144h4837.56N/00233.80E'000/000 +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065511h4837.63N/00233.79E'000/000 +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/070016h4837.63N/00233.77E'000/001/A=000360 +FLRDDEEF1>OGCAPT,qAS,CAPTURS:/070153h4837.62N/00233.77E'000/001/A=000344 +FLRDD89C9>OGFLR,qAS,LIDH:/115054h4543.22N/01132.84E'260/072/A=002542 !W10! id06DD89C9 +198fpm -0.8rot 7.0dB 0e +0.7kHz gps2x3 +FLRDD98C6>OGFLR,qAS,LIDH:/115054h4543.21N/01132.80E'255/074/A=002535 !W83! id0ADD98C6 +158fpm -1.8rot 10.5dB 0e -0.8kHz gps2x3 s6.09 h02 +ICAA8CBA8>OGFLR,qAS,MontCAIO:/231150z4512.12N\01059.03E^192/106/A=009519 !W20! id21A8CBA8 -039fpm +0.0rot 3.5dB 2e -8.7kHz gps1x2 s6.09 h43 rDF0267 +ICAA8CBA8>OGFLR,qAS,MontCAIO:/114949h4512.44N\01059.12E^190/106/A=009522 !W33! id21A8CBA8 -039fpm +0.1rot 4.5dB 1e -8.7kHz gps1x2 +14.3dBm +ICA3D1C35>OGFLR,qAS,Padova:/094220h4552.41N/01202.28E'110/099/A=003982 !W96! id053D1C35 -1187fpm +0.0rot 0.8dB 2e +4.5kHz gps1x2 s6.09 h32 rDD09D0 +FLRDDE48A>OGLT24,qAS,LT24:/102606h4030.47N/00338.38W'000/018/A=002267 id25387 +000fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102608h4030.47N/00338.38W'044/018/A=002270 id25387 +000fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102611h4030.47N/00338.38W'108/000/A=002280 id25387 +001fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102612h4030.47N/00338.38W'000/000/A=002280 id25387 +000fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102615h4030.47N/00338.39W'224/003/A=002280 id25387 +000fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102616h4030.47N/00338.38W'028/003/A=002250 id25387 -009fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102621h4030.47N/00338.38W'142/001/A=002267 id25387 +001fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102628h4030.47N/00338.38W'034/000/A=002263 id25387 +000fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/102717h4030.47N/00338.38W'000/000/A=002263 id25387 +000fpm GPS +FLRDDE48A>OGLT24,qAS,LT24:/110453h4030.47N/00338.38W'154/004/A=002253 id25387 +000fpm GPS +NAV042121>OGNAVI,qAS,NAVITER:/140648h4550.36N/01314.85E'090/152/A=001086 !W47! id0440042121 +000fpm +0.5rot +NAV04220E>OGNAVI,qAS,NAVITER:/140748h4552.27N/01155.61E'090/012/A=006562 !W81! id044004220E +060fpm +1.2rot +NAV07220E>OGNAVI,qAS,NAVITER:/125447h4557.77N/01220.19E'258/056/A=006562 !W76! id1C4007220E +180fpm +0.0rot +FLRFFFFFF>OGNAVI,NAV07220E*,qAS,NAVITER:/092002h1000.00S/01000.00W'000/000/A=003281 !W00! id2820FFFFFF +300fpm +1.7rot +MontCAIO>OGNSDR,TCPIP*,qAC,GLIDERN3:/132231h4427.84NI01009.60E&/A=004822 +Padova>OGNSDR,TCPIP*,qAC,GLIDERN1:/132326h4525.38NI01156.29E&/A=000069 +LIDH>OGNSDR,TCPIP*,qAC,GLIDERN1:/132447h4540.89NI01129.65E&/A=000328 +LZHL>OGNSDR,TCPIP*,qAC,GLIDERN3:/132457h4849.09NI01708.30E&/A=000528 +BELG>OGNSDR,TCPIP*,qAC,GLIDERN3:/132507h4509.60NI00919.20E&/A=000246 +Saleve>OGNSDR,TCPIP*,qAC,GLIDERN1:/132624h4607.70NI00610.41E&/A=004198 Antenna: chinese, on a pylon, 20 meter above ground +OGN2FD00F>OGNTRK,qAS,LZHL:/093213h4848.78N/01708.32E'000/000/A=000538 !W12! id072FD00F -058fpm +0.0rot FL003.12 32.8dB 0e -0.8kHz gps3x5 +FLRDD9C70>OGNTRK,OGN2FD00F*,qAS,LZHL:/093214h4848.77N/01708.33E'000/000/A=000515 !W56! id06DD9C70 -019fpm +0.0rot 32.2dB 0e -0.8kHz gps2x3 +FLRDD9C70>OGNTRK,OGN2FD00F*,qAS,LZHL:/093021h4848.77N/01708.33E'000/000/A=000518 !W66! id06DD9C70 -019fpm +0.0rot 29.0dB 0e -0.8kHz gps2x3 s6.09 h03 +OGN03AF2A>OGNTRK,qAS,LZHL:/092912h4848.77N/01708.33E'000/000/A=000535 !W53! id0703AF2A +000fpm +0.0rot FL003.15 4.5dB 1e -0.1kHz gps4x5 -11.2dBm +FLRDD9C70>OGNTRK,RELAY*,qAS,LZHL:/094124h4848.78N/01708.33E'000/000/A=000397 !W15! 
id06DD9C70 +099fpm +0.0rot 24.5dB 0e -1.4kHz gps10x15 +FLRDDDD78>OGSKYL,qAS,SKYLINES:/134403h4225.90N/00144.83E'000/000/A=008438 id2816 +000fpm +FLRDDF944>OGSPID,qAS,SPIDER:/190930h3322.78S/07034.60W'000/000/A=002263 id300234010617040 +19dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/190930h3322.78S/07034.60W'000/000/A=002263 id300234010617040 +19dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/192430h3322.78S/07034.61W'000/000/A=002250 id300234010617040 +12dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/193930h3322.10S/07034.26W'273/027/A=004071 id300234010617040 +9dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/195430h3322.82S/07034.90W'000/000/A=002217 id300234010617040 +10dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/193930h3322.78S/07034.60W'348/000/A=002286 id300234010617040 +12dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/195430h3323.16S/07037.68W'302/034/A=003316 id300234010617040 +10dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/195430h3323.16S/07037.68W'302/034/A=003316 id300234010617040 +10dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/200930h3319.13S/07036.12W'128/031/A=005482 id300234010617040 +15dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/200930h3319.13S/07036.12W'128/031/A=005482 id300234010617040 +15dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/202430h3314.92S/07032.08W'138/032/A=006453 id300234010617040 +9dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/203930h3321.38S/07027.29W'104/034/A=006272 id300234010617040 +8dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/205430h3322.13S/07033.53W'296/031/A=003927 id300234010617040 +7dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/210930h3322.05S/07035.74W'165/030/A=005187 id300234010617040 +8dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/212430h3322.02S/07036.14W'281/028/A=004550 id300234010617040 +7dB LWE 3D +FLRDDF944>OGSPID,qAS,SPIDER:/213930h3322.17S/07033.97W'332/028/A=003428 id300234010617040 +7dB LWE 3D +ICA3E7540>OGSPOT,qAS,SPOT:/161427h1448.35S/04610.86W'000/000/A=008677 id0-2860357 SPOT3 GOOD +ICA3E7540>OGSPOT,qAS,SPOT:/162923h1431.99S/04604.33W'000/000/A=006797 id0-2860357 SPOT3 GOOD +ICA3E7540>OGSPOT,qAS,SPOT:/163421h1430.38S/04604.43W'000/000/A=007693 id0-2860357 SPOT3 GOOD diff --git a/tests/test_parser.py b/tests/test_parser.py index 87108d4..cb9b05c 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -1,8 +1,25 @@ +import os import pytest -from datetime import datetime, timedelta +from datetime import datetime, timedelta, time from ogn_lib import exceptions, parser, constants +def get_messages(n_messages=float('inf')): + test_dir = os.path.dirname(os.path.realpath(__file__)) + + with open(os.path.join(test_dir, 'messages.txt'), 'r') as f: + lines = f.readlines() + + messages = [] + for i, l in enumerate(lines): + if i >= n_messages: + break + + messages.append(l.strip()) + + return messages + + class TestParserBase: def test_new_no_id(self): @@ -19,11 +36,20 @@ class TestParserBase: def test_new_multi_id(self): class Callsign(parser.Parser): - __destto__ = ['CALL1234', 'CALL4321'] + __destto__ = ['CALL234', 'CALL4321'] - assert 'CALL1234' in parser.ParserBase.parsers + assert 'CALL234' in parser.ParserBase.parsers assert 'CALL4321' in parser.ParserBase.parsers + def test_no_destto(self): + old = parser.ParserBase.parsers + + class Anon(parser.Parser): + __destto__ = None + + assert parser.ParserBase.parsers == old + assert 'Anon' not in parser.ParserBase.parsers + def test_new_wrong_id(self): with pytest.raises(TypeError): class Callsign(parser.Parser): @@ -82,151 +108,171 @@ class TestParserBase: class TestParser: - def test_parse_msg_from(self, mocker): - msg = 'FROM12345>payload' - with 
mocker.patch('ogn_lib.parser.Parser._parse_header', return_value={}): - data = parser.Parser.parse_message(msg) - assert data['from'] == 'FROM12345' - assert data['raw'] == msg - assert data['beacon_type'] is constants.BeaconType.aircraft_beacon + messages = get_messages() + + message = ("FLRDDA5BA>APRS,qAS,LFMX:/165829h4415.41N/00600.03E'342/049/A=" + "005524 id0ADDA5BA -454fpm -1.1rot 8.8dB 0e+51.2kHz gps4x5") + + expected_matches = { + 'source': 'FLRDDA5BA', + 'destination': 'APRS', + 'digipeaters': 'qAS,LFMX', + 'time': '165829h', + 'latitude': '4415.41N', + 'longitude': '00600.03E', + 'altitude': '005524', + 'speed': '049', + 'heading': '342' + } + + def _test_matches_all(self, pattern): + for msg in self.messages: + match = pattern.search(msg) + if not match: + raise Exception('Message not matched: {}'.format(msg)) + + def _test_pattern_field(self, pattern, field): + match = pattern.search(self.message) + assert match.group(field) == self.expected_matches[field] + + def test_pattern_header(self): + for field in ['source', 'destination', 'digipeaters']: + self._test_pattern_field(parser.Parser.PATTERN_HEADER, field) + + match = parser.Parser.PATTERN_HEADER.match(self.message) + assert match.group('data') + + def test_pattern_header_matches_all(self): + self._test_matches_all(parser.Parser.PATTERN_HEADER) + + def test_pattern_location(self): + for field in ['time', 'latitude', 'longitude']: + self._test_pattern_field(parser.Parser.PATTERN_LOCATION, field) + + def test_pattern_location_matches_all(self): + self._test_matches_all(parser.Parser.PATTERN_LOCATION) + + def test_pattern_comment_common(self): + for field in ['heading', 'speed', 'altitude']: + self._test_pattern_field(parser.Parser.PATTERN_COMMENT_COMON, field) + + def test_pattern_comment_common_matches_all(self): + self._test_matches_all(parser.Parser.PATTERN_COMMENT_COMON) + + def test_pattern_all(self): + for k in self.expected_matches.keys(): + self._test_pattern_field(parser.Parser.PATTERN_ALL, k) + + def test_pattern_all_matches_all(self): + self._test_matches_all(parser.Parser.PATTERN_ALL) + + def test_parse_msg_no_match(self): + with pytest.raises(exceptions.ParseError): + parser.Parser.parse_message('invalid message') + + def test_parse_msg_calls(self, mocker): + mocker.spy(parser.Parser, '_parse_timestamp') + mocker.spy(parser.Parser, '_parse_location') + mocker.spy(parser.Parser, '_parse_altitude') + mocker.spy(parser.Parser, '_parse_digipeaters') + mocker.spy(parser.Parser, '_parse_heading_speed') + mocker.spy(parser.Parser, '_parse_protocol_specific') + + parser.Parser.parse_message( + 'FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' + '276/075/A=001551') + + parser.Parser._parse_timestamp.assert_called_once_with('163148h') + assert parser.Parser._parse_location.call_count == 2 + parser.Parser._parse_altitude.assert_called_once_with('001551') + parser.Parser._parse_digipeaters.assert_called_once_with('qAS,EDLF') + parser.Parser._parse_heading_speed.assert_called_once_with('276', '075') + parser.Parser._parse_protocol_specific.assert_not_called() def test_parse_msg(self, mocker): - with mocker.patch('ogn_lib.parser.Parser._parse_header', - return_value={}): - with mocker.patch('ogn_lib.parser.Parser._parse_comment', - return_value={}): - - parser.Parser.parse_message( - 'FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' - '276/075/A=001551') - parser.Parser._parse_comment.assert_not_called() - parser.Parser._parse_header.assert_called_once_with( - 
'APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'276/075/A=001551') + data = parser.Parser.parse_message( + 'FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' + '276/075/A=001551') + + assert data['from'] == 'FLRDD83BC' + assert data['destto'] == 'APRS' def test_parse_msg_full(self, mocker): msg = ('NAV07220E>OGNAVI,qAS,NAVITER:/125447h4557.77N/01220.19E\'258/' '056/A=006562 !W76! id1C4007220E +180fpm +0.0rot') - with mocker.patch('ogn_lib.parser.Parser._parse_comment', - return_value={}): - parser.Parser.parse_message(msg) - parser.Parser._parse_comment.assert_called_once_with( - '!W76! id1C4007220E +180fpm +0.0rot') + mocker.spy(parser.Parser, '_parse_protocol_specific') + data = parser.Parser.parse_message(msg) + parser.Parser._parse_protocol_specific.assert_called_once_with( + '!W76! id1C4007220E +180fpm +0.0rot') + + assert data['raw'] == msg def test_parse_msg_delete_update(self, mocker): msg = ('NAV07220E>OGNAVI,qAS,NAVITER:/125447h4557.77N/01220.19E\'258/' '056/A=006562 !W76! id1C4007220E +180fpm +0.0rot') data = {'_update': [{'target': 'key', 'function': lambda x: x}]} - with mocker.patch('ogn_lib.parser.Parser._parse_header', - return_value={}): - with mocker.patch('ogn_lib.parser.Parser._parse_comment', - return_value=data): - with mocker.patch('ogn_lib.parser.Parser._update_data'): - parser.Parser.parse_message(msg) - assert parser.Parser._update_data.call_count == 1 + + mocker.patch('ogn_lib.parser.Parser._parse_protocol_specific', + return_value=data) + mocker.patch('ogn_lib.parser.Parser._update_data') + + parser.Parser.parse_message(msg) + assert parser.Parser._update_data.call_count == 1 def test_parse_msg_comment(self, mocker): - with mocker.patch('ogn_lib.parser.Parser._parse_header', - return_value={}): - with mocker.patch('ogn_lib.parser.Parser._parse_comment', - return_value={}): - - parser.Parser.parse_message( - 'FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' - '276/075/A=001551 [comment]') - parser.Parser._parse_comment.assert_called_once_with('[comment]') - - def test_parse_header(self, mocker): - with mocker.patch('ogn_lib.parser.Parser._parse_origin', - return_value={'a': 1}): - with mocker.patch('ogn_lib.parser.Parser._parse_position', - return_value={'b': 2}): - data = parser.Parser._parse_header('origin:/position') - - parser.Parser._parse_origin.assert_called_once_with('origin') - parser.Parser._parse_position.assert_called_once_with('position') - - assert data == {'a': 1, 'b': 2} - - def test_parse_header_separators(self, mocker): - with mocker.patch('ogn_lib.parser.Parser._parse_origin', - return_value={'a': 1}): - with mocker.patch('ogn_lib.parser.Parser._parse_position', - return_value={'b': 2}): - parser.Parser._parse_header('origin:/position') - - parser.Parser._parse_position.assert_called_once_with('position') - - def test_parse_origin(self): - data = parser.Parser._parse_origin('FMT-VERS,qAS,RECEIVER') + mocker.patch('ogn_lib.parser.Parser._parse_protocol_specific', + return_value={'comment': True}) + + data = parser.Parser.parse_message( + 'FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' + '276/075/A=001551 [comment]') + parser.Parser._parse_protocol_specific.assert_called_once_with('[comment]') + + assert data['comment'] + + def test_parse_digipeaters(self): + data = parser.Parser._parse_digipeaters('qAS,RECEIVER') assert data == { - 'destto': 'FMT-VERS', 'relayer': None, 'receiver': 'RECEIVER' } - def test_parse_origin_relayed(self): - data = parser.Parser._parse_origin('FMT-VERS,RELAYER*,qAS,RECEIVER') + def 
test_parse_digipeaters_relayed(self): + data = parser.Parser._parse_digipeaters('RELAYER*,qAS,RECEIVER') assert data == { - 'destto': 'FMT-VERS', 'relayer': 'RELAYER', 'receiver': 'RECEIVER' } - def test_parse_origin_unknown_format(self): + def test_parse_digipeaters_unknown_format(self): with pytest.raises(ValueError): - parser.Parser._parse_origin('FMT-VERS,qAS') - - def _parse_position_separator(self, mocker, p_separator, a_separator, msg): - c_time = datetime(2017, 1, 1, 1, 2, 3) - with mocker.patch('ogn_lib.parser.Parser._parse_timestamp', - return_value=c_time): - with mocker.patch('ogn_lib.parser.Parser._parse_attributes', - return_value={}): + parser.Parser._parse_digipeaters('qAS') - data = parser.Parser._parse_position(msg) - - parser.Parser._parse_attributes.assert_called_once_with( - '200/100/A=00042') - - assert data['timestamp'] == c_time - assert data['latitude'] == 1 - assert data['longitude'] == 2 - - def test_parse_position(self, mocker): - msg = '010203h0100.00N/00200.00E\'200/100/A=00042' - self._parse_position_separator(mocker, '/', '\'', msg) - - def test_parse_position_alt_separator(self, mocker): - msg = '010203h0100.00NI00200.00E&200/100/A=00042' - self._parse_position_separator(mocker, 'I', '&', msg) - - def test_parse_attrs(self): - pass - - def test_parse_attrs_na_hdg_speed(self): - data = parser.Parser._parse_position( - '010203h0100.00N/00200.00\'000/000/A=00042') + def test_parse_heading_speed(self): + data = parser.Parser._parse_heading_speed('100', '050') + assert data['heading'] == 100 + assert abs(data['ground_speed'] - 25.72) < 0.1 + def test_parse_heading_speed_both_missing(self): + data = parser.Parser._parse_heading_speed('000', '000') assert data['heading'] is None assert data['ground_speed'] is None - def test_parse_attrs_no_hdg_speed(self): - data = parser.Parser._parse_position( - '010203h0100.00N/00200.00\'/A=00042') + def test_parse_heading_speed_null_input(self): + assert not parser.Parser._parse_heading_speed(None, '000') + assert not parser.Parser._parse_heading_speed('000', None) + assert not parser.Parser._parse_heading_speed(None, None) - assert data['heading'] is None - assert data['ground_speed'] is None - assert data['altitude'] == 12.8016 + def test_parse_altitude(self): + assert abs(parser.Parser._parse_altitude('005000') - 1524) < 1 - def test_parse_attrs_no_alt(self): - data = parser.Parser._parse_position( - '010203h0100.00N/00200.00\'001/002') + def test_parse_altitude_missing(self): + assert parser.Parser._parse_altitude(None) is None - assert data['heading'] == 1 - assert abs(data['ground_speed'] - 1.028889) < 0.001 - assert data['altitude'] is None + def test_parse_attrs(self): + pass def test_parse_timestamp_h(self, mocker): with mocker.patch('ogn_lib.parser.Parser._parse_time'): @@ -270,12 +316,12 @@ class TestParser: def test_parse_location_value(self): val = parser.Parser._parse_location('0130.50N') - assert abs(val == 1.5083333) < 0.0001 + assert abs(val - 1.5083333) < 0.0001 val = parser.Parser._parse_location('01125.01W') assert abs(val - -11.416833) < 0.0001 - def test_parse_comment(self): - assert parser.Parser._parse_comment("1 2 3 4") == {} + def test_parse_protocol_specific(self): + assert parser.Parser._parse_protocol_specific("1 2 3 4") == {} def test_get_location_update_func(self): fn = parser.Parser._get_location_update_func(0) @@ -325,10 +371,10 @@ class TestParser: class TestAPRS: - def test_parse_comment(self): + def test_parse_protocol_specific(self): msg = ('!W12! 
id06DF0A52 +020fpm +0.0rot FL000.00 55.2dB 0e -6.2kHz' ' gps4x6 s6.01 h03 rDDACC4 +5.0dBm hearD7EA hearDA95') - data = parser.APRS._parse_comment(msg) + data = parser.APRS._parse_protocol_specific(msg) assert len(data['_update']) == 2 assert (set(map(lambda x: x['target'], data['_update'])) == {'latitude', 'longitude'}) @@ -366,9 +412,9 @@ class TestAPRS: class TestNaviter: - def test_parse_comment(self): + def test_parse_protocol_specific(self): msg = '!W76! id1C4007220E +180fpm +0.0rot' - data = parser.Naviter._parse_comment(msg) + data = parser.Naviter._parse_protocol_specific(msg) assert len(data['_update']) == 2 assert (set(map(lambda x: x['target'], data['_update'])) == {'latitude', 'longitude'}) @@ -397,66 +443,46 @@ class TestNaviter: class TestServerParser: def test_parse_message_beacon(self, mocker): - with mocker.patch('ogn_lib.parser.ServerParser.parse_beacon'): - msg = ('LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h4902.45NI01429.51E&' - '000/000/A=001689') - - data = parser.ServerParser.parse_message(msg) - parser.ServerParser.parse_beacon.assert_called_once_with(msg) - - assert data['raw'] + msg = ('LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h4902.45NI01429.51E&' + '000/000/A=001689') + + data = parser.ServerParser.parse_message(msg) + assert data['from'] == 'LKHS' + assert data['destto'] == 'APRS' + assert data['timestamp'].time() == time(21, 16, 35) + assert data['latitude'] == 49.04083333333333 + assert data['longitude'] == 14.491833333333334 + assert not data['heading'] + assert not data['ground_speed'] + assert abs(data['altitude'] - 514.8) < 1 + assert data['raw'] == msg + assert data['beacon_type'] == constants.BeaconType.server_beacon + assert 'comment' not in data def test_parse_message_status(self, mocker): - with mocker.patch('ogn_lib.parser.ServerParser.parse_status'): - msg = ( - 'LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h v0.2.6.ARM CPU:0.2 ' - 'RAM:777.7/972.2MB NTP:3.1ms/-3.8ppm 4.902V 0.583A +33.6C 14/' - '16Acfts[1h] RF:+62-0.8ppm/+33.66dB/+19.4dB@10km[112619]/+25.0' - 'dB@10km[8/15]') - - data = parser.ServerParser.parse_message(msg) - parser.ServerParser.parse_status.assert_called_once_with(msg) - + msg = ( + 'LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h v0.2.6.ARM CPU:0.2 ' + 'RAM:777.7/972.2MB NTP:3.1ms/-3.8ppm 4.902V 0.583A +33.6C 14/' + '16Acfts[1h] RF:+62-0.8ppm/+33.66dB/+19.4dB@10km[112619]/+25.0' + 'dB@10km[8/15]') + + data = parser.ServerParser.parse_message(msg) + assert data['from'] == 'LKHS' + assert data['destto'] == 'APRS' + assert data['timestamp'].time() == time(21, 16, 35) + assert not data['latitude'] + assert not data['longitude'] + assert 'heading' not in data + assert 'ground_speed' not in data + assert not data['altitude'] + assert data['raw'] == msg + assert data['beacon_type'] == constants.BeaconType.server_status assert data['raw'] - - def test_parse_beacon(self, mocker): - with mocker.patch('ogn_lib.parser.Parser._parse_header', return_value={}): - msg = ('LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h4902.45NI01429.51E&' - '000/000/A=001689') - data = parser.ServerParser.parse_beacon(msg) - parser.Parser._parse_header.assert_called_once_with( - 'APRS,TCPIP*,qAC,GLIDERN2:/211635h4902.45NI01429.51E&000/000/' - 'A=001689') - - assert data['from'] - assert data['beacon_type'] is constants.BeaconType.server_beacon - assert data['comment'] is None + assert data['comment'].startswith('v0.2.6') def test_parse_beacon_comment(self, mocker): - with mocker.patch('ogn_lib.parser.Parser._parse_header', return_value={}): - msg = 
('LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h4902.45NI01429.51E&' - '000/000/A=001689 comment') - data = parser.ServerParser.parse_beacon(msg) + msg = ('LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h4902.45NI01429.51E&' + '000/000/A=001689 comment') + data = parser.ServerParser.parse_message(msg) assert data['comment'] == 'comment' - - def test_parse_status(self, mocker): - msg = ( - 'LKHS>APRS,TCPIP*,qAC,GLIDERN2:/211635h v0.2.6.ARM CPU:0.2 RAM:777' - '.7/972.2MB NTP:3.1ms/-3.8ppm 4.902V 0.583A +33.6C 14/16Acfts[1h]' - 'RF:+62-0.8ppm/+33.66dB/+19.4dB@10km[112619]/+25.0dB@10km[8/15]') - - with mocker.patch('ogn_lib.parser.Parser._parse_origin', - return_value={'origin': 1}): - with mocker.patch('ogn_lib.parser.Parser._parse_timestamp', - return_value='ts'): - data = parser.ServerParser.parse_status(msg) - - parser.Parser._parse_timestamp.assert_called_once_with( - '211635h') - parser.Parser._parse_origin.assert_called_once_with( - 'APRS,TCPIP*,qAC,GLIDERN2') - - assert data['timestamp'] == 'ts' - assert data['beacon_type'] is constants.BeaconType.server_status - assert 'origin' in data
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-mock" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/akolar/ogn-lib.git@a78134bedadbeba05ef3c4a514dd205fcd80300a#egg=ogn_lib packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-mock==3.6.1 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - pytest-mock==3.6.1 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_msg_calls", "tests/test_parser.py::TestParser::test_parse_msg_full", "tests/test_parser.py::TestParser::test_parse_msg_delete_update", "tests/test_parser.py::TestParser::test_parse_msg_comment", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestServerParser::test_parse_message_beacon", "tests/test_parser.py::TestServerParser::test_parse_message_status" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call", "tests/test_parser.py::TestParserBase::test_call_server", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParserBase::test_call_default", "tests/test_parser.py::TestParserBase::test_call_failed", "tests/test_parser.py::TestParser::test_parse_msg", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_timestamp_h", "tests/test_parser.py::TestParser::test_parse_timestamp_z", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_call", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestNaviter::test_parse_id_string", "tests/test_parser.py::TestServerParser::test_parse_beacon_comment" ]
[]
MIT License
2,209
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
palantir__python-language-server-275
67811f275c8ce61dea78400e2b202bf7f5dc4707
2018-02-26 13:08:12
67811f275c8ce61dea78400e2b202bf7f5dc4707
diff --git a/pyls/_utils.py b/pyls/_utils.py index fb353c3..56a8630 100644 --- a/pyls/_utils.py +++ b/pyls/_utils.py @@ -104,5 +104,5 @@ def format_docstring(contents): def clip_column(column, lines, line_number): # Normalise the position as per the LSP that accepts character positions > line length # https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#position - max_column = len(lines[line_number]) - 1 if len(lines) > line_number else 0 + max_column = len(lines[line_number].rstrip('\r\n')) if len(lines) > line_number else 0 return min(column, max_column) diff --git a/pyls/python_ls.py b/pyls/python_ls.py index 61de356..e1c9297 100644 --- a/pyls/python_ls.py +++ b/pyls/python_ls.py @@ -32,7 +32,7 @@ class _StreamHandlerWrapper(socketserver.StreamRequestHandler, object): def start_tcp_lang_server(bind_addr, port, handler_class): - if not isinstance(handler_class, PythonLanguageServer): + if not issubclass(handler_class, PythonLanguageServer): raise ValueError('Handler class must be an instance of PythonLanguageServer') # Construct a custom wrapper class around the user's handler_class
TCP mode is not working since v0.15.0

When I attempt to start the language server in TCP mode using the command

```
pyls --tcp --host 127.0.0.1 --port 7003
```

it fails to start the language server with the error ```Handler class must be an instance of PythonLanguageServer```
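The fix in the accompanying patch swaps `isinstance` for `issubclass`: `start_tcp_lang_server` is handed the `PythonLanguageServer` class itself, and a class object is never an instance of the base it derives from, so the old check always raised. A minimal standalone sketch of the distinction (the `Base`, `Handler` and `start_server` names are illustrative only, not taken from pyls):

```python
class Base:
    pass


class Handler(Base):
    pass


def start_server(handler_class):
    # handler_class is a class object, not an instance, so the check must use issubclass();
    # isinstance(handler_class, Base) is False because Handler itself is an instance of `type`.
    if not issubclass(handler_class, Base):
        raise ValueError('Handler class must be a subclass of Base')
    return handler_class()


print(isinstance(Handler, Base))   # False -- why the original check rejected a valid class
print(issubclass(Handler, Base))   # True  -- the corrected check accepts it
server = start_server(Handler)     # constructs an instance once the check passes
```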
palantir/python-language-server
diff --git a/test/test_utils.py b/test/test_utils.py index 6675374..9d00686 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -52,6 +52,12 @@ def test_merge_dicts(): def test_clip_column(): - assert _utils.clip_column(5, ['123'], 0) == 2 - assert _utils.clip_column(2, ['\n', '123'], 1) == 2 assert _utils.clip_column(0, [], 0) == 0 + assert _utils.clip_column(2, ['123'], 0) == 2 + assert _utils.clip_column(3, ['123'], 0) == 3 + assert _utils.clip_column(5, ['123'], 0) == 3 + assert _utils.clip_column(0, ['\n', '123'], 0) == 0 + assert _utils.clip_column(1, ['\n', '123'], 0) == 0 + assert _utils.clip_column(2, ['123\n', '123'], 0) == 2 + assert _utils.clip_column(3, ['123\n', '123'], 0) == 3 + assert _utils.clip_column(4, ['123\n', '123'], 1) == 3
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 2 }
0.15
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
cachetools==5.5.2 chardet==5.2.0 colorama==0.4.6 configparser==7.2.0 coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 filelock==3.18.0 future==1.0.0 iniconfig==2.1.0 jedi==0.19.2 json-rpc==1.10.8 mccabe==0.7.0 mock==5.2.0 packaging==24.2 parso==0.8.4 platformdirs==4.3.7 pluggy==1.5.0 pycodestyle==2.13.0 pydocstyle==6.3.0 pyflakes==3.3.1 pyproject-api==1.9.0 pytest==8.3.5 pytest-cov==6.0.0 -e git+https://github.com/palantir/python-language-server.git@67811f275c8ce61dea78400e2b202bf7f5dc4707#egg=python_language_server pytoolconfig==1.3.1 rope==1.13.0 snowballstemmer==2.2.0 tomli==2.2.1 tox==4.25.0 typing_extensions==4.13.0 versioneer==0.29 virtualenv==20.29.3 yapf==0.43.0
name: python-language-server channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - cachetools==5.5.2 - chardet==5.2.0 - colorama==0.4.6 - configparser==7.2.0 - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - filelock==3.18.0 - future==1.0.0 - iniconfig==2.1.0 - jedi==0.19.2 - json-rpc==1.10.8 - mccabe==0.7.0 - mock==5.2.0 - packaging==24.2 - parso==0.8.4 - platformdirs==4.3.7 - pluggy==1.5.0 - pycodestyle==2.13.0 - pydocstyle==6.3.0 - pyflakes==3.3.1 - pyproject-api==1.9.0 - pytest==8.3.5 - pytest-cov==6.0.0 - pytoolconfig==1.3.1 - rope==1.13.0 - snowballstemmer==2.2.0 - tomli==2.2.1 - tox==4.25.0 - typing-extensions==4.13.0 - versioneer==0.29 - virtualenv==20.29.3 - yapf==0.43.0 prefix: /opt/conda/envs/python-language-server
[ "test/test_utils.py::test_clip_column" ]
[]
[ "test/test_utils.py::test_debounce", "test/test_utils.py::test_list_to_string", "test/test_utils.py::test_camel_to_underscore", "test/test_utils.py::test_find_parents", "test/test_utils.py::test_merge_dicts" ]
[]
MIT License
2,210
[ "pyls/_utils.py", "pyls/python_ls.py" ]
[ "pyls/_utils.py", "pyls/python_ls.py" ]
akolar__ogn-lib-10
4526578e5fe7e897c6e0f08edfa180e75e88203f
2018-02-26 13:37:55
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py index f43f6cb..a2ca7ea 100644 --- a/ogn_lib/parser.py +++ b/ogn_lib/parser.py @@ -627,3 +627,33 @@ class ServerParser(Parser): """ return {'comment': comment} + + +class Spot(Parser): + """ + Parser for Spot-formatted APRS messages. + """ + + __destto__ = ['OGSPOT', 'OGSPOT-1'] + + @staticmethod + def _parse_protocol_specific(comment): + """ + Parses the comment string from Spot's APRS messages. + + :param str comment: comment string + :return: parsed comment + :rtype: dict + """ + + fields = comment.split(' ', maxsplit=2) + + if len(fields) < 3: + raise exceptions.ParseError('SPOT comment incorrectly formatted: ' + 'received {}'.format(comment)) + + return { + 'id': fields[0], + 'model': fields[1], + 'status': fields[2] + }
Implement parser for Spot messages (OGSPOT)
akolar/ogn-lib
diff --git a/tests/test_parser.py b/tests/test_parser.py index cb9b05c..bcd3247 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -440,6 +440,18 @@ class TestNaviter: assert data['address_type'] is constants.AddressType.naviter +class TestSpot: + def test_parse_protocol_specific(self): + data = parser.Spot._parse_protocol_specific('id0-2860357 SPOT3 GOOD') + assert data['id'] == 'id0-2860357' + assert data['model'] == 'SPOT3' + assert data['status'] == 'GOOD' + + def test_parse_protocol_specific_fail(self): + with pytest.raises(exceptions.ParseError): + parser.Spot._parse_protocol_specific('id0-2860357 SPOT3') + + class TestServerParser: def test_parse_message_beacon(self, mocker):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 1 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/akolar/ogn-lib.git@4526578e5fe7e897c6e0f08edfa180e75e88203f#egg=ogn_lib packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestSpot::test_parse_protocol_specific", "tests/test_parser.py::TestSpot::test_parse_protocol_specific_fail" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_id_string" ]
[]
MIT License
2,211
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
akolar__ogn-lib-11
5ab4b003315931c1d1f1ac3a9e29532305aa5fff
2018-02-26 13:39:09
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py index a2ca7ea..00093ea 100644 --- a/ogn_lib/parser.py +++ b/ogn_lib/parser.py @@ -629,31 +629,32 @@ class ServerParser(Parser): return {'comment': comment} -class Spot(Parser): +class Spider(Parser): """ - Parser for Spot-formatted APRS messages. + Parser for Spider-formatted APRS messages. """ - __destto__ = ['OGSPOT', 'OGSPOT-1'] + __destto__ = ['OGSPID', 'OGSPID-1'] @staticmethod def _parse_protocol_specific(comment): """ - Parses the comment string from Spot's APRS messages. + Parses the comment string from Spider's APRS messages. :param str comment: comment string :return: parsed comment :rtype: dict """ - fields = comment.split(' ', maxsplit=2) + fields = comment.split(' ', maxsplit=3) - if len(fields) < 3: - raise exceptions.ParseError('SPOT comment incorrectly formatted: ' - 'received {}'.format(comment)) + if len(fields) < 4: + raise exceptions.ParseError('Spider comment incorrectly formatted:' + ' received {}'.format(comment)) return { 'id': fields[0], - 'model': fields[1], - 'status': fields[2] + 'signal_strength': fields[1], + 'spider_id': fields[2], + 'gps_status': fields[3] }
Implement parser for Spider messages (OGSPID)
akolar/ogn-lib
diff --git a/tests/test_parser.py b/tests/test_parser.py index bcd3247..f179511 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -442,14 +442,16 @@ class TestNaviter: class TestSpot: def test_parse_protocol_specific(self): - data = parser.Spot._parse_protocol_specific('id0-2860357 SPOT3 GOOD') - assert data['id'] == 'id0-2860357' - assert data['model'] == 'SPOT3' - assert data['status'] == 'GOOD' + data = parser.Spider._parse_protocol_specific('id300234010617040 +19dB' + ' LWE 3D') + assert data['id'] == 'id300234010617040' + assert data['signal_strength'] == '+19dB' + assert data['spider_id'] == 'LWE' + assert data['gps_status'] == '3D' def test_parse_protocol_specific_fail(self): with pytest.raises(exceptions.ParseError): - parser.Spot._parse_protocol_specific('id0-2860357 SPOT3') + parser.Spider._parse_protocol_specific('id300234010617040 +19dB') class TestServerParser:
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/akolar/ogn-lib.git@5ab4b003315931c1d1f1ac3a9e29532305aa5fff#egg=ogn_lib packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestSpot::test_parse_protocol_specific", "tests/test_parser.py::TestSpot::test_parse_protocol_specific_fail" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_id_string" ]
[]
MIT License
2,212
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
akolar__ogn-lib-12
8321ac10ba83ecf6566956b001b0644c34c7bdf9
2018-02-26 13:56:28
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py index abbe761..4df961f 100644 --- a/ogn_lib/parser.py +++ b/ogn_lib/parser.py @@ -686,3 +686,31 @@ class Spider(Parser): 'spider_id': fields[2], 'gps_status': fields[3] } + + +class Skylines(Parser): + """ + Parser for Spider-formatted APRS messages. + """ + + __destto__ = ['OGSKYL', 'OGSKYL-1'] + + @staticmethod + def _parse_protocol_specific(comment): + """ + Parses the comment string from Spider's APRS messages. + :param str comment: comment string + :return: parsed comment + :rtype: dict + """ + + fields = comment.split(' ', maxsplit=1) + + if len(fields) < 2: + raise exceptions.ParseError('Skylines comment incorrectly formatted:' + ' received {}'.format(comment)) + + return { + 'id': fields[0], + 'vertical_speed': int(fields[1][:3]) * FEET_TO_METERS + }
Implement parser for Skylines messages (OGSKYL)
akolar/ogn-lib
diff --git a/tests/test_parser.py b/tests/test_parser.py index 8fd96a5..fa7c7cd 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -466,6 +466,21 @@ class TestSpider: parser.Spider._parse_protocol_specific('id300234010617040 +19dB') +class TestSkylines: + def test_parse_protocol_specific(self): + data = parser.Skylines._parse_protocol_specific('id2816 +000fpm') + assert data['id'] == 'id2816' + assert data['vertical_speed'] == 0 + + data = parser.Skylines._parse_protocol_specific('id2816 +159fpm') + assert data['id'] == 'id2816' + assert abs(data['vertical_speed'] - 4.57) < 0.1 + + def test_parse_protocol_specific_fail(self): + with pytest.raises(exceptions.ParseError): + parser.Skylines._parse_protocol_specific('id1111') + + class TestServerParser: def test_parse_message_beacon(self, mocker):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-mock" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 iniconfig==2.1.0 -e git+https://github.com/akolar/ogn-lib.git@8321ac10ba83ecf6566956b001b0644c34c7bdf9#egg=ogn_lib packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-mock==3.14.0 tomli==2.2.1
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-mock==3.14.0 - tomli==2.2.1 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestSkylines::test_parse_protocol_specific", "tests/test_parser.py::TestSkylines::test_parse_protocol_specific_fail" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call", "tests/test_parser.py::TestParserBase::test_call_server", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParserBase::test_call_default", "tests/test_parser.py::TestParserBase::test_call_failed", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_msg_calls", "tests/test_parser.py::TestParser::test_parse_msg", "tests/test_parser.py::TestParser::test_parse_msg_full", "tests/test_parser.py::TestParser::test_parse_msg_delete_update", "tests/test_parser.py::TestParser::test_parse_msg_comment", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_timestamp_h", "tests/test_parser.py::TestParser::test_parse_timestamp_z", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_call", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_id_string", "tests/test_parser.py::TestSpot::test_parse_protocol_specific", "tests/test_parser.py::TestSpot::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSpider::test_parse_protocol_specific", "tests/test_parser.py::TestSpider::test_parse_protocol_specific_fail", 
"tests/test_parser.py::TestServerParser::test_parse_message_beacon", "tests/test_parser.py::TestServerParser::test_parse_message_status", "tests/test_parser.py::TestServerParser::test_parse_beacon_comment" ]
[]
MIT License
2,213
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
akolar__ogn-lib-13
dd7b9bf33caee17a839240a134246881e9c7c32f
2018-02-26 14:12:12
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py index 4df961f..e50eb80 100644 --- a/ogn_lib/parser.py +++ b/ogn_lib/parser.py @@ -714,3 +714,32 @@ class Skylines(Parser): 'id': fields[0], 'vertical_speed': int(fields[1][:3]) * FEET_TO_METERS } + + +class LiveTrack24(Parser): + """ + Parser for LiveTrack24-formatted APRS messages. + """ + + __destto__ = ['OGLT24', 'OGLT24-1'] + + @staticmethod + def _parse_protocol_specific(comment): + """ + Parses the comment string from LiveTrack24's APRS messages. + :param str comment: comment string + :return: parsed comment + :rtype: dict + """ + + fields = comment.split(' ', maxsplit=2) + + if len(fields) < 3: + raise exceptions.ParseError('LT24 comment incorrectly formatted:' + ' received {}'.format(comment)) + + return { + 'id': fields[0], + 'vertical_speed': int(fields[1][:3]) * FEET_TO_METERS, + 'source': fields[2] + }
Implement parser for LiveTrack24 messages (OGLT24)
akolar/ogn-lib
diff --git a/tests/test_parser.py b/tests/test_parser.py index fa7c7cd..fb5ec8d 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -481,6 +481,23 @@ class TestSkylines: parser.Skylines._parse_protocol_specific('id1111') +class TestLT24: + def test_parse_protocol_specific(self): + data = parser.LiveTrack24._parse_protocol_specific('id25387 +000fpm GPS') + assert data['id'] == 'id25387' + assert data['vertical_speed'] == 0 + assert data['source'] == 'GPS' + + data = parser.LiveTrack24._parse_protocol_specific('id25387 +159fpm GPS') + assert data['id'] == 'id25387' + assert abs(data['vertical_speed'] - 4.57) < 0.1 + assert data['source'] == 'GPS' + + def test_parse_protocol_specific_fail(self): + with pytest.raises(exceptions.ParseError): + parser.LiveTrack24._parse_protocol_specific('id11111 GPS') + + class TestServerParser: def test_parse_message_beacon(self, mocker):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 0 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-mock" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/akolar/ogn-lib.git@dd7b9bf33caee17a839240a134246881e9c7c32f#egg=ogn_lib packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-mock==3.6.1 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - pytest-mock==3.6.1 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestLT24::test_parse_protocol_specific", "tests/test_parser.py::TestLT24::test_parse_protocol_specific_fail" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call", "tests/test_parser.py::TestParserBase::test_call_server", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParserBase::test_call_default", "tests/test_parser.py::TestParserBase::test_call_failed", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_msg_calls", "tests/test_parser.py::TestParser::test_parse_msg", "tests/test_parser.py::TestParser::test_parse_msg_full", "tests/test_parser.py::TestParser::test_parse_msg_delete_update", "tests/test_parser.py::TestParser::test_parse_msg_comment", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_timestamp_h", "tests/test_parser.py::TestParser::test_parse_timestamp_z", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_call", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_id_string", "tests/test_parser.py::TestSpot::test_parse_protocol_specific", "tests/test_parser.py::TestSpot::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSpider::test_parse_protocol_specific", "tests/test_parser.py::TestSpider::test_parse_protocol_specific_fail", 
"tests/test_parser.py::TestSkylines::test_parse_protocol_specific", "tests/test_parser.py::TestSkylines::test_parse_protocol_specific_fail", "tests/test_parser.py::TestServerParser::test_parse_message_beacon", "tests/test_parser.py::TestServerParser::test_parse_message_status", "tests/test_parser.py::TestServerParser::test_parse_beacon_comment" ]
[]
MIT License
2,214
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
akolar__ogn-lib-16
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
2018-02-26 14:59:17
b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py index 0546bbb..4ae680a 100644 --- a/ogn_lib/parser.py +++ b/ogn_lib/parser.py @@ -153,6 +153,8 @@ class Parser(metaclass=ParserBase): using Parser.PATTERN_ALL """ + raw_message = cls._preprocess_message(raw_message) + match = cls.PATTERN_ALL.match(raw_message) if not match: @@ -187,6 +189,18 @@ class Parser(metaclass=ParserBase): data['raw'] = raw_message return data + @staticmethod + def _preprocess_message(message): + """ + Performs additional preprocessing on the received APRS message. + + :param str message: the received message + :return: processed message + :rtype: str + """ + + return message + @staticmethod def _parse_digipeaters(digipeaters): """ @@ -756,3 +770,11 @@ class LiveTrack24(Parser): 'vertical_speed': Parser._convert_fpm_to_ms(fields[1]), 'source': fields[2] } + + +class Capturs(Parser): + __destto__ = ['OGLT24', 'OGLT24-1'] + + @staticmethod + def _preprocess_message(message): + return message.strip('/')
Implement parser for Capturs messages (OGCAPT)
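The patch and test patch in this row show the intended entry point; as a quick, hedged usage sketch (assuming the patched `ogn_lib` from this row is importable, and keeping only calls that appear in the row's own diff and tests), a Capturs beacon can be passed straight to the class-level `parse_message`, which strips the trailing `/` via `_preprocess_message` before the usual APRS parsing:

```python
# Usage sketch only; assumes the patched ogn_lib from this row is installed.
from ogn_lib import parser

# Capturs beacons carry a stray trailing '/', removed by Capturs._preprocess_message.
msg = ("FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065144h4837.56N/"
       "00233.80E'000/000/")
data = parser.Capturs.parse_message(msg)
print(data['raw'])  # per the patch, 'raw' holds the preprocessed message
```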
akolar/ogn-lib
diff --git a/tests/test_parser.py b/tests/test_parser.py index 2b99868..c1c0a9c 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -177,11 +177,13 @@ class TestParser: mocker.spy(parser.Parser, '_parse_digipeaters') mocker.spy(parser.Parser, '_parse_heading_speed') mocker.spy(parser.Parser, '_parse_protocol_specific') + mocker.spy(parser.Parser, '_preprocess_message') - parser.Parser.parse_message( - 'FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' - '276/075/A=001551') + msg = ('FLRDD83BC>APRS,qAS,EDLF:/163148h5124.56N/00634.42E\'' + '276/075/A=001551') + parser.Parser.parse_message(msg) + parser.Parser._preprocess_message.assert_called_once_with(msg) parser.Parser._parse_timestamp.assert_called_once_with('163148h') assert parser.Parser._parse_location.call_count == 2 parser.Parser._parse_altitude.assert_called_once_with('001551') @@ -232,6 +234,10 @@ class TestParser: assert data['comment'] + def test_preprocess_message(self): + msg = 'asdf' + assert parser.Parser._preprocess_message(msg) == msg + def test_parse_digipeaters(self): data = parser.Parser._parse_digipeaters('qAS,RECEIVER') assert data == { @@ -512,6 +518,19 @@ class TestLT24: parser.LiveTrack24._parse_protocol_specific('id11111 GPS') +class TestCapturs: + def test_process(self): + parser.Capturs.parse_message( + "FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065144h4837.56N/00233.80E'000/000/") + + def test_preprocess(self): + msg_original = ("FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065144h4837.56N/" + "00233.80E'000/000/") + msg = parser.Capturs._preprocess_message(msg_original) + + assert msg == msg_original[:-1] + + class TestServerParser: def test_parse_message_beacon(self, mocker):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-mock" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work -e git+https://github.com/akolar/ogn-lib.git@b2b444e1a990e6e84f09b76d93505c2bd9ed2bf5#egg=ogn_lib packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work pytest-mock==3.14.0 tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - pytest-mock==3.14.0 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestParser::test_parse_msg_calls", "tests/test_parser.py::TestParser::test_preprocess_message", "tests/test_parser.py::TestCapturs::test_process", "tests/test_parser.py::TestCapturs::test_preprocess" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call", "tests/test_parser.py::TestParserBase::test_call_server", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParserBase::test_call_default", "tests/test_parser.py::TestParserBase::test_call_failed", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_msg", "tests/test_parser.py::TestParser::test_parse_msg_full", "tests/test_parser.py::TestParser::test_parse_msg_delete_update", "tests/test_parser.py::TestParser::test_parse_msg_comment", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_timestamp_h", "tests/test_parser.py::TestParser::test_parse_timestamp_z", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestParser::test_conv_fpm_to_ms", "tests/test_parser.py::TestParser::test_conv_fpm_to_ms_sign", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_call", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_id_string", "tests/test_parser.py::TestSpot::test_parse_protocol_specific", "tests/test_parser.py::TestSpot::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSpider::test_parse_protocol_specific", 
"tests/test_parser.py::TestSpider::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSkylines::test_parse_protocol_specific", "tests/test_parser.py::TestSkylines::test_parse_protocol_specific_fail", "tests/test_parser.py::TestLT24::test_parse_protocol_specific", "tests/test_parser.py::TestLT24::test_parse_protocol_specific_fail", "tests/test_parser.py::TestServerParser::test_parse_message_beacon", "tests/test_parser.py::TestServerParser::test_parse_message_status", "tests/test_parser.py::TestServerParser::test_parse_beacon_comment" ]
[]
MIT License
2,216
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
EdinburghGenomics__clarity_scripts-44
57d8c8da1958bf1f5769d385c5d679ab4b012294
2018-02-26 15:24:39
32c21fa719365176a9101a8a7ce72eb07f3ac85d
diff --git a/scripts/populate_review_step.py b/scripts/populate_review_step.py index 3ba1948..9cccfd0 100644 --- a/scripts/populate_review_step.py +++ b/scripts/populate_review_step.py @@ -1,5 +1,6 @@ #!/usr/bin/env python import datetime +from egcg_core import util from cached_property import cached_property from EPPs.common import StepEPP, RestCommunicationEPP, step_argparser from EPPs.config import load_config @@ -18,8 +19,8 @@ class StepPopulator(StepEPP, RestCommunicationEPP): if io[0]['uri'].samples[0].name == sample_name and io[1]['output-type'] == 'ResultFile' ] - def check_rest_data_and_artifacts(self, sample_name, selector): - query_args = {selector: {'sample_id': sample_name}} + def check_rest_data_and_artifacts(self, sample_name): + query_args = {'where': {'sample_id': sample_name}} rest_entities = self.get_documents(self.endpoint, **query_args) artifacts = self.output_artifacts_per_sample(sample_name=sample_name) if len(rest_entities) != len(artifacts): # in sample review this will be 1, in run review this will be more @@ -30,6 +31,18 @@ class StepPopulator(StepEPP, RestCommunicationEPP): ) return rest_entities, artifacts + def delivered(self, sample_name): + d = {'yes': True, 'no': False} + query_args = {'where': {'sample_id': sample_name}} + sample = self.get_documents('samples', **query_args)[0] + return d.get(sample.get('delivered')) + + def processed(self, sample_name): + query_args = {'where': {'sample_id': sample_name}} + sample = self.get_documents('samples', **query_args)[0] + processing_status = util.query_dict(sample, 'aggregated.most_recent_proc.status') + return processing_status == 'finished' + def _run(self): raise NotImplementedError @@ -51,7 +64,7 @@ class PullInfo(StepPopulator): self.lims.put_batch(artifacts_to_upload) def add_artifact_info(self, sample): - rest_entities, artifacts = self.check_rest_data_and_artifacts(sample.name, 'match') + rest_entities, artifacts = self.check_rest_data_and_artifacts(sample.name) artifacts_to_upload = set() for i in range(len(rest_entities)): for art_field, api_field in self.metrics_mapping: @@ -83,15 +96,16 @@ class PullInfo(StepPopulator): class PullRunElementInfo(PullInfo): - endpoint = 'aggregate/run_elements' + endpoint = 'run_elements' metrics_mapping = [ ('RE Id', 'run_element_id'), ('RE Nb Reads', 'passing_filter_reads'), - ('RE Yield', 'clean_yield_in_gb'), - ('RE Yield Q30', 'clean_yield_q30_in_gb'), - ('RE %Q30', 'clean_pc_q30'), + ('RE Yield', 'aggregated.clean_yield_in_gb'), + ('RE Yield Q30', 'aggregated.clean_yield_q30_in_gb'), + ('RE %Q30', 'aggregated.clean_pc_q30'), + ('RE Coverage', 'coverage.mean'), ('RE Estimated Duplicate Rate', 'lane_pc_optical_dups'), - ('RE %Adapter', 'pc_adapter'), + ('RE %Adapter', 'aggregated.pc_adaptor'), ('RE Review status', 'reviewed'), ('RE Review Comment', 'review_comments'), ('RE Review date', 'review_date'), @@ -102,7 +116,6 @@ class PullRunElementInfo(PullInfo): def assess_sample(self, sample): artifacts_to_upload = set() - artifacts = self.output_artifacts_per_sample(sample_name=sample.name) un_reviewed_artifacts = [a for a in artifacts if a.udf.get('RE Review status') not in ['pass', 'fail']] if un_reviewed_artifacts: @@ -111,36 +124,69 @@ class PullRunElementInfo(PullInfo): # Artifacts that pass the review pass_artifacts = [a for a in artifacts if a.udf.get('RE Review status') == 'pass'] - # Artifacts that fail the review fail_artifacts = [a for a in artifacts if a.udf.get('RE Review status') == 'fail'] + # Artifacts that are new + new_artifacts = [a for a in 
artifacts if a.udf.get('RE previous Useable') not in ['yes', 'no']] - target_yield = float(sample.udf.get('Yield for Quoted Coverage (Gb)')) - good_re_yield = sum([float(a.udf.get('RE Yield Q30')) for a in pass_artifacts]) + # skip samples which have been delivered, mark any new REs as such, not changing older RE comments + if self.delivered(sample.name): + for a in new_artifacts: + a.udf['RE Useable Comment'] = 'AR: Delivered' + a.udf['RE Useable'] = 'no' - # Just the right amount of good yield: take it all - if target_yield < good_re_yield < target_yield * 2: - for a in pass_artifacts: - a.udf['RE Useable'] = 'yes' - a.udf['RE Useable Comment'] = 'AR: Good yield' - for a in fail_artifacts: + for a in pass_artifacts + fail_artifacts: + if a.udf.get('RE previous Useable Comment') and a.udf.get('RE previous Useable'): + a.udf['RE Useable Comment'] = a.udf.get('RE previous Useable Comment') + a.udf['RE Useable'] = a.udf.get('RE previous Useable') + + artifacts_to_upload.update(artifacts) + return artifacts_to_upload + + # skip samples which have been processed, mark any new REs as such, not changing older RE comments + if self.processed(sample.name): + for a in pass_artifacts + fail_artifacts: + if a.udf.get('RE previous Useable Comment') and a.udf.get('RE previous Useable'): + a.udf['RE Useable Comment'] = a.udf.get('RE previous Useable Comment') + a.udf['RE Useable'] = a.udf.get('RE previous Useable') + + for a in new_artifacts: + a.udf['RE Useable Comment'] = 'AR: Sample already processed' a.udf['RE Useable'] = 'no' - a.udf['RE Useable Comment'] = 'AR: Failed and not needed' + artifacts_to_upload.update(artifacts) + return artifacts_to_upload + + target_yield = float(sample.udf.get('Required Yield (Gb)')) + good_re_yield = sum([float(a.udf.get('RE Yield')) for a in pass_artifacts]) + + # Increase target coverage by 5% to resolve borderline cases + target_coverage = 1.05 * sample.udf.get('Coverage (X)') + obtained_coverage = float(sum([a.udf.get('RE Coverage') for a in pass_artifacts])) # Too much good yield limit to the best quality ones - elif good_re_yield > target_yield * 2: + if good_re_yield > target_yield * 2 and obtained_coverage > target_coverage: # Too much yield: sort the good artifact by quality pass_artifacts.sort(key=lambda x: x.udf.get('RE %Q30'), reverse=True) current_yield = 0 for a in pass_artifacts: - current_yield += float(a.udf.get('RE Yield Q30')) + current_yield += float(a.udf.get('RE Yield')) if current_yield < target_yield * 2: a.udf['RE Useable'] = 'yes' a.udf['RE Useable Comment'] = 'AR: Good yield' else: a.udf['RE Useable'] = 'no' - a.udf['RE Useable Comment'] = 'AR: To much good yield' + a.udf['RE Useable Comment'] = 'AR: Too much good yield' + for a in fail_artifacts: + a.udf['RE Useable'] = 'no' + a.udf['RE Useable Comment'] = 'AR: Failed and not needed' + artifacts_to_upload.update(artifacts) + + # Just the right amount of good yield: take it all + elif target_yield < good_re_yield < target_yield * 2 or obtained_coverage > target_coverage: + for a in pass_artifacts: + a.udf['RE Useable'] = 'yes' + a.udf['RE Useable Comment'] = 'AR: Good yield' for a in fail_artifacts: a.udf['RE Useable'] = 'no' a.udf['RE Useable Comment'] = 'AR: Failed and not needed' @@ -153,16 +199,16 @@ class PullRunElementInfo(PullInfo): class PullSampleInfo(PullInfo): - endpoint = 'aggregate/samples' + endpoint = 'samples' metrics_mapping = [ - ('SR Yield (Gb)', 'clean_yield_in_gb'), - ('SR %Q30', 'clean_pc_q30'), - ('SR % Mapped', 'pc_mapped_reads'), - ('SR % Duplicates', 
'pc_duplicate_reads'), - ('SR Mean Coverage', 'coverage.mean'), - ('SR Species Found', 'species_contamination'), - ('SR Sex Check Match', 'gender_match'), - ('SR Genotyping Match', 'genotype_match'), + ('SR Yield (Gb)', 'aggregated.clean_yield_in_gb'), + ('SR %Q30', 'aggregated.clean_pc_q30'), + ('SR % Mapped', 'aggregated.pc_mapped_reads'), + ('SR % Duplicates', 'aggregated.pc_duplicate_reads'), + ('SR Mean Coverage', 'aggregated.mean_coverage'), + ('SR Species Found', 'matching_species'), + ('SR Sex Check Match', 'aggregated.gender_match'), + ('SR Genotyping Match', 'aggregated.genotype_match'), ('SR Freemix', 'sample_contamination.freemix'), ('SR Review Status', 'reviewed'), ('SR Review Comments', 'review_comments'), @@ -192,9 +238,9 @@ class PullSampleInfo(PullInfo): def field_from_entity(self, entity, api_field): # TODO: remove once Rest API has a sensible field for species found - if api_field == 'species_contamination': - species = entity[api_field]['contaminant_unique_mapped'] - return ', '.join(k for k in sorted(species) if species[k] > 500) + if api_field == 'matching_species': + species = entity[api_field] + return ', '.join(species) return super().field_from_entity(entity, api_field) @@ -214,7 +260,7 @@ class PushInfo(StepPopulator): _ = self.output_artifacts for sample in self.samples: self.info('Pushing data for sample %s', sample.name) - rest_entities, artifacts = self.check_rest_data_and_artifacts(sample.name, 'where') + rest_entities, artifacts = self.check_rest_data_and_artifacts(sample.name) rest_api_data = {} for e in rest_entities: rest_api_data[e[self.api_id_field]] = e
Add new rules to sample assessment in Run review

The sample assessment uses the required yield Q30 instead of the yield and %Q30, which makes it inaccurate in some cases. Change it to use Yield and %Q30.

- [ ] Add a strategy to deal with samples that have already been delivered
- [ ] Add a strategy to protect previous calls when additional data is generated but not needed, assuming the resulting coverage met the requirement.
- [ ] Add a strategy to take coverage into account.
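A minimal, self-contained sketch of the review rules being asked for is below; all field names and thresholds are illustrative placeholders, not the repository's actual UDFs or API:

```python
# Hedged sketch of the requested assessment rules; names and thresholds are illustrative.
def assess_run_elements(sample, elements):
    # Delivered or already-processed samples keep their previous useable calls.
    if sample.get('delivered') or sample.get('processed'):
        for e in elements:
            e.setdefault('useable', e.get('previous_useable', 'no'))
        return elements

    passing = [e for e in elements if e['review'] == 'pass']
    good_yield = sum(e['yield_gb'] for e in passing)      # raw yield, not yield Q30
    coverage = sum(e['coverage_x'] for e in passing)
    target_yield = sample['required_yield_gb']
    target_coverage = 1.05 * sample['coverage_x']         # 5% margin for borderline cases

    if good_yield > 2 * target_yield and coverage > target_coverage:
        # Too much good data: keep only the best-quality elements, ranked by %Q30.
        passing.sort(key=lambda e: e['pc_q30'], reverse=True)
        kept = 0.0
        for e in passing:
            kept += e['yield_gb']
            e['useable'] = 'yes' if kept < 2 * target_yield else 'no'
    elif good_yield > target_yield or coverage > target_coverage:
        for e in passing:
            e['useable'] = 'yes'
    for e in elements:
        e.setdefault('useable', 'no')                     # failed or not-needed elements
    return elements
```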
EdinburghGenomics/clarity_scripts
diff --git a/tests/test_populate_review_step.py b/tests/test_populate_review_step.py index d1b2eea..6e6c8e0 100644 --- a/tests/test_populate_review_step.py +++ b/tests/test_populate_review_step.py @@ -1,7 +1,7 @@ from pyclarity_lims.entities import Artifact from scripts import populate_review_step as p from tests.test_common import TestEPP, NamedMock -from unittest.mock import Mock, patch, PropertyMock +from unittest.mock import Mock, patch, PropertyMock, call class TestPopulator(TestEPP): @@ -13,7 +13,7 @@ class TestPopulator(TestEPP): self.epp_cls, 'samples', new_callable=PropertyMock( - return_value=[NamedMock(real_name='a_sample', udf={'Yield for Quoted Coverage (Gb)': 95})] + return_value=[NamedMock(real_name='a_sample', udf={'Required Yield (Gb)': 95, 'Coverage (X)': 30})] ) ) self.patched_lims = patch.object(self.epp_cls, 'lims', new_callable=PropertyMock) @@ -30,37 +30,49 @@ class TestPopulator(TestEPP): class TestPullRunElementInfo(TestPopulator): epp_cls = p.PullRunElementInfo fake_rest_entity = { + 'aggregated': {'clean_yield_in_gb': 20, + 'clean_yield_q30_in_gb': 15, + 'clean_pc_q30': 75, + 'pc_adaptor': 1.2}, 'run_element_id': 'id', 'passing_filter_reads': 120000000, - 'clean_yield_in_gb': 20, - 'clean_yield_q30_in_gb': 15, - 'clean_pc_q30': 75, 'lane_pc_optical_dups': 10, - 'pc_adapter': 1.2, 'reviewed': 'pass', 'review_comments': 'alright', - 'review_date': '12_02_2107_12:43:24' + 'review_date': '12_02_2107_12:43:24', } expected_udfs = { 'RE Id': 'id', 'RE Nb Reads': 120000000, 'RE Yield': 20, 'RE Yield Q30': 15, + 'RE Coverage': 34.2, 'RE %Q30': 75, 'RE Estimated Duplicate Rate': 10, 'RE %Adapter': 1.2, 'RE Review status': 'pass', 'RE Review Comment': 'alright', - 'RE Review date': '2107-02-12' + 'RE Review date': '2107-02-12', + 'RE Useable': 'yes', + 'RE Useable Comment': 'AR: Good yield' } def test_pull(self): + + patched_output_artifacts_per_sample = patch.object( + self.epp_cls, + 'output_artifacts_per_sample', + return_value=[Mock(spec=Artifact, udf={'RE Coverage': 34.2}, samples=[NamedMock(real_name='a_sample')])] + ) + with self.patched_lims as pl, self.patched_samples, self.patched_get_docs as pg, \ - self.patched_output_artifacts_per_sample as poa: + patched_output_artifacts_per_sample as poa: self.epp.run() - assert pg.call_count == 1 - pg.assert_called_with(self.epp.endpoint, match={'sample_id': 'a_sample'}) + assert pg.call_count == 3 + assert pg.call_args_list == [call('run_elements', where={'sample_id': 'a_sample'}), + call('samples', where={'sample_id': 'a_sample'}), + call('samples', where={'sample_id': 'a_sample'})] # Check that the udfs have been added assert dict(poa.return_value[0].udf) == self.expected_udfs @@ -72,16 +84,16 @@ class TestPullRunElementInfo(TestPopulator): def patch_output_artifact(output_artifacts): return patch.object(self.epp_cls, 'output_artifacts_per_sample', return_value=output_artifacts) - sample = NamedMock(real_name='a_sample', udf={'Yield for Quoted Coverage (Gb)': 95}) + sample = NamedMock(real_name='a_sample', udf={'Required Yield (Gb)': 95, 'Coverage (X)': 30}) patched_output_artifacts_per_sample = patch_output_artifact([ - Mock(spec=Artifact, udf={'RE Yield Q30': 115, 'RE %Q30': 75, 'RE Review status': 'pass'}), - Mock(spec=Artifact, udf={'RE Yield Q30': 95, 'RE %Q30': 85, 'RE Review status': 'pass'}), - Mock(spec=Artifact, udf={'RE Yield Q30': 15, 'RE %Q30': 70, 'RE Review status': 'fail'}), + Mock(spec=Artifact, udf={'RE Yield': 115, 'RE %Q30': 75, 'RE Review status': 'pass', 'RE Coverage': 35.2}), + Mock(spec=Artifact, 
udf={'RE Yield': 95, 'RE %Q30': 85, 'RE Review status': 'pass', 'RE Coverage': 36.7}), + Mock(spec=Artifact, udf={'RE Yield': 15, 'RE %Q30': 70, 'RE Review status': 'fail', 'RE Coverage': 34.1}), ]) - with patched_output_artifacts_per_sample as poa: + with patched_output_artifacts_per_sample as poa, self.patched_get_docs as pg: self.epp.assess_sample(sample) assert poa.return_value[0].udf['RE Useable'] == 'no' - assert poa.return_value[0].udf['RE Useable Comment'] == 'AR: To much good yield' + assert poa.return_value[0].udf['RE Useable Comment'] == 'AR: Too much good yield' assert poa.return_value[1].udf['RE Useable'] == 'yes' assert poa.return_value[1].udf['RE Useable Comment'] == 'AR: Good yield' @@ -90,38 +102,61 @@ class TestPullRunElementInfo(TestPopulator): assert poa.return_value[2].udf['RE Useable Comment'] == 'AR: Failed and not needed' patched_output_artifacts_per_sample = patch_output_artifact([ - Mock(spec=Artifact, udf={'RE Yield Q30': 115, 'RE %Q30': 85, 'RE Review status': 'pass'}), - Mock(spec=Artifact, udf={'RE Yield Q30': 15, 'RE %Q30': 70, 'RE Review status': 'fail'}), + Mock(spec=Artifact, udf={'RE Yield': 115, 'RE %Q30': 85, 'RE Review status': 'pass', 'RE Coverage': 35.2}), + Mock(spec=Artifact, udf={'RE Yield': 15, 'RE %Q30': 70, 'RE Review status': 'fail', 'RE Coverage': 33.6}), ]) - with patched_output_artifacts_per_sample as poa: + with patched_output_artifacts_per_sample as poa, self.patched_get_docs as pg: self.epp.assess_sample(sample) assert poa.return_value[0].udf['RE Useable'] == 'yes' assert poa.return_value[0].udf['RE Useable Comment'] == 'AR: Good yield' + assert poa.return_value[1].udf['RE Useable'] == 'no' assert poa.return_value[1].udf['RE Useable Comment'] == 'AR: Failed and not needed' + patched_output_artifacts_per_sample = patch_output_artifact([ + Mock(spec=Artifact, udf={'RE Yield': 115, 'RE %Q30': 85, 'RE Review status': 'pass', 'RE Coverage': 35.2}), + Mock(spec=Artifact, udf={'RE Yield': 15, 'RE %Q30': 70, 'RE Review status': 'fail', 'RE Coverage': 33.6}), + ]) + + delivered = 'scripts.populate_review_step.PullRunElementInfo.delivered' + processed = 'scripts.populate_review_step.PullRunElementInfo.processed' + patched_delivered = patch(delivered, return_value=True) + pathed_processed = patch(processed, return_value=True) + + with patched_output_artifacts_per_sample as poa, self.patched_get_docs as pg, patched_delivered: + self.epp.assess_sample(sample) + assert poa.return_value[0].udf['RE Useable'] == 'no' + assert poa.return_value[0].udf['RE Useable Comment'] == 'AR: Delivered' + assert poa.return_value[1].udf['RE Useable'] == 'no' + assert poa.return_value[1].udf['RE Useable Comment'] == 'AR: Delivered' + + with patched_output_artifacts_per_sample as poa, self.patched_get_docs as pg, pathed_processed: + self.epp.assess_sample(sample) + assert poa.return_value[0].udf['RE Useable'] == 'no' + assert poa.return_value[0].udf['RE Useable Comment'] == 'AR: Sample already processed' + assert poa.return_value[1].udf['RE Useable'] == 'no' + assert poa.return_value[1].udf['RE Useable Comment'] == 'AR: Sample already processed' + def test_field_from_entity(self): entity = {'this': {'that': 'other'}} assert self.epp.field_from_entity(entity, 'this.that') == 'other' assert entity == {'this': {'that': 'other'}} # not changed -class TestPullSampleInfo(TestPullRunElementInfo): +class TestPullSampleInfo(TestPopulator): epp_cls = p.PullSampleInfo fake_rest_entity = { 'sample_id': 'a_sample', 'user_sample_id': 'a_user_sample_id', 'clean_yield_in_gb': 5, - 
'clean_pc_q30': 70, - 'pc_mapped_reads': 75, - 'pc_duplicate_reads': 5, - 'coverage': {'mean': 30}, - 'species_contamination': { - 'contaminant_unique_mapped': {'Homo sapiens': 70000, 'Thingius thingy': 501, 'Sus scrofa': 499} - }, - 'gender_match': 'Match', - 'genotype_match': 'Match', + 'aggregated': {'clean_pc_q30': 70, + 'pc_mapped_reads': 75, + 'pc_duplicate_reads': 5, + 'mean_coverage': 30, + 'gender_match': 'Match', + 'genotype_match': 'Match'}, + 'matching_species': ['Homo sapiens', 'Thingius thingy'], 'sample_contamination': {'freemix': 0.1}, 'reviewed': 'pass', 'review_comments': 'alright', @@ -162,7 +197,7 @@ class TestPullSampleInfo(TestPullRunElementInfo): assert poa.return_value[1].udf['SR Useable Comments'] == 'AR: Review failed' def test_field_from_entity(self): - obs = self.epp.field_from_entity(self.fake_rest_entity, 'species_contamination') + obs = self.epp.field_from_entity(self.fake_rest_entity, 'matching_species') assert obs == 'Homo sapiens, Thingius thingy'
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 1 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asana==0.6.7 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 -e git+https://github.com/EdinburghGenomics/clarity_scripts.git@57d8c8da1958bf1f5769d385c5d679ab4b012294#egg=clarity_scripts EGCG-Core==0.8.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==2.8 MarkupSafe==2.0.1 oauthlib==3.2.2 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyclarity-lims==0.4.8 pyparsing==3.1.4 pytest==7.0.1 PyYAML==6.0.1 requests==2.14.2 requests-oauthlib==0.8.0 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: clarity_scripts channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - asana==0.6.7 - attrs==22.2.0 - cached-property==1.5.2 - egcg-core==0.8.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==2.8 - markupsafe==2.0.1 - oauthlib==3.2.2 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyclarity-lims==0.4.8 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==6.0.1 - requests==2.14.2 - requests-oauthlib==0.8.0 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/clarity_scripts
[ "tests/test_populate_review_step.py::TestPullRunElementInfo::test_assess_sample", "tests/test_populate_review_step.py::TestPullRunElementInfo::test_pull", "tests/test_populate_review_step.py::TestPullSampleInfo::test_field_from_entity" ]
[]
[ "tests/test_populate_review_step.py::TestEPP::test_init", "tests/test_populate_review_step.py::TestPopulator::test_init", "tests/test_populate_review_step.py::TestPullRunElementInfo::test_field_from_entity", "tests/test_populate_review_step.py::TestPullRunElementInfo::test_init", "tests/test_populate_review_step.py::TestPullSampleInfo::test_assess_sample", "tests/test_populate_review_step.py::TestPullSampleInfo::test_init", "tests/test_populate_review_step.py::TestPushRunElementInfo::test_init", "tests/test_populate_review_step.py::TestPushRunElementInfo::test_push", "tests/test_populate_review_step.py::TestPushSampleInfo::test_init", "tests/test_populate_review_step.py::TestPushSampleInfo::test_push" ]
[]
MIT License
2,217
[ "scripts/populate_review_step.py" ]
[ "scripts/populate_review_step.py" ]
theolind__pymysensors-122
b6deffc604865bba583bf82c089fdfc1d38da4e8
2018-02-26 19:06:22
b6deffc604865bba583bf82c089fdfc1d38da4e8
diff --git a/mysensors/__init__.py b/mysensors/__init__.py index 520165a..ef9dec3 100644 --- a/mysensors/__init__.py +++ b/mysensors/__init__.py @@ -31,6 +31,10 @@ def get_const(protocol_version): version = protocol_version if parse_ver('1.5') <= parse_ver(version) < parse_ver('2.0'): path = 'mysensors.const_15' + elif parse_ver(version) >= parse_ver('2.2'): + path = 'mysensors.const_22' + elif parse_ver(version) >= parse_ver('2.1'): + path = 'mysensors.const_21' elif parse_ver(version) >= parse_ver('2.0'): path = 'mysensors.const_20' else: @@ -127,8 +131,8 @@ class Gateway(object): type=self.const.MessageType.set, payload=value) return None - def _handle_heartbeat(self, msg): - """Process a heartbeat message.""" + def _handle_smartsleep(self, msg): + """Process a message before going back to smartsleep.""" if not self.is_sensor(msg.node_id): return while self.sensors[msg.node_id].queue: @@ -411,8 +415,8 @@ class Gateway(object): thread has sent all previously queued commands to the FIFO queue. If the sensor attribute new_state returns True, the command will not be put on the queue, but the internal sensor state will be updated. When a - heartbeat response is received, the internal state will be pushed to - the sensor, via _handle_heartbeat method. + smartsleep message is received, the internal state will be pushed to + the sensor, via _handle_smartsleep method. """ if not self.is_sensor(sensor_id, child_id): return diff --git a/mysensors/const_20.py b/mysensors/const_20.py index 98ff4fd..a27b6cd 100644 --- a/mysensors/const_20.py +++ b/mysensors/const_20.py @@ -1,4 +1,4 @@ -"""MySensors constants for version 1.5 of MySensors.""" +"""MySensors constants for version 2.0 of MySensors.""" from enum import IntEnum import voluptuous as vol @@ -246,11 +246,6 @@ class Internal(IntEnum): I_REGISTRATION_REQUEST = 26 # Register request to GW I_REGISTRATION_RESPONSE = 27 # Register response from GW I_DEBUG = 28 # Debug message - I_SIGNAL_REPORT_REQUEST = 29 # Device signal strength request - I_SIGNAL_REPORT_REVERSE = 30 # Internal - I_SIGNAL_REPORT_RESPONSE = 31 # Device signal strength response (RSSI) - I_PRE_SLEEP_NOTIFICATION = 32 # Message sent before node is going to sleep - I_POST_SLEEP_NOTIFICATION = 33 # Message sent after node woke up class Stream(IntEnum): @@ -344,15 +339,6 @@ VALID_INTERNAL.update({ Internal.I_REGISTRATION_REQUEST: str, Internal.I_REGISTRATION_RESPONSE: str, Internal.I_DEBUG: str, - Internal.I_SIGNAL_REPORT_REQUEST: str, - Internal.I_SIGNAL_REPORT_REVERSE: vol.All( - vol.Coerce(int), vol.Coerce(str)), - Internal.I_SIGNAL_REPORT_RESPONSE: vol.All( - vol.Coerce(int), vol.Coerce(str)), - Internal.I_PRE_SLEEP_NOTIFICATION: vol.All( - vol.Coerce(int), vol.Coerce(str)), - Internal.I_POST_SLEEP_NOTIFICATION: vol.All( - vol.Coerce(int), vol.Coerce(str)), }) VALID_PAYLOADS = { @@ -370,7 +356,7 @@ HANDLE_INTERNAL.update({ 'node_id': 255, 'ack': 0, 'sub_type': Internal.I_DISCOVER, 'payload': ''}}, Internal.I_HEARTBEAT_RESPONSE: { - 'fun': '_handle_heartbeat'}, + 'fun': '_handle_smartsleep'}, Internal.I_DISCOVER_RESPONSE: { 'is_sensor': True}, }) diff --git a/mysensors/const_21.py b/mysensors/const_21.py new file mode 100644 index 0000000..7e08924 --- /dev/null +++ b/mysensors/const_21.py @@ -0,0 +1,107 @@ +"""MySensors constants for version 2.1 of MySensors.""" +from enum import IntEnum + +# pylint: disable=unused-import +from mysensors.const_20 import (HANDLE_INTERNAL, MAX_NODE_ID, # noqa: F401 + VALID_INTERNAL, VALID_PRESENTATION, + VALID_SETREQ, VALID_STREAM, VALID_TYPES, + 
MessageType, Presentation, SetReq, Stream) + + +class Internal(IntEnum): + """MySensors internal sub-types.""" + + # pylint: disable=too-few-public-methods + # Use this to report the battery level (in percent 0-100). + I_BATTERY_LEVEL = 0 + # Sensors can request the current time from the Controller using this + # message. The time will be reported as the seconds since 1970 + I_TIME = 1 + # Sensors report their library version at startup using this message type + I_VERSION = 2 + # Use this to request a unique node id from the controller. + I_ID_REQUEST = 3 + # Id response back to sensor. Payload contains sensor id. + I_ID_RESPONSE = 4 + # Start/stop inclusion mode of the Controller (1=start, 0=stop). + I_INCLUSION_MODE = 5 + # Config request from node. Reply with (M)etric or (I)mperal back to sensor + I_CONFIG = 6 + # When a sensor starts up, it broadcast a search request to all neighbor + # nodes. They reply with a I_FIND_PARENT_RESPONSE. + I_FIND_PARENT_REQUEST = 7 + I_FIND_PARENT = 7 # alias from version 2.0 + # Reply message type to I_FIND_PARENT request. + I_FIND_PARENT_RESPONSE = 8 + # Sent by the gateway to the Controller to trace-log a message + I_LOG_MESSAGE = 9 + # A message that can be used to transfer child sensors + # (from EEPROM routing table) of a repeating node. + I_CHILDREN = 10 + # Optional sketch name that can be used to identify sensor in the + # Controller GUI + I_SKETCH_NAME = 11 + # Optional sketch version that can be reported to keep track of the version + # of sensor in the Controller GUI. + I_SKETCH_VERSION = 12 + # Used by OTA firmware updates. Request for node to reboot. + I_REBOOT = 13 + # Send by gateway to controller when startup is complete + I_GATEWAY_READY = 14 + # Provides signing related preferences (first byte is preference version). + I_SIGNING_PRESENTATION = 15 + I_REQUEST_SIGNING = 15 # alias from version 1.5 + # Request for a nonce. + I_NONCE_REQUEST = 16 + I_GET_NONCE = 16 # alias from version 1.5 + # Payload is nonce data. + I_NONCE_RESPONSE = 17 + I_GET_NONCE_RESPONSE = 17 # alias from version 1.5 + I_HEARTBEAT_REQUEST = 18 + I_HEARTBEAT = 18 # alias from version 2.0 + I_PRESENTATION = 19 + I_DISCOVER_REQUEST = 20 + I_DISCOVER = 20 # alias from version 2.0 + I_DISCOVER_RESPONSE = 21 + I_HEARTBEAT_RESPONSE = 22 + # Node is locked (reason in string-payload). 
+ I_LOCKED = 23 + I_PING = 24 # Ping sent to node, payload incremental hop counter + # In return to ping, sent back to sender, payload incremental hop counter + I_PONG = 25 + I_REGISTRATION_REQUEST = 26 # Register request to GW + I_REGISTRATION_RESPONSE = 27 # Register response from GW + I_DEBUG = 28 # Debug message + + +VALID_MESSAGE_TYPES = { + MessageType.presentation: list(Presentation), + MessageType.set: list(SetReq), + MessageType.req: list(SetReq), + MessageType.internal: list(Internal), + MessageType.stream: list(Stream), +} + + +VALID_INTERNAL = dict(VALID_INTERNAL) +VALID_INTERNAL.update({ + Internal.I_FIND_PARENT_REQUEST: '', + Internal.I_HEARTBEAT_REQUEST: '', + Internal.I_DISCOVER_REQUEST: '', +}) + +VALID_PAYLOADS = { + MessageType.presentation: VALID_PRESENTATION, + MessageType.set: VALID_SETREQ, + MessageType.req: {member: '' for member in list(SetReq)}, + MessageType.internal: VALID_INTERNAL, + MessageType.stream: VALID_STREAM, +} + +HANDLE_INTERNAL = dict(HANDLE_INTERNAL) +HANDLE_INTERNAL.update({ + Internal.I_GATEWAY_READY: { + 'log': 'info', 'msg': { + 'node_id': 255, 'ack': 0, 'sub_type': Internal.I_DISCOVER_REQUEST, + 'payload': ''}}, +}) diff --git a/mysensors/const_22.py b/mysensors/const_22.py new file mode 100644 index 0000000..6289960 --- /dev/null +++ b/mysensors/const_22.py @@ -0,0 +1,119 @@ +"""MySensors constants for version 2.2 of MySensors.""" +from enum import IntEnum + +import voluptuous as vol + +# pylint: disable=unused-import +from mysensors.const_21 import (HANDLE_INTERNAL, MAX_NODE_ID, # noqa: F401 + VALID_INTERNAL, VALID_PRESENTATION, + VALID_SETREQ, VALID_STREAM, VALID_TYPES, + MessageType, Presentation, SetReq, Stream) + + +class Internal(IntEnum): + """MySensors internal sub-types.""" + + # pylint: disable=too-few-public-methods + # Use this to report the battery level (in percent 0-100). + I_BATTERY_LEVEL = 0 + # Sensors can request the current time from the Controller using this + # message. The time will be reported as the seconds since 1970 + I_TIME = 1 + # Sensors report their library version at startup using this message type + I_VERSION = 2 + # Use this to request a unique node id from the controller. + I_ID_REQUEST = 3 + # Id response back to sensor. Payload contains sensor id. + I_ID_RESPONSE = 4 + # Start/stop inclusion mode of the Controller (1=start, 0=stop). + I_INCLUSION_MODE = 5 + # Config request from node. Reply with (M)etric or (I)mperal back to sensor + I_CONFIG = 6 + # When a sensor starts up, it broadcast a search request to all neighbor + # nodes. They reply with a I_FIND_PARENT_RESPONSE. + I_FIND_PARENT_REQUEST = 7 + I_FIND_PARENT = 7 # alias from version 2.0 + # Reply message type to I_FIND_PARENT request. + I_FIND_PARENT_RESPONSE = 8 + # Sent by the gateway to the Controller to trace-log a message + I_LOG_MESSAGE = 9 + # A message that can be used to transfer child sensors + # (from EEPROM routing table) of a repeating node. + I_CHILDREN = 10 + # Optional sketch name that can be used to identify sensor in the + # Controller GUI + I_SKETCH_NAME = 11 + # Optional sketch version that can be reported to keep track of the version + # of sensor in the Controller GUI. + I_SKETCH_VERSION = 12 + # Used by OTA firmware updates. Request for node to reboot. + I_REBOOT = 13 + # Send by gateway to controller when startup is complete + I_GATEWAY_READY = 14 + # Provides signing related preferences (first byte is preference version). + I_SIGNING_PRESENTATION = 15 + I_REQUEST_SIGNING = 15 # alias from version 1.5 + # Request for a nonce. 
+ I_NONCE_REQUEST = 16 + I_GET_NONCE = 16 # alias from version 1.5 + # Payload is nonce data. + I_NONCE_RESPONSE = 17 + I_GET_NONCE_RESPONSE = 17 # alias from version 1.5 + I_HEARTBEAT_REQUEST = 18 + I_HEARTBEAT = 18 # alias from version 2.0 + I_PRESENTATION = 19 + I_DISCOVER_REQUEST = 20 + I_DISCOVER = 20 # alias from version 2.0 + I_DISCOVER_RESPONSE = 21 + I_HEARTBEAT_RESPONSE = 22 + # Node is locked (reason in string-payload). + I_LOCKED = 23 + I_PING = 24 # Ping sent to node, payload incremental hop counter + # In return to ping, sent back to sender, payload incremental hop counter + I_PONG = 25 + I_REGISTRATION_REQUEST = 26 # Register request to GW + I_REGISTRATION_RESPONSE = 27 # Register response from GW + I_DEBUG = 28 # Debug message + I_SIGNAL_REPORT_REQUEST = 29 # Device signal strength request + I_SIGNAL_REPORT_REVERSE = 30 # Internal + I_SIGNAL_REPORT_RESPONSE = 31 # Device signal strength response (RSSI) + I_PRE_SLEEP_NOTIFICATION = 32 # Message sent before node is going to sleep + I_POST_SLEEP_NOTIFICATION = 33 # Message sent after node woke up + + +VALID_MESSAGE_TYPES = { + MessageType.presentation: list(Presentation), + MessageType.set: list(SetReq), + MessageType.req: list(SetReq), + MessageType.internal: list(Internal), + MessageType.stream: list(Stream), +} + + +VALID_INTERNAL = dict(VALID_INTERNAL) +VALID_INTERNAL.update({ + Internal.I_SIGNAL_REPORT_REQUEST: str, + Internal.I_SIGNAL_REPORT_REVERSE: vol.All( + vol.Coerce(int), vol.Coerce(str)), + Internal.I_SIGNAL_REPORT_RESPONSE: vol.All( + vol.Coerce(int), vol.Coerce(str)), + Internal.I_PRE_SLEEP_NOTIFICATION: vol.All( + vol.Coerce(int), vol.Coerce(str)), + Internal.I_POST_SLEEP_NOTIFICATION: vol.All( + vol.Coerce(int), vol.Coerce(str)), +}) + +VALID_PAYLOADS = { + MessageType.presentation: VALID_PRESENTATION, + MessageType.set: VALID_SETREQ, + MessageType.req: {member: '' for member in list(SetReq)}, + MessageType.internal: VALID_INTERNAL, + MessageType.stream: VALID_STREAM, +} + +HANDLE_INTERNAL = dict(HANDLE_INTERNAL) +HANDLE_INTERNAL.pop(Internal.I_HEARTBEAT_RESPONSE, None) +HANDLE_INTERNAL.update({ + Internal.I_PRE_SLEEP_NOTIFICATION: { + 'fun': '_handle_smartsleep'}, +})
Add support for I_PRE_SLEEP_NOTIFICATION in 2.2.0

Version 2.2.0 changed the behavior of smartsleep. Instead of sending a heartbeat before going back to sleep, the node now sends an `I_PRE_SLEEP_NOTIFICATION` internal message. The node also sends an `I_POST_SLEEP_NOTIFICATION` after waking up from sleep. See: https://github.com/mysensors/MySensors/pull/722

**Breaking change** The change outlined above broke the smartsleep feature in pymysensors for users running version 2.2.0 of MySensors.

**Suggested fix** Use different internal message types for the smartsleep method depending on the MySensors version. Make a new const module and update these lines: https://github.com/theolind/pymysensors/blob/dev/mysensors/const_20.py#L372-L373
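The suggested fix boils down to a per-version dispatch table for internal messages; the self-contained sketch below illustrates that idea only (the constant and handler names are placeholders, not pymysensors' actual modules):

```python
# Hedged sketch of version-specific internal-message handling; names are illustrative.
HANDLERS_20 = {'I_HEARTBEAT_RESPONSE': '_handle_smartsleep'}

# For 2.2, smartsleep is triggered by I_PRE_SLEEP_NOTIFICATION instead.
HANDLERS_22 = dict(HANDLERS_20)
HANDLERS_22.pop('I_HEARTBEAT_RESPONSE')
HANDLERS_22['I_PRE_SLEEP_NOTIFICATION'] = '_handle_smartsleep'

def smartsleep_handler(protocol_version, sub_type):
    # Naive string comparison is adequate for this two-way sketch only.
    table = HANDLERS_22 if protocol_version >= '2.2' else HANDLERS_20
    return table.get(sub_type)

assert smartsleep_handler('2.0', 'I_HEARTBEAT_RESPONSE') == '_handle_smartsleep'
assert smartsleep_handler('2.2', 'I_PRE_SLEEP_NOTIFICATION') == '_handle_smartsleep'
assert smartsleep_handler('2.2', 'I_HEARTBEAT_RESPONSE') is None
```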
theolind/pymysensors
diff --git a/tests/test_message.py b/tests/test_message.py index bdd7dda..aac8a1b 100644 --- a/tests/test_message.py +++ b/tests/test_message.py @@ -162,6 +162,19 @@ INTERNAL_FIXTURES_20.update({ 'I_REGISTRATION_REQUEST': '2.0.0', 'I_REGISTRATION_RESPONSE': '1', 'I_DEBUG': 'test debug', +}) + + +INTERNAL_FIXTURES_21 = dict(INTERNAL_FIXTURES_20) +INTERNAL_FIXTURES_21.update({ + 'I_FIND_PARENT_REQUEST': '', + 'I_HEARTBEAT_REQUEST': '', + 'I_DISCOVER_REQUEST': '', +}) + + +INTERNAL_FIXTURES_22 = dict(INTERNAL_FIXTURES_21) +INTERNAL_FIXTURES_22.update({ 'I_SIGNAL_REPORT_REQUEST': 'test', 'I_SIGNAL_REPORT_REVERSE': '123', 'I_SIGNAL_REPORT_RESPONSE': '123', @@ -274,7 +287,8 @@ def test_validate_internal(): """Test Internal messages.""" versions = [ ('1.4', INTERNAL_FIXTURES_14), ('1.5', INTERNAL_FIXTURES_15), - ('2.0', INTERNAL_FIXTURES_20)] + ('2.0', INTERNAL_FIXTURES_20), ('2.1', INTERNAL_FIXTURES_21), + ('2.2', INTERNAL_FIXTURES_22)] for protocol_version, fixture in versions: gateway = get_gateway(protocol_version) const = get_const(protocol_version) @@ -287,7 +301,15 @@ def test_validate_internal(): return_value = None sub_type = const.Internal[name] msg = Message('1;255;3;0;{};{}\n'.format(sub_type, _payload)) - valid = msg.validate(protocol_version) + try: + valid = msg.validate(protocol_version) + except vol.MultipleInvalid: + print('fixture version: ', protocol_version) + print('gateway version: ', gateway.protocol_version) + print('name: ', name) + print('subtype: ', sub_type) + print('payload: ', _payload) + raise assert valid == { 'node_id': 1, 'child_id': 255, 'type': 3, 'ack': 0, 'sub_type': sub_type, 'payload': _payload} diff --git a/tests/test_mysensors.py b/tests/test_mysensors.py index 56879bd..e9eb14b 100644 --- a/tests/test_mysensors.py +++ b/tests/test_mysensors.py @@ -668,8 +668,8 @@ class TestGateway20(TestGateway): ret = self.gateway.handle_queue() self.assertEqual(ret, '1;255;3;0;19;\n') - def test_heartbeat(self): - """Test heartbeat message.""" + def test_smartsleep(self): + """Test smartsleep feature.""" sensor = self._add_sensor(1) sensor.children[0] = ChildSensor( 0, self.gateway.const.Presentation.S_LIGHT_LEVEL) @@ -708,8 +708,8 @@ class TestGateway20(TestGateway): # nothing has changed self.assertEqual(ret, None) - def test_heartbeat_from_unknown(self): - """Test heartbeat message from unknown node.""" + def test_smartsleep_from_unknown(self): + """Test smartsleep message from unknown node.""" self.gateway.logic('1;255;3;0;22;\n') ret = self.gateway.handle_queue() self.assertEqual(ret, '1;255;3;0;19;\n') @@ -774,6 +774,81 @@ class TestGateway20(TestGateway): '10.0,10.0,10.0') +class TestGateway21(TestGateway20): + """Use protocol_version 2.1.""" + + def setUp(self): + """Set up gateway.""" + self.gateway = Gateway(protocol_version='2.1') + + +class TestGateway22(TestGateway21): + """Use protocol_version 2.2.""" + + def setUp(self): + """Set up gateway.""" + self.gateway = Gateway(protocol_version='2.2') + + def test_smartsleep(self): + """Test smartsleep feature.""" + sensor = self._add_sensor(1) + sensor.children[0] = ChildSensor( + 0, self.gateway.const.Presentation.S_LIGHT_LEVEL) + self.gateway.logic('1;0;1;0;23;43\n') + ret = self.gateway.handle_queue() + self.assertEqual(ret, None) + # pre sleep message + self.gateway.logic('1;255;3;0;32;500\n') + ret = self.gateway.handle_queue() + # nothing has changed + self.assertEqual(ret, None) + # change from controller side + self.gateway.set_child_value( + 1, 0, self.gateway.const.SetReq.V_LIGHT_LEVEL, '57') + ret = 
self.gateway.handle_queue() + # no pre sleep message + self.assertEqual(ret, None) + # pre sleep message comes in + self.gateway.logic('1;255;3;0;32;500\n') + ret = self.gateway.handle_queue() + # instance responds with new values + self.assertEqual(ret, '1;0;1;0;23;57\n') + # request from node + self.gateway.logic('1;0;2;0;23;\n') + ret = self.gateway.handle_queue() + # no pre sleep message + self.assertEqual(ret, None) + # pre sleep message + self.gateway.logic('1;255;3;0;32;500\n') + ret = self.gateway.handle_queue() + # instance responds to request with current value + self.assertEqual(ret, '1;0;1;0;23;57\n') + # pre sleep message + self.gateway.logic('1;255;3;0;32;500\n') + ret = self.gateway.handle_queue() + # nothing has changed + self.assertEqual(ret, None) + + def test_smartsleep_from_unknown(self): + """Test smartsleep message from unknown node.""" + self.gateway.logic('1;255;3;0;32;500\n') + ret = self.gateway.handle_queue() + self.assertEqual(ret, '1;255;3;0;19;\n') + + def test_set_with_new_state(self): + """Test set message with populated new_state.""" + sensor = self._add_sensor(1) + sensor.children[0] = ChildSensor( + 0, self.gateway.const.Presentation.S_LIGHT_LEVEL) + self.gateway.logic('1;0;1;0;23;43\n') + self.gateway.logic('1;255;3;0;32;500\n') + self.gateway.logic('1;0;1;0;23;57\n') + self.assertEqual( + sensor.children[0].values[self.gateway.const.SetReq.V_LIGHT_LEVEL], + sensor.new_state[0].values[ + self.gateway.const.SetReq.V_LIGHT_LEVEL]) + + def test_gateway_bad_protocol(): """Test initializing gateway with a bad protocol_version.""" gateway = Gateway(protocol_version=None)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.12
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "flake8", "pylint", "pydocstyle" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7 attrs==22.2.0 certifi==2021.5.30 crcmod==1.7 dill==0.3.4 flake8==5.0.4 importlib-metadata==4.2.0 iniconfig==1.1.1 intelhex==2.1 isort==5.10.1 lazy-object-proxy==1.7.1 mccabe==0.7.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pydocstyle==6.3.0 pyflakes==2.5.0 pylint==2.13.9 -e git+https://github.com/theolind/pymysensors.git@b6deffc604865bba583bf82c089fdfc1d38da4e8#egg=pymysensors pyparsing==3.1.4 pyserial==3.1.1 pytest==7.0.1 snowballstemmer==2.2.0 tomli==1.2.3 typed-ast==1.5.5 typing_extensions==4.1.1 voluptuous==0.10.5 wrapt==1.16.0 zipp==3.6.0
name: pymysensors channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==2.11.7 - attrs==22.2.0 - crcmod==1.7 - dill==0.3.4 - flake8==5.0.4 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - intelhex==2.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pydocstyle==6.3.0 - pyflakes==2.5.0 - pylint==2.13.9 - pyparsing==3.1.4 - pyserial==3.1.1 - pytest==7.0.1 - snowballstemmer==2.2.0 - tomli==1.2.3 - typed-ast==1.5.5 - typing-extensions==4.1.1 - voluptuous==0.10.5 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/pymysensors
[ "tests/test_message.py::test_validate_internal", "tests/test_mysensors.py::TestGateway22::test_set_with_new_state", "tests/test_mysensors.py::TestGateway22::test_smartsleep", "tests/test_mysensors.py::TestGateway22::test_smartsleep_from_unknown" ]
[]
[ "tests/test_message.py::TestMessage::test_decode", "tests/test_message.py::TestMessage::test_decode_bad_message", "tests/test_message.py::TestMessage::test_encode", "tests/test_message.py::TestMessage::test_encode_bad_message", "tests/test_message.py::test_validate_pres", "tests/test_message.py::test_validate_bad_pres", "tests/test_message.py::test_validate_set", "tests/test_mysensors.py::TestGateway::test_bad_battery_level", "tests/test_mysensors.py::TestGateway::test_bad_file_name", "tests/test_mysensors.py::TestGateway::test_battery_level", "tests/test_mysensors.py::TestGateway::test_callback", "tests/test_mysensors.py::TestGateway::test_callback_exception", "tests/test_mysensors.py::TestGateway::test_child_validate", "tests/test_mysensors.py::TestGateway::test_id_request_with_node_zero", "tests/test_mysensors.py::TestGateway::test_internal_config", "tests/test_mysensors.py::TestGateway::test_internal_gateway_ready", "tests/test_mysensors.py::TestGateway::test_internal_id_request", "tests/test_mysensors.py::TestGateway::test_internal_log_message", "tests/test_mysensors.py::TestGateway::test_internal_sketch_name", "tests/test_mysensors.py::TestGateway::test_internal_sketch_version", "tests/test_mysensors.py::TestGateway::test_internal_time", "tests/test_mysensors.py::TestGateway::test_json_empty_file_good_bak", "tests/test_mysensors.py::TestGateway::test_json_empty_files", "tests/test_mysensors.py::TestGateway::test_json_no_files", "tests/test_mysensors.py::TestGateway::test_json_persistence", "tests/test_mysensors.py::TestGateway::test_json_upgrade", "tests/test_mysensors.py::TestGateway::test_logic_bad_message", "tests/test_mysensors.py::TestGateway::test_non_presented_sensor", "tests/test_mysensors.py::TestGateway::test_persistence_at_init", "tests/test_mysensors.py::TestGateway::test_pickle_empty_files", "tests/test_mysensors.py::TestGateway::test_pickle_persistence", "tests/test_mysensors.py::TestGateway::test_pickle_upgrade", "tests/test_mysensors.py::TestGateway::test_present_humidity_sensor", "tests/test_mysensors.py::TestGateway::test_present_light_level_sensor", "tests/test_mysensors.py::TestGateway::test_present_same_child", "tests/test_mysensors.py::TestGateway::test_present_to_non_sensor", "tests/test_mysensors.py::TestGateway::test_presentation_arduino_node", "tests/test_mysensors.py::TestGateway::test_req", "tests/test_mysensors.py::TestGateway::test_req_notasensor", "tests/test_mysensors.py::TestGateway::test_req_novalue", "tests/test_mysensors.py::TestGateway::test_req_zerovalue", "tests/test_mysensors.py::TestGateway::test_set_and_reboot", "tests/test_mysensors.py::TestGateway::test_set_bad_battery_attribute", "tests/test_mysensors.py::TestGateway::test_set_child_no_children", "tests/test_mysensors.py::TestGateway::test_set_child_value", "tests/test_mysensors.py::TestGateway::test_set_child_value_bad_ack", "tests/test_mysensors.py::TestGateway::test_set_child_value_bad_type", "tests/test_mysensors.py::TestGateway::test_set_child_value_no_sensor", "tests/test_mysensors.py::TestGateway::test_set_child_value_value_type", "tests/test_mysensors.py::TestGateway::test_set_forecast", "tests/test_mysensors.py::TestGateway::test_set_humidity_level", "tests/test_mysensors.py::TestGateway::test_set_light_level", "tests/test_mysensors.py::TestGateway15::test_bad_battery_level", "tests/test_mysensors.py::TestGateway15::test_bad_file_name", "tests/test_mysensors.py::TestGateway15::test_battery_level", "tests/test_mysensors.py::TestGateway15::test_callback", 
"tests/test_mysensors.py::TestGateway15::test_callback_exception", "tests/test_mysensors.py::TestGateway15::test_child_validate", "tests/test_mysensors.py::TestGateway15::test_id_request_with_node_zero", "tests/test_mysensors.py::TestGateway15::test_internal_config", "tests/test_mysensors.py::TestGateway15::test_internal_gateway_ready", "tests/test_mysensors.py::TestGateway15::test_internal_id_request", "tests/test_mysensors.py::TestGateway15::test_internal_log_message", "tests/test_mysensors.py::TestGateway15::test_internal_sketch_name", "tests/test_mysensors.py::TestGateway15::test_internal_sketch_version", "tests/test_mysensors.py::TestGateway15::test_internal_time", "tests/test_mysensors.py::TestGateway15::test_json_empty_file_good_bak", "tests/test_mysensors.py::TestGateway15::test_json_empty_files", "tests/test_mysensors.py::TestGateway15::test_json_no_files", "tests/test_mysensors.py::TestGateway15::test_json_persistence", "tests/test_mysensors.py::TestGateway15::test_json_upgrade", "tests/test_mysensors.py::TestGateway15::test_logic_bad_message", "tests/test_mysensors.py::TestGateway15::test_non_presented_sensor", "tests/test_mysensors.py::TestGateway15::test_persistence_at_init", "tests/test_mysensors.py::TestGateway15::test_pickle_empty_files", "tests/test_mysensors.py::TestGateway15::test_pickle_persistence", "tests/test_mysensors.py::TestGateway15::test_pickle_upgrade", "tests/test_mysensors.py::TestGateway15::test_present_humidity_sensor", "tests/test_mysensors.py::TestGateway15::test_present_light_level_sensor", "tests/test_mysensors.py::TestGateway15::test_present_same_child", "tests/test_mysensors.py::TestGateway15::test_present_to_non_sensor", "tests/test_mysensors.py::TestGateway15::test_presentation_arduino_node", "tests/test_mysensors.py::TestGateway15::test_req", "tests/test_mysensors.py::TestGateway15::test_req_notasensor", "tests/test_mysensors.py::TestGateway15::test_req_novalue", "tests/test_mysensors.py::TestGateway15::test_req_zerovalue", "tests/test_mysensors.py::TestGateway15::test_set_and_reboot", "tests/test_mysensors.py::TestGateway15::test_set_bad_battery_attribute", "tests/test_mysensors.py::TestGateway15::test_set_child_no_children", "tests/test_mysensors.py::TestGateway15::test_set_child_value", "tests/test_mysensors.py::TestGateway15::test_set_child_value_bad_ack", "tests/test_mysensors.py::TestGateway15::test_set_child_value_bad_type", "tests/test_mysensors.py::TestGateway15::test_set_child_value_no_sensor", "tests/test_mysensors.py::TestGateway15::test_set_child_value_value_type", "tests/test_mysensors.py::TestGateway15::test_set_forecast", "tests/test_mysensors.py::TestGateway15::test_set_humidity_level", "tests/test_mysensors.py::TestGateway15::test_set_light_level", "tests/test_mysensors.py::TestGateway15::test_set_rgb", "tests/test_mysensors.py::TestGateway15::test_set_rgbw", "tests/test_mysensors.py::TestGateway20::test_bad_battery_level", "tests/test_mysensors.py::TestGateway20::test_bad_file_name", "tests/test_mysensors.py::TestGateway20::test_battery_level", "tests/test_mysensors.py::TestGateway20::test_callback", "tests/test_mysensors.py::TestGateway20::test_callback_exception", "tests/test_mysensors.py::TestGateway20::test_child_validate", "tests/test_mysensors.py::TestGateway20::test_discover_response_known", "tests/test_mysensors.py::TestGateway20::test_discover_response_unknown", "tests/test_mysensors.py::TestGateway20::test_id_request_with_node_zero", "tests/test_mysensors.py::TestGateway20::test_internal_config", 
"tests/test_mysensors.py::TestGateway20::test_internal_gateway_ready", "tests/test_mysensors.py::TestGateway20::test_internal_id_request", "tests/test_mysensors.py::TestGateway20::test_internal_log_message", "tests/test_mysensors.py::TestGateway20::test_internal_sketch_name", "tests/test_mysensors.py::TestGateway20::test_internal_sketch_version", "tests/test_mysensors.py::TestGateway20::test_internal_time", "tests/test_mysensors.py::TestGateway20::test_json_empty_file_good_bak", "tests/test_mysensors.py::TestGateway20::test_json_empty_files", "tests/test_mysensors.py::TestGateway20::test_json_no_files", "tests/test_mysensors.py::TestGateway20::test_json_persistence", "tests/test_mysensors.py::TestGateway20::test_json_upgrade", "tests/test_mysensors.py::TestGateway20::test_logic_bad_message", "tests/test_mysensors.py::TestGateway20::test_non_presented_child", "tests/test_mysensors.py::TestGateway20::test_non_presented_sensor", "tests/test_mysensors.py::TestGateway20::test_persistence_at_init", "tests/test_mysensors.py::TestGateway20::test_pickle_empty_files", "tests/test_mysensors.py::TestGateway20::test_pickle_persistence", "tests/test_mysensors.py::TestGateway20::test_pickle_upgrade", "tests/test_mysensors.py::TestGateway20::test_present_humidity_sensor", "tests/test_mysensors.py::TestGateway20::test_present_light_level_sensor", "tests/test_mysensors.py::TestGateway20::test_present_same_child", "tests/test_mysensors.py::TestGateway20::test_present_to_non_sensor", "tests/test_mysensors.py::TestGateway20::test_presentation_arduino_node", "tests/test_mysensors.py::TestGateway20::test_req", "tests/test_mysensors.py::TestGateway20::test_req_notasensor", "tests/test_mysensors.py::TestGateway20::test_req_novalue", "tests/test_mysensors.py::TestGateway20::test_req_zerovalue", "tests/test_mysensors.py::TestGateway20::test_set_and_reboot", "tests/test_mysensors.py::TestGateway20::test_set_bad_battery_attribute", "tests/test_mysensors.py::TestGateway20::test_set_child_no_children", "tests/test_mysensors.py::TestGateway20::test_set_child_value", "tests/test_mysensors.py::TestGateway20::test_set_child_value_bad_ack", "tests/test_mysensors.py::TestGateway20::test_set_child_value_bad_type", "tests/test_mysensors.py::TestGateway20::test_set_child_value_no_sensor", "tests/test_mysensors.py::TestGateway20::test_set_child_value_value_type", "tests/test_mysensors.py::TestGateway20::test_set_forecast", "tests/test_mysensors.py::TestGateway20::test_set_humidity_level", "tests/test_mysensors.py::TestGateway20::test_set_light_level", "tests/test_mysensors.py::TestGateway20::test_set_position", "tests/test_mysensors.py::TestGateway20::test_set_with_new_state", "tests/test_mysensors.py::TestGateway20::test_smartsleep", "tests/test_mysensors.py::TestGateway20::test_smartsleep_from_unknown", "tests/test_mysensors.py::TestGateway21::test_bad_battery_level", "tests/test_mysensors.py::TestGateway21::test_bad_file_name", "tests/test_mysensors.py::TestGateway21::test_battery_level", "tests/test_mysensors.py::TestGateway21::test_callback", "tests/test_mysensors.py::TestGateway21::test_callback_exception", "tests/test_mysensors.py::TestGateway21::test_child_validate", "tests/test_mysensors.py::TestGateway21::test_discover_response_known", "tests/test_mysensors.py::TestGateway21::test_discover_response_unknown", "tests/test_mysensors.py::TestGateway21::test_id_request_with_node_zero", "tests/test_mysensors.py::TestGateway21::test_internal_config", "tests/test_mysensors.py::TestGateway21::test_internal_gateway_ready", 
"tests/test_mysensors.py::TestGateway21::test_internal_id_request", "tests/test_mysensors.py::TestGateway21::test_internal_log_message", "tests/test_mysensors.py::TestGateway21::test_internal_sketch_name", "tests/test_mysensors.py::TestGateway21::test_internal_sketch_version", "tests/test_mysensors.py::TestGateway21::test_internal_time", "tests/test_mysensors.py::TestGateway21::test_json_empty_file_good_bak", "tests/test_mysensors.py::TestGateway21::test_json_empty_files", "tests/test_mysensors.py::TestGateway21::test_json_no_files", "tests/test_mysensors.py::TestGateway21::test_json_persistence", "tests/test_mysensors.py::TestGateway21::test_json_upgrade", "tests/test_mysensors.py::TestGateway21::test_logic_bad_message", "tests/test_mysensors.py::TestGateway21::test_non_presented_child", "tests/test_mysensors.py::TestGateway21::test_non_presented_sensor", "tests/test_mysensors.py::TestGateway21::test_persistence_at_init", "tests/test_mysensors.py::TestGateway21::test_pickle_empty_files", "tests/test_mysensors.py::TestGateway21::test_pickle_persistence", "tests/test_mysensors.py::TestGateway21::test_pickle_upgrade", "tests/test_mysensors.py::TestGateway21::test_present_humidity_sensor", "tests/test_mysensors.py::TestGateway21::test_present_light_level_sensor", "tests/test_mysensors.py::TestGateway21::test_present_same_child", "tests/test_mysensors.py::TestGateway21::test_present_to_non_sensor", "tests/test_mysensors.py::TestGateway21::test_presentation_arduino_node", "tests/test_mysensors.py::TestGateway21::test_req", "tests/test_mysensors.py::TestGateway21::test_req_notasensor", "tests/test_mysensors.py::TestGateway21::test_req_novalue", "tests/test_mysensors.py::TestGateway21::test_req_zerovalue", "tests/test_mysensors.py::TestGateway21::test_set_and_reboot", "tests/test_mysensors.py::TestGateway21::test_set_bad_battery_attribute", "tests/test_mysensors.py::TestGateway21::test_set_child_no_children", "tests/test_mysensors.py::TestGateway21::test_set_child_value", "tests/test_mysensors.py::TestGateway21::test_set_child_value_bad_ack", "tests/test_mysensors.py::TestGateway21::test_set_child_value_bad_type", "tests/test_mysensors.py::TestGateway21::test_set_child_value_no_sensor", "tests/test_mysensors.py::TestGateway21::test_set_child_value_value_type", "tests/test_mysensors.py::TestGateway21::test_set_forecast", "tests/test_mysensors.py::TestGateway21::test_set_humidity_level", "tests/test_mysensors.py::TestGateway21::test_set_light_level", "tests/test_mysensors.py::TestGateway21::test_set_position", "tests/test_mysensors.py::TestGateway21::test_set_with_new_state", "tests/test_mysensors.py::TestGateway21::test_smartsleep", "tests/test_mysensors.py::TestGateway21::test_smartsleep_from_unknown", "tests/test_mysensors.py::TestGateway22::test_bad_battery_level", "tests/test_mysensors.py::TestGateway22::test_bad_file_name", "tests/test_mysensors.py::TestGateway22::test_battery_level", "tests/test_mysensors.py::TestGateway22::test_callback", "tests/test_mysensors.py::TestGateway22::test_callback_exception", "tests/test_mysensors.py::TestGateway22::test_child_validate", "tests/test_mysensors.py::TestGateway22::test_discover_response_known", "tests/test_mysensors.py::TestGateway22::test_discover_response_unknown", "tests/test_mysensors.py::TestGateway22::test_id_request_with_node_zero", "tests/test_mysensors.py::TestGateway22::test_internal_config", "tests/test_mysensors.py::TestGateway22::test_internal_gateway_ready", "tests/test_mysensors.py::TestGateway22::test_internal_id_request", 
"tests/test_mysensors.py::TestGateway22::test_internal_log_message", "tests/test_mysensors.py::TestGateway22::test_internal_sketch_name", "tests/test_mysensors.py::TestGateway22::test_internal_sketch_version", "tests/test_mysensors.py::TestGateway22::test_internal_time", "tests/test_mysensors.py::TestGateway22::test_json_empty_file_good_bak", "tests/test_mysensors.py::TestGateway22::test_json_empty_files", "tests/test_mysensors.py::TestGateway22::test_json_no_files", "tests/test_mysensors.py::TestGateway22::test_json_persistence", "tests/test_mysensors.py::TestGateway22::test_json_upgrade", "tests/test_mysensors.py::TestGateway22::test_logic_bad_message", "tests/test_mysensors.py::TestGateway22::test_non_presented_child", "tests/test_mysensors.py::TestGateway22::test_non_presented_sensor", "tests/test_mysensors.py::TestGateway22::test_persistence_at_init", "tests/test_mysensors.py::TestGateway22::test_pickle_empty_files", "tests/test_mysensors.py::TestGateway22::test_pickle_persistence", "tests/test_mysensors.py::TestGateway22::test_pickle_upgrade", "tests/test_mysensors.py::TestGateway22::test_present_humidity_sensor", "tests/test_mysensors.py::TestGateway22::test_present_light_level_sensor", "tests/test_mysensors.py::TestGateway22::test_present_same_child", "tests/test_mysensors.py::TestGateway22::test_present_to_non_sensor", "tests/test_mysensors.py::TestGateway22::test_presentation_arduino_node", "tests/test_mysensors.py::TestGateway22::test_req", "tests/test_mysensors.py::TestGateway22::test_req_notasensor", "tests/test_mysensors.py::TestGateway22::test_req_novalue", "tests/test_mysensors.py::TestGateway22::test_req_zerovalue", "tests/test_mysensors.py::TestGateway22::test_set_and_reboot", "tests/test_mysensors.py::TestGateway22::test_set_bad_battery_attribute", "tests/test_mysensors.py::TestGateway22::test_set_child_no_children", "tests/test_mysensors.py::TestGateway22::test_set_child_value", "tests/test_mysensors.py::TestGateway22::test_set_child_value_bad_ack", "tests/test_mysensors.py::TestGateway22::test_set_child_value_bad_type", "tests/test_mysensors.py::TestGateway22::test_set_child_value_no_sensor", "tests/test_mysensors.py::TestGateway22::test_set_child_value_value_type", "tests/test_mysensors.py::TestGateway22::test_set_forecast", "tests/test_mysensors.py::TestGateway22::test_set_humidity_level", "tests/test_mysensors.py::TestGateway22::test_set_light_level", "tests/test_mysensors.py::TestGateway22::test_set_position", "tests/test_mysensors.py::test_gateway_bad_protocol", "tests/test_mysensors.py::test_gateway_low_protocol" ]
[]
MIT License
2,218
[ "mysensors/const_21.py", "mysensors/const_22.py", "mysensors/const_20.py", "mysensors/__init__.py" ]
[ "mysensors/const_21.py", "mysensors/const_22.py", "mysensors/const_20.py", "mysensors/__init__.py" ]
discos__simulators-125
8f491d8dda1a217d9233b0f4680e11c52dafaa2a
2018-02-26 21:26:14
8f491d8dda1a217d9233b0f4680e11c52dafaa2a
coveralls: [![Coverage Status](https://coveralls.io/builds/15706170/badge)](https://coveralls.io/builds/15706170) Coverage increased (+0.002%) to 98.68% when pulling **f70ca7456ba0dacfd653fcdaf2fd2dea8abfe113 on fix-issue-124** into **8f491d8dda1a217d9233b0f4680e11c52dafaa2a on master**.
diff --git a/simulators/active_surface.py b/simulators/active_surface.py index 5bea129..afd751a 100644 --- a/simulators/active_surface.py +++ b/simulators/active_surface.py @@ -579,7 +579,7 @@ class System(ListeningSystem): self.drivers[params[0]].current_position ) - val = utils.binary_to_bytes(binary_position) + val = utils.binary_to_bytes(binary_position, little_endian=False) if params[1] == 0xFA: retval += val @@ -642,7 +642,10 @@ class System(ListeningSystem): else: return self.byte_nak else: - frequency = utils.bytes_to_int([chr(x) for x in params[2]]) + frequency = utils.bytes_to_int( + [chr(x) for x in params[2]], + little_endian=False + ) if frequency >= 20 and frequency <= 10000: if params[0] == -1: @@ -669,7 +672,10 @@ class System(ListeningSystem): else: return self.byte_nak else: - frequency = utils.bytes_to_int([chr(x) for x in params[2]]) + frequency = utils.bytes_to_int( + [chr(x) for x in params[2]], + little_endian=False + ) if frequency >= 20 and frequency <= 10000: if params[0] == -1: @@ -714,7 +720,8 @@ class System(ListeningSystem): return self.byte_nak else: reference_position = utils.bytes_to_int( - [chr(x) for x in params[2]] + [chr(x) for x in params[2]], + little_endian=False ) if params[0] == -1: @@ -811,7 +818,10 @@ class System(ListeningSystem): else: return self.byte_nak else: - absolute_position = utils.bytes_to_int([chr(x) for x in params[2]]) + absolute_position = utils.bytes_to_int( + [chr(x) for x in params[2]], + little_endian=False + ) if params[0] == -1: for driver in self.drivers: @@ -829,7 +839,10 @@ class System(ListeningSystem): else: return self.byte_nak else: - relative_position = utils.bytes_to_int([chr(x) for x in params[2]]) + relative_position = utils.bytes_to_int( + [chr(x) for x in params[2]], + little_endian=False + ) if params[0] == -1: for driver in self.drivers: @@ -867,7 +880,10 @@ class System(ListeningSystem): else: return self.byte_nak else: - velocity = utils.bytes_to_int([chr(x) for x in params[2]]) + velocity = utils.bytes_to_int( + [chr(x) for x in params[2]], + little_endian=False + ) if velocity > 100000 or velocity < -100000: if params[0] == -1: diff --git a/simulators/acu.py b/simulators/acu.py index 2beeb1a..6faa96e 100644 --- a/simulators/acu.py +++ b/simulators/acu.py @@ -17,8 +17,8 @@ from simulators.common import ListeningSystem, SendingSystem servers = [] servers.append((('127.0.0.1', 13000), ('127.0.0.1', 13001), ())) -start_flag = b'\x1D\xFC\xCF\x1A' -end_flag = b'\xA1\xFC\xCF\xD1' +start_flag = b'\x1A\xCF\xFC\x1D' +end_flag = b'\xD1\xCF\xFC\xA1' class System(ListeningSystem, SendingSystem): @@ -80,7 +80,7 @@ class System(ListeningSystem, SendingSystem): return False if len(self.msg) == 8: - self.msg_length = utils.bytes_to_int(self.msg[-4:]) + self.msg_length = utils.bytes_to_uint(self.msg[-4:]) if len(self.msg) == 12: macro_cmd_counter = utils.bytes_to_uint(self.msg[-4:]) diff --git a/simulators/acu_status/acu_utils.py b/simulators/acu_status/acu_utils.py index 4abd82e..8c46654 100644 --- a/simulators/acu_status/acu_utils.py +++ b/simulators/acu_status/acu_utils.py @@ -1,8 +1,8 @@ import time from simulators import utils -start_flag = b'\x1D\xFC\xCF\x1A' -end_flag = b'\xA1\xFC\xCF\xD1' +start_flag = b'\x1A\xCF\xFC\x1D' +end_flag = b'\xD1\xCF\xFC\xA1' class ModeCommand(object): diff --git a/simulators/utils.py b/simulators/utils.py index b839bd4..20c4ad4 100644 --- a/simulators/utils.py +++ b/simulators/utils.py @@ -121,10 +121,10 @@ def int_to_twos(val, n_bytes=4): return ("{0:0>%s}" % 
n_bits).format(binary_string) -def binary_to_bytes(binary_string): +def binary_to_bytes(binary_string, little_endian=True): """Convert a binary string in a string of bytes. - >>> binary_to_bytes('0110100001100101011011000110110001101111') + >>> binary_to_bytes('0110100001100101011011000110110001101111', False) '\x68\x65\x6C\x6C\x6F' """ @@ -133,31 +133,37 @@ def binary_to_bytes(binary_string): for i in range(0, len(binary_string), 8): byte_string += chr(int(binary_string[i:i + 8], 2)) - return byte_string + return byte_string[::-1] if little_endian else byte_string -def bytes_to_int(byte_string): +def bytes_to_int(byte_string, little_endian=True): """Convert a string of bytes to an integer (like C atoi function). - >>> bytes_to_int(b'hello') + >>> bytes_to_int(b'hello', False) 448378203247 """ binary_string = '' + if little_endian: + byte_string = byte_string[::-1] + for char in byte_string: binary_string += bin(ord(char))[2:].zfill(8) return twos_to_int(binary_string) -def bytes_to_uint(byte_string): +def bytes_to_uint(byte_string, little_endian=True): """Convert a string of bytes to an unsigned integer. - >>> bytes_to_uint(b'hi') + >>> bytes_to_uint(b'hi', little_endian=False) 26729 """ binary_string = '' + if little_endian: + byte_string = byte_string[::-1] + for char in byte_string: binary_string += bin(ord(char))[2:].zfill(8) @@ -198,32 +204,35 @@ def real_to_binary(num, precision=1): ) -def real_to_bytes(num, precision=1): +def real_to_bytes(num, precision=1, little_endian=True): """Return the bytestring representation of a floating-point number (IEEE 754 standard). - >>> [hex(ord(x)) for x in real_to_bytes(436.56, 1)] + >>> [hex(ord(x)) for x in real_to_bytes(436.56, 1, False)] ['0x43', '0xda', '0x47', '0xae'] - >>> [hex(ord(x)) for x in real_to_bytes(436.56, 2)] + >>> [hex(ord(x)) for x in real_to_bytes(436.56, 2, False)] ['0x40', '0x7b', '0x48', '0xf5', '0xc2', '0x8f', '0x5c', '0x29'] """ binary_number = real_to_binary(num, precision) - return binary_to_bytes(binary_number) + return binary_to_bytes(binary_number, little_endian=little_endian) -def bytes_to_real(bytes_real, precision=1): +def bytes_to_real(bytes_real, precision=1, little_endian=True): """Return the floating-point representation (IEEE 754 standard) of bytestring number. - >>> round(bytes_to_real('\x43\xDA\x47\xAE', 1), 2) + >>> round(bytes_to_real('\x43\xDA\x47\xAE', 1, False), 2) 436.56 - >>> round(bytes_to_real('\x40\x7B\x48\xF5\xC2\x8F\x5C\x29', 2), 2) + >>> round(bytes_to_real('\x40\x7B\x48\xF5\xC2\x8F\x5C\x29', 2, False), 2) 436.56 """ + if little_endian: + bytes_real = bytes_real[::-1] + if precision == 1: return struct.unpack('!f', bytes_real)[0] elif precision == 2: @@ -235,20 +244,20 @@ def bytes_to_real(bytes_real, precision=1): ) -def int_to_bytes(val, n_bytes=4): +def int_to_bytes(val, n_bytes=4, little_endian=True): """Return the bytestring representation of a given signed integer. - >>> [hex(ord(x)) for x in int_to_bytes(354)] + >>> [hex(ord(x)) for x in int_to_bytes(354, little_endian=False)] ['0x0', '0x0', '0x1', '0x62'] """ - return binary_to_bytes(int_to_twos(val, n_bytes)) + return binary_to_bytes(int_to_twos(val, n_bytes), little_endian) -def uint_to_bytes(val, n_bytes=4): +def uint_to_bytes(val, n_bytes=4, little_endian=True): """Return the bytestring representation of a given unsigned integer. 
- >>> [hex(ord(x)) for x in uint_to_bytes(657)] + >>> [hex(ord(x)) for x in uint_to_bytes(657, little_endian=False)] ['0x0', '0x0', '0x2', '0x91'] """ @@ -262,7 +271,10 @@ def uint_to_bytes(val, n_bytes=4): % (val, min_range, max_range) ) - return binary_to_bytes(bin(val)[2:].zfill(n_bytes * 8)) + return binary_to_bytes( + bin(val)[2:].zfill(n_bytes * 8), + little_endian=little_endian + ) def sign(number):
ACU commands and status use little-endian byte order, but the current implementation uses big-endian.
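For context on the byte-order mismatch described in that problem statement, here is a minimal Python sketch using only the standard-library `struct` module (not the simulators' own `utils` helpers) showing how the same four bytes decode to different integers under big-endian and little-endian interpretation. The sample bytes come from the record's test_patch (`test_right_bytes_to_int`); the variable names are purely illustrative.

```python
import struct

# Same raw bytes as in test_right_bytes_to_int from the test_patch above.
raw = b'\x00\x00\xFA\xFF'

# Big-endian signed 32-bit read: 0x0000FAFF
big_endian_value = struct.unpack('>i', raw)[0]

# Little-endian signed 32-bit read: bytes are consumed in reverse order (0xFFFA0000)
little_endian_value = struct.unpack('<i', raw)[0]

print(big_endian_value)     # 64255
print(little_endian_value)  # -393216
```

This difference is why the patch in this record adds a `little_endian` flag to the conversion helpers and reverses the byte order of the ACU start/end flags.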
discos/simulators
diff --git a/tests/test_acu.py b/tests/test_acu.py index 674856f..f305c94 100644 --- a/tests/test_acu.py +++ b/tests/test_acu.py @@ -20,7 +20,7 @@ class TestACU(unittest.TestCase): def test_status_message_length(self): status = self.system.get_message() - msg_length = utils.bytes_to_int(status[4:8]) + msg_length = utils.bytes_to_uint(status[4:8]) self.assertEqual(msg_length, 813) def test_duplicated_macro_command_counter(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index 86f8db5..6763dbe 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -68,7 +68,7 @@ class TestServer(unittest.TestCase): def test_right_binary_to_bytes(self): """Convert a binary string into a string of bytes.""" binary_string = '00000101000110100010100011010010' - byte_string = utils.binary_to_bytes(binary_string) + byte_string = utils.binary_to_bytes(binary_string, little_endian=False) expected_byte_string = b'\x05\x1A\x28\xD2' self.assertEqual(byte_string, expected_byte_string) @@ -81,7 +81,7 @@ class TestServer(unittest.TestCase): def test_right_bytes_to_int(self): """Convert a string of bytes into an integer (like C atoi function).""" byte_string = b'\x00\x00\xFA\xFF' - result = utils.bytes_to_int(byte_string) + result = utils.bytes_to_int(byte_string, little_endian=False) expected_result = 64255 self.assertEqual(result, expected_result) @@ -137,13 +137,13 @@ class TestServer(unittest.TestCase): def test_real_to_bytes_single_precision(self): """Convert a real number to a string of bytes.""" number = 45.12371938725634 - result = utils.real_to_bytes(number) + result = utils.real_to_bytes(number, little_endian=False) expected_result = b'\x42\x34\x7E\xB0' self.assertEqual(result, expected_result) def test_real_to_bytes_double_precision(self): number = 3.14159265358979323846264338327950288419716939937510582097494 - result = utils.real_to_bytes(number, 2) + result = utils.real_to_bytes(number, precision=2, little_endian=False) expected_result = b'\x40\x09\x21\xFB\x54\x44\x2D\x18' self.assertEqual(result, expected_result) @@ -155,13 +155,17 @@ class TestServer(unittest.TestCase): def test_bytes_to_real_single_precision(self): """Convert a string of bytes to a floating point number.""" byte_string = b'\x42\x34\x7E\xB0' - result = utils.bytes_to_real(byte_string) + result = utils.bytes_to_real(byte_string, little_endian=False) expected_result = 45.12371826171875 self.assertEqual(result, expected_result) def test_bytes_to_real_double_precision(self): byte_string = b'\x40\x09\x21\xFB\x54\x44\x2D\x18' - result = utils.bytes_to_real(byte_string, 2) + result = utils.bytes_to_real( + byte_string, + precision=2, + little_endian=False + ) expected_result = ( 3.14159265358979323846264338327950288419716939937510582097494 ) @@ -175,13 +179,13 @@ class TestServer(unittest.TestCase): def test_int_to_bytes_positive(self): """Convert a signed integer to a string of bytes.""" number = 232144 - result = utils.int_to_bytes(number) + result = utils.int_to_bytes(number, little_endian=False) expected_result = b'\x00\x03\x8A\xD0' self.assertEqual(result, expected_result) def test_int_to_bytes_negative(self): number = -4522764 - result = utils.int_to_bytes(number) + result = utils.int_to_bytes(number, little_endian=False) expected_result = b'\xFF\xBA\xFC\xF4' self.assertEqual(result, expected_result) @@ -199,7 +203,7 @@ class TestServer(unittest.TestCase): def test_uint_to_bytes(self): """Convert an unsigned integer to a string of bytes.""" number = 1284639736 - result = utils.uint_to_bytes(number) + result = 
utils.uint_to_bytes(number, little_endian=False) expected_result = b'\x4C\x92\x0B\xF8' self.assertEqual(result, expected_result)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 4 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "numpy>=1.16.0", "pandas>=1.0.0" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/discos/simulators.git@8f491d8dda1a217d9233b0f4680e11c52dafaa2a#egg=discos_simulators exceptiongroup==1.2.2 iniconfig==2.1.0 numpy==2.0.2 packaging==24.2 pandas==2.2.3 pluggy==1.5.0 pytest==8.3.5 python-dateutil==2.9.0.post0 pytz==2025.2 scipy==1.13.1 six==1.17.0 tomli==2.2.1 tzdata==2025.2
name: simulators channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - pluggy==1.5.0 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - pytz==2025.2 - scipy==1.13.1 - six==1.17.0 - tomli==2.2.1 - tzdata==2025.2 prefix: /opt/conda/envs/simulators
[ "tests/test_utils.py::TestServer::test_bytes_to_real_double_precision", "tests/test_utils.py::TestServer::test_bytes_to_real_single_precision" ]
[ "tests/test_acu.py::TestACU::test_duplicated_macro_command_counter", "tests/test_acu.py::TestACU::test_mode_command_active", "tests/test_acu.py::TestACU::test_mode_command_azimuth", "tests/test_acu.py::TestACU::test_mode_command_drive_to_stow", "tests/test_acu.py::TestACU::test_mode_command_drive_to_stow_wrong_parameters", "tests/test_acu.py::TestACU::test_mode_command_elevation", "tests/test_acu.py::TestACU::test_mode_command_inactive", "tests/test_acu.py::TestACU::test_mode_command_interlock", "tests/test_acu.py::TestACU::test_mode_command_preset_absolute", "tests/test_acu.py::TestACU::test_mode_command_preset_absolute_wrong_parameters", "tests/test_acu.py::TestACU::test_mode_command_preset_relative", "tests/test_acu.py::TestACU::test_mode_command_preset_relative_wrong_parameters", "tests/test_acu.py::TestACU::test_mode_command_program_track", "tests/test_acu.py::TestACU::test_mode_command_reset", "tests/test_acu.py::TestACU::test_mode_command_slew", "tests/test_acu.py::TestACU::test_mode_command_slew_wrong_parameters", "tests/test_acu.py::TestACU::test_mode_command_slew_zero_speed", "tests/test_acu.py::TestACU::test_mode_command_stop", "tests/test_acu.py::TestACU::test_mode_command_stow", "tests/test_acu.py::TestACU::test_mode_command_stow_wrong_position", "tests/test_acu.py::TestACU::test_mode_command_unknown_mode_id", "tests/test_acu.py::TestACU::test_mode_command_unknown_subsystem", "tests/test_acu.py::TestACU::test_mode_command_unstow", "tests/test_acu.py::TestACU::test_mode_command_without_activate", "tests/test_acu.py::TestACU::test_mode_command_wrong_state_active", "tests/test_acu.py::TestACU::test_multiple_commands_wrong_count", "tests/test_acu.py::TestACU::test_parse_correct_end_flag", "tests/test_acu.py::TestACU::test_parse_wrong_end_flag", "tests/test_acu.py::TestACU::test_parse_wrong_start_flag", "tests/test_acu.py::TestACU::test_program_track_command_add_entries", "tests/test_acu.py::TestACU::test_program_track_command_add_entries_empty_table", "tests/test_acu.py::TestACU::test_program_track_command_add_entries_wrong_start_time", "tests/test_acu.py::TestACU::test_program_track_command_load_new_table", "tests/test_acu.py::TestACU::test_program_track_execution", "tests/test_acu.py::TestACU::test_program_track_out_of_range_rate", "tests/test_acu.py::TestACU::test_program_track_too_long_sequence", "tests/test_acu.py::TestACU::test_program_track_too_short_sequence", "tests/test_acu.py::TestACU::test_program_track_unknown_subsystem", "tests/test_acu.py::TestACU::test_program_track_wrong_delta_time", "tests/test_acu.py::TestACU::test_program_track_wrong_first_relative_time", "tests/test_acu.py::TestACU::test_program_track_wrong_interpolation_mode", "tests/test_acu.py::TestACU::test_program_track_wrong_load_mode", "tests/test_acu.py::TestACU::test_program_track_wrong_parameter_id", "tests/test_acu.py::TestACU::test_program_track_wrong_sequence_length", "tests/test_acu.py::TestACU::test_program_track_wrong_subsequent_relative_time", "tests/test_acu.py::TestACU::test_program_track_wrong_tracking_mode", "tests/test_acu.py::TestACU::test_status_message_length", "tests/test_acu.py::TestACU::test_unknown_command", "tests/test_acu.py::TestACU::test_utils_get_command_counter", "tests/test_utils.py::TestServer::test_day_milliseconds", "tests/test_utils.py::TestServer::test_int_to_bytes_negative", "tests/test_utils.py::TestServer::test_int_to_bytes_positive", "tests/test_utils.py::TestServer::test_int_to_bytes_wrong", "tests/test_utils.py::TestServer::test_mjd_to_date", 
"tests/test_utils.py::TestServer::test_mjd_to_date_old_date", "tests/test_utils.py::TestServer::test_real_to_binary_double_precision", "tests/test_utils.py::TestServer::test_real_to_binary_single_precision", "tests/test_utils.py::TestServer::test_real_to_bytes_double_precision", "tests/test_utils.py::TestServer::test_real_to_bytes_single_precision", "tests/test_utils.py::TestServer::test_right_binary_to_bytes", "tests/test_utils.py::TestServer::test_right_bytes_to_int", "tests/test_utils.py::TestServer::test_right_bytes_to_uint", "tests/test_utils.py::TestServer::test_sign_negative", "tests/test_utils.py::TestServer::test_sign_positive", "tests/test_utils.py::TestServer::test_sign_zero", "tests/test_utils.py::TestServer::test_uint_to_bytes", "tests/test_utils.py::TestServer::test_uint_to_bytes_wrong", "tests/test_utils.py::TestServer::test_wrong_binary_to_bytes", "tests/test_utils.py::TestServer::test_wrong_bytes_to_int", "tests/test_utils.py::TestServer::test_wrong_bytes_to_uint", "tests/test_utils.py::TestServer::test_wrong_datatype_sign", "tests/test_utils.py::TestServer::test_wrong_real_to_binary" ]
[ "tests/test_acu.py::TestACU::test_duplicated_command_counter", "tests/test_acu.py::TestACU::test_utils_macro_command_append", "tests/test_acu.py::TestACU::test_utils_macro_command_wrong_type_init", "tests/test_acu.py::TestACU::test_utils_program_track_command_wrong_entry", "tests/test_acu.py::TestACU::test_utils_program_track_get_empty_table", "tests/test_utils.py::TestServer::test_binary_complement_longer_mask", "tests/test_utils.py::TestServer::test_binary_complement_shorter_equal_mask", "tests/test_utils.py::TestServer::test_bytes_to_real_unknown_precision", "tests/test_utils.py::TestServer::test_int_to_bytes_out_of_range", "tests/test_utils.py::TestServer::test_int_to_twos", "tests/test_utils.py::TestServer::test_mjd_given_date", "tests/test_utils.py::TestServer::test_mjd_now", "tests/test_utils.py::TestServer::test_mjd_old_date", "tests/test_utils.py::TestServer::test_out_of_range_int_to_twos", "tests/test_utils.py::TestServer::test_real_to_binary_unknown_precision", "tests/test_utils.py::TestServer::test_real_to_bytes_unknown_precision", "tests/test_utils.py::TestServer::test_right_binary_complement", "tests/test_utils.py::TestServer::test_right_checksum", "tests/test_utils.py::TestServer::test_right_twos_to_int", "tests/test_utils.py::TestServer::test_uint_to_bytes_out_of_range", "tests/test_utils.py::TestServer::test_wrong_binary_complement", "tests/test_utils.py::TestServer::test_wrong_checksum", "tests/test_utils.py::TestServer::test_wrong_twos_to_int", "tests/test_utils.py::TestServer::test_wrong_type_binary_complement", "tests/test_utils.py::TestServer::test_wrong_type_mask_binary_complement" ]
[]
null
2,219
[ "simulators/acu.py", "simulators/utils.py", "simulators/acu_status/acu_utils.py", "simulators/active_surface.py" ]
[ "simulators/acu.py", "simulators/utils.py", "simulators/acu_status/acu_utils.py", "simulators/active_surface.py" ]
oasis-open__cti-python-stix2-131
c682838239fce1abd3b3e471fb3c601de8663501
2018-02-26 22:12:57
4a9c38e0b50415f4733072fc76eb8ebd0749c84b
diff --git a/docs/guide/datastore.ipynb b/docs/guide/datastore.ipynb index ba8ad53..31635ba 100644 --- a/docs/guide/datastore.ipynb +++ b/docs/guide/datastore.ipynb @@ -52,6 +52,19 @@ "print = json_print" ] }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# without this configuration, only last print() call is outputted in cells\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = \"all\"" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -85,50 +98,229 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 4, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"type\": \"indicator\",\n", - " \"id\": \"indicator--797ae2b5-3f7a-44c5-8ecd-33ba22fdc2b5\",\n", - " \"created\": \"2017-10-04T19:27:41.000Z\",\n", - " \"modified\": \"2017-10-04T19:27:41.000Z\",\n", - " \"labels\": [\n", - " \"malicious-activity\"\n", - " ],\n", - " \"name\": \"Emerging Threats - Block Rules - Compromised IPs\",\n", - " \"pattern\": \"[ ipv4-addr:value = '98.138.19.88' ]\",\n", - " \"valid_from\": \"2017-10-04T19:27:41Z\",\n", - " \"kill_chain_phases\": [\n", - " {\n", - " \"kill_chain_name\": \"lockheed-martin-cyber-kill-chain\",\n", - " \"phase_name\": \"delivery\"\n", - " }\n", - " ]\n", - "}\n", - "{\n", - " \"type\": \"indicator\",\n", - " \"id\": \"indicator--11913f42-2d52-4b9d-842f-94bf06819a66\",\n", - " \"created\": \"2017-10-04T19:27:41.000Z\",\n", - " \"modified\": \"2017-10-04T19:27:41.000Z\",\n", - " \"labels\": [\n", - " \"malicious-activity\"\n", - " ],\n", - " \"name\": \"Emerging Threats - Block Rules - Compromised IPs\",\n", - " \"pattern\": \"[ ipv4-addr:value = '98.138.19.88' ]\",\n", - " \"valid_from\": \"2017-10-04T19:27:41Z\",\n", - " \"kill_chain_phases\": [\n", - " {\n", - " \"kill_chain_name\": \"lockheed-martin-cyber-kill-chain\",\n", - " \"phase_name\": \"delivery\"\n", - " }\n", - " ]\n", - "}\n" - ] + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + ".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } 
/* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + ".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> 
<span class=\"s2\">&quot;intrusion-set&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created_by_ref&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-31T21:31:53.197Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-31T21:31:53.197Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;DragonOK&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;DragonOK is a threat group that has targeted Japanese organizations with phishing emails. Due to overlapping TTPs, including similar custom tools, DragonOK is thought to have a direct or indirect relationship with the threat group Moafee. [[Citation: Operation Quantum Entanglement]][[Citation: Symbiotic APT Groups]] It is known to use a variety of malware, including Sysget/HelloBridge, PlugX, PoisonIvy, FormerFirstRat, NFlog, and NewCT. [[Citation: New DragonOK]]&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;aliases&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;DragonOK&quot;</span>\n", + " <span class=\"p\">],</span>\n", + " <span class=\"nt\">&quot;external_references&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;source_name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;mitre-attack&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;url&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;https://attack.mitre.org/wiki/Group/G0017&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;external_id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;G0017&quot;</span>\n", + " <span class=\"p\">},</span>\n", + " <span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;source_name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Operation Quantum Entanglement&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Haq, T., Moran, N., Vashisht, S., Scott, M. (2014, September). OPERATION QUANTUM ENTANGLEMENT. Retrieved November 4, 2015.&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;url&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;https://www.fireeye.com/content/dam/fireeye-www/global/en/current-threats/pdfs/wp-operation-quantum-entanglement.pdf&quot;</span>\n", + " <span class=\"p\">},</span>\n", + " <span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;source_name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Symbiotic APT Groups&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Haq, T. (2014, October). 
An Insight into Symbiotic APT Groups. Retrieved November 4, 2015.&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;url&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;https://dl.mandiant.com/EE/library/MIRcon2014/MIRcon%202014%20R&amp;D%20Track%20Insight%20into%20Symbiotic%20APT.pdf&quot;</span>\n", + " <span class=\"p\">},</span>\n", + " <span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;source_name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;New DragonOK&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Miller-Osborn, J., Grunzweig, J.. (2015, April). Unit 42 Identifies New DragonOK Backdoor Malware Deployed Against Japanese Targets. Retrieved November 4, 2015.&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;url&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;http://researchcenter.paloaltonetworks.com/2015/04/unit-42-identifies-new-dragonok-backdoor-malware-deployed-against-japanese-targets/&quot;</span>\n", + " <span class=\"p\">}</span>\n", + " <span class=\"p\">],</span>\n", + " <span class=\"nt\">&quot;object_marking_refs&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168&quot;</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + ".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* 
Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + ".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;indicator&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;indicator--02b90f02-a96a-43ee-88f1-1e87297941f2&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span 
class=\"s2\">&quot;2017-11-13T07:00:24.000Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-13T07:00:24.000Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Ransomware IP Blocklist&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;IP Blocklist address from abuse.ch&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;pattern&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;[ ipv4-addr:value = &#39;91.237.247.24&#39; ]&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;valid_from&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-13T07:00:24Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;labels&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;malicious-activity&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"s2\">&quot;Ransomware&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"s2\">&quot;Botnet&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"s2\">&quot;C&amp;C&quot;</span>\n", + " <span class=\"p\">],</span>\n", + " <span class=\"nt\">&quot;external_references&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;source_name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;abuse.ch&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;url&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;https://ransomwaretracker.abuse.ch/blocklist/&quot;</span>\n", + " <span class=\"p\">}</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ @@ -136,22 +328,22 @@ "from stix2 import CompositeDataSource, FileSystemSource, TAXIICollectionSource\n", "\n", "# create FileSystemStore\n", - "fs = FileSystemSource(\"/tmp/stix2_data\")\n", + "fs = FileSystemSource(\"/home/michael/cti-python-stix2/stix2/test/stix2_data/\")\n", "\n", "# create TAXIICollectionSource\n", - "colxn = Collection('https://test.freetaxii.com:8000/api1/collections/9cfa669c-ee94-4ece-afd2-f8edac37d8fd/')\n", + "colxn = Collection('https://test.freetaxii.com:8000/osint/collections/a9c22eaf-0f3e-482c-8bb4-45ae09e75d9b/')\n", "ts = TAXIICollectionSource(colxn)\n", "\n", "# add them both to the CompositeDataSource\n", "cs = CompositeDataSource()\n", - "cs.add_data_sources([fs, ts])\n", + "cs.add_data_sources([fs,ts])\n", "\n", "# get an object that is only in the filesystem\n", - "ta = cs.get('intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a')\n", - "print(ta)\n", + "intrusion_set = cs.get('intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a')\n", + "print(intrusion_set)\n", "\n", "# get an object that is only in the TAXII collection\n", - "ind = cs.get('indicator--37a6a5de-a5b9-425a-903a-4ae9cbf1ff3f')\n", + "ind = cs.get('indicator--02b90f02-a96a-43ee-88f1-1e87297941f2')\n", "print(ind)\n" ] }, @@ -197,7 +389,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": { "collapsed": true }, @@ -231,7 
+423,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": { "collapsed": true }, @@ -537,9 +729,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 2", + "display_name": "cti-python-stix2", "language": "python", - "name": "python2" + "name": "cti-python-stix2" }, "language_info": { "codemirror_mode": { diff --git a/docs/guide/filesystem.ipynb b/docs/guide/filesystem.ipynb index 4b5bd6f..f494e6e 100644 --- a/docs/guide/filesystem.ipynb +++ b/docs/guide/filesystem.ipynb @@ -82,7 +82,7 @@ " /STIX2 Domain Object type\n", "```\n", "\n", - "Essentially a master STIX2 content directory where each subdirectory aligns to a STIX2 domain object type (i.e. \"attack-pattern\", \"campaign\", \"malware\" etc..). Within each STIX2 domain object subdirectory are JSON files that are STIX2 domain objects of the specified type. The name of the json files correspond to the ID of the STIX2 domain object found within that file. A real example of the FileSystem directory structure:\n", + "The master STIX2 content directory contains subdirectories, each of which aligns to a STIX2 domain object type (i.e. \"attack-pattern\", \"campaign\", \"malware\", etc.). Within each STIX2 domain object subdirectory are JSON files that are STIX2 domain objects of the specified type. The name of the json files correspond to the ID of the STIX2 domain object found within that file. A real example of the FileSystem directory structure:\n", "\n", "```\n", "stix2_content/\n", @@ -109,11 +109,11 @@ "\n", "[FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) is intended for use cases where STIX2 content is retrieved and pushed to the same file directory. As [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) is just a wrapper around a paired [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource) and [FileSystemSink](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink) that point the same file directory.\n", "\n", - "Use cases where STIX2 content will only be retrieved or pushed, then a [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource) and [FileSystemSink](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink) can be used individually. Or for the use case where STIX2 content will be retrieved from one distinct file directory and pushed to another.\n", + "For use cases where STIX2 content will only be retrieved or pushed, then a [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource) and [FileSystemSink](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink) can be used individually. They can also be used individually when STIX2 content will be retrieved from one distinct file directory and pushed to another.\n", "\n", "### FileSystem API\n", "\n", - "A note on [get()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.get), [all_versions()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.all_versions), and [query()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.query). The format of the STIX2 content targeted by the FileSystem suite is JSON files. 
When STIX2 content (in JSON) is retrieved by the [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) from disk, the content will attempt to be parsed into full-featured python STIX2 objects and returned as such. \n", + "A note on [get()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.get), [all_versions()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.all_versions), and [query()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.query): The format of the STIX2 content targeted by the FileSystem suite is JSON files. When the [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) retrieves STIX2 content (in JSON) from disk, it will attempt to parse the content into full-featured python-stix2 objects and returned as such. \n", "\n", "A note on [add()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink.add): When STIX content is added (pushed) to the file system, the STIX content can be supplied in the following forms: Python STIX objects, Python dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. Any of the previous STIX content forms will be converted to a STIX JSON object (in a STIX Bundle) and written to disk. \n", "\n", @@ -264,7 +264,7 @@ "source": [ "from stix2 import FileSystemSource\n", "\"\"\"\n", - "Working with FileSystemSource for retrieveing STIX content.\n", + "Working with FileSystemSource for retrieving STIX content.\n", "\"\"\"\n", "\n", "# create FileSystemSource\n", diff --git a/docs/guide/memory.ipynb b/docs/guide/memory.ipynb index 75c0475..d651525 100644 --- a/docs/guide/memory.ipynb +++ b/docs/guide/memory.ipynb @@ -62,10 +62,11 @@ "\n", "\n", "### Memory API\n", + "A note on adding and retreiving STIX content to the Memory suite: As mentioned, under the hood the Memory suite is an internal, in-memory dictionary. STIX content that is to be added can be in the following forms: python-stix2 objects, (Python) dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) actually stores STIX content either as python-stix2 objects or as (Python) dictionaries, reducing and converting any of the aforementioned types to one of those. Additionally, whatever form the STIX object is stored as, is how it will be returned when retrieved. python-stix2 objects, and json-encoded strings (of STIX content) are stored as python-stix2 objects, while (Python) dictionaries (of STIX objects) are stored as (Python) dictionaries.\n", "\n", - "A note on [load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file) and [save_to_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.save_to_file). These methods both add STIX content to an internal dictionary (maintained by [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore)). 
STIX content that is to be added can be in the following forms: Python STIX objects, Python dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) actually stores STIX content either as python STIX objects or as python dictionaries, reducing and converting any of the aforementioned types to one of those; and whatever form the STIX object is stored as, is how it will be returned as when queried or retrieved. Python STIX objects, and json-encoded strings (of STIX content) are stored as python STIX objects. Python dictionaries (of STIX objects) are stored as Python dictionaries. This is done, as can be efficiently supported, in order to return STIX content in the form it was added to the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore). Also, for [load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file), STIX content is assumed to be in JSON form within the file, individually or in a Bundle. \n", + "A note on [load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file): For [load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file), STIX content is assumed to be in JSON form within the file, as an individual STIX object or in a Bundle. When the JSON is loaded, the STIX objects are parsed into python-stix2 objects before being stored in the in-memory dictionary.\n", "\n", - "A note on [save_to_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.save_to_file). This method dumps all STIX content that is in [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) to the specified file. The file format will be JSON, and the STIX content will be within a STIX Bundle. Note also that the the output form will be a JSON STIX Bundle regardless of the form that the individual STIX objects are stored (i.e. supplied) to the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore). \n", + "A note on [save_to_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.save_to_file): This method dumps all STIX content that is in the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) to the specified file. The file format will be JSON, and the STIX content will be within a STIX Bundle. Note also that the output form will be a JSON STIX Bundle regardless of the form that the individual STIX objects are stored in (i.e. supplied to) the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore). 
\n", "\n", "### Memory Examples\n", "\n", @@ -74,26 +75,101 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 3, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"type\": \"indicator\",\n", - " \"id\": \"indicator--d91ef175-8a82-470a-a610-bbd2ee8a1516\",\n", - " \"created\": \"2017-09-29T19:52:16.930Z\",\n", - " \"modified\": \"2017-09-29T19:52:16.930Z\",\n", - " \"labels\": [\n", - " \"malicious-activity\"\n", - " ],\n", - " \"description\": \"Crusades C2 implant\",\n", - " \"pattern\": \"[file:hashes.'SHA-256' = '54b7e05e39a59428743635242e4a867c932140a999f52a1e54fa7ee6a440c73b']\",\n", - " \"valid_from\": \"2017-09-29T19:52:16.930909Z\"\n", - "}\n" - ] + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + ".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; 
font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + ".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;indicator&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;indicator--2f61e4e7-0891-4e09-b79a-66f5e594fec0&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.590Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.590Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Crusades C2 implant&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;pattern&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;[file:hashes.&#39;SHA-256&#39; = &#39;54b7e05e39a59428743635242e4a867c932140a999f52a1e54fa7ee6a440c73b&#39;]&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;valid_from&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.590939Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;labels&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " 
<span class=\"s2\">&quot;malicious-activity&quot;</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ @@ -115,26 +191,101 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 4, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"type\": \"indicator\",\n", - " \"id\": \"indicator--79fdaad7-c461-49bb-ad1d-caa5e9c51c90\",\n", - " \"created\": \"2017-09-29T19:52:17.021Z\",\n", - " \"modified\": \"2017-09-29T19:52:17.021Z\",\n", - " \"labels\": [\n", - " \"malicious-activity\"\n", - " ],\n", - " \"description\": \"Crusades stage 2 implant variant\",\n", - " \"pattern\": \"[file:hashes.'SHA-256' = '31a45e777e4d58b97f4c43e38006f8cd6580ddabc4037905b2fad734712b582c']\",\n", - " \"valid_from\": \"2017-09-29T19:52:17.021728Z\"\n", - "}\n" - ] + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + ".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: 
#999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + ".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;indicator&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;indicator--ddb765ba-ff1e-4285-bf33-1f6d08f583d6&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.799Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.799Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;description&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Crusades stage 2 implant variant&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;pattern&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;[file:hashes.&#39;SHA-256&#39; = &#39;31a45e777e4d58b97f4c43e38006f8cd6580ddabc4037905b2fad734712b582c&#39;]&quot;</span><span 
class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;valid_from&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.799228Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;labels&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;malicious-activity&quot;</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ @@ -157,89 +308,216 @@ }, { "cell_type": "code", - "execution_count": 3, - "metadata": {}, + "execution_count": 5, + "metadata": { + "scrolled": true + }, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "-----------------------\n", - "{'name': 'Urban2', 'created': '2017-09-12T13:26:18.023Z', 'labels': ['rootkit'], 'modified': '2017-09-12T13:26:18.023Z', 'type': 'malware', 'id': 'malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4'}\n", - "-----------------------\n", - "{\n", - " \"type\": \"malware\",\n", - " \"id\": \"malware--2b3dd412-18a5-4e81-8742-4977068eb3eb\",\n", - " \"created\": \"2017-09-29T19:52:17.028Z\",\n", - " \"modified\": \"2017-09-29T19:52:17.028Z\",\n", - " \"name\": \"Alexios\",\n", - " \"labels\": [\n", - " \"rootkit\"\n", - " ]\n", - "}\n" - ] + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + ".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { 
color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + ".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;malware&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;malware--e8170e70-522f-4ec3-aa22-afb55bfad0b0&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.806Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-11-17T17:01:31.806Z&quot;</span><span class=\"p\">,</span>\n", + " <span 
class=\"nt\">&quot;name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;Alexios&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;labels&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;rootkit&quot;</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ "from stix2 import Filter\n", "\n", - "# add dictionary (of STIX object) to MemoryStore\n", - "# (this dict would assumably come from output of another source,\n", - "# i.e. a loaded json file, NOT manually created as done here for sample purposes)\n", - "\n", - "malware = {\n", - " \"type\": \"malware\",\n", - " \"id\" : \"malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4\",\n", - " \"labels\": [\"rootkit\"],\n", - " \"name\": \"Urban2\",\n", - " \"created\": \"2017-09-12T13:26:18.023Z\",\n", - " \"modified\": \"2017-09-12T13:26:18.023Z\"\n", - "}\n", - "\n", - "mem.add(malware)\n", - "\n", - "results = mem.query([Filter(\"labels\",\"=\", \"rootkit\")])\n", - "for r in results:\n", - " # note that python STIX objects are pretty-printed\n", - " # due to some python dunder method magic, but normal\n", - " # python dictionaries are not by default. Thus the\n", - " # python STIX objects and python STIX dictionaries\n", - " # that match the above query can be easily identified visually\n", - " print(\"-----------------------\")\n", - " print(r)" + "mal = mem.query([Filter(\"labels\",\"=\", \"rootkit\")])[0]\n", + "print(mal)" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 6, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"type\": \"report\",\n", - " \"id\": \"report--2add14d6-bbf3-4308-bb8e-226d314a08e4\",\n", - " \"created\": \"2017-05-08T18:34:08.042Z\",\n", - " \"modified\": \"2017-05-08T18:34:08.042Z\",\n", - " \"name\": \"The Crusades: Looking into the relentless infiltration of Israels digital infrastructure.\",\n", - " \"published\": \"2017-05-08T10:24:11.011Z\",\n", - " \"object_refs\": [\n", - " \"malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4\"\n", - " ],\n", - " \"labels\": [\n", - " \"threat-report\"\n", - " ]\n", - "}\n" - ] + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + ".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* 
Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + 
".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;report&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;report--2add14d6-bbf3-4308-bb8e-226d314a08e4&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-08T18:34:08.042Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-08T18:34:08.042Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;The Crusades: Looking into the relentless infiltration of Israels digital infrastructure.&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;published&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-08T10:24:11.011Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;object_refs&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4&quot;</span>\n", + " <span class=\"p\">],</span>\n", + " <span class=\"nt\">&quot;labels&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;threat-report&quot;</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ "from stix2 import Filter\n", "\n", "# add json formatted string to MemoryStore\n", - "# Again, would NOT manual create json-formatted string\n", + "# Again, would NOT manually create json-formatted string\n", "# but taken as an output form from another source\n", "report = '{\"type\": \"report\",\"id\": \"report--2add14d6-bbf3-4308-bb8e-226d314a08e4\",\"labels\": [\"threat-report\"], \"name\": \"The Crusades: Looking into the relentless infiltration of Israels digital infrastructure.\", \"published\": \"2017-05-08T10:24:11.011Z\", \"object_refs\":[\"malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4\"], \"created\": \"2017-05-08T18:34:08.042Z\", \"modified\": \"2017-05-08T18:34:08.042Z\"}'\n", "\n", @@ -257,15 +535,103 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 8, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "{u'name': u'The Crusades: Looking into the relentless infiltration of Israels digital infrastructure.', u'created': u'2017-05-08T18:34:08.042Z', u'labels': [u'threat-report'], u'modified': u'2017-05-08T18:34:08.042Z', u'object_refs': [u'malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4'], u'published': u'2017-05-08T10:24:11.011Z', u'type': u'report', u'id': u'report--2add14d6-bbf3-4308-bb8e-226d314a08e4'}\n" - ] + "data": { + "text/html": [ + "<style type=\"text/css\">.highlight .hll { background-color: #ffffcc }\n", + ".highlight { background: #f8f8f8; }\n", + ".highlight .c { color: #408080; font-style: italic } /* Comment */\n", + 
".highlight .err { border: 1px solid #FF0000 } /* Error */\n", + ".highlight .k { color: #008000; font-weight: bold } /* Keyword */\n", + ".highlight .o { color: #666666 } /* Operator */\n", + ".highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */\n", + ".highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */\n", + ".highlight .cp { color: #BC7A00 } /* Comment.Preproc */\n", + ".highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */\n", + ".highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */\n", + ".highlight .cs { color: #408080; font-style: italic } /* Comment.Special */\n", + ".highlight .gd { color: #A00000 } /* Generic.Deleted */\n", + ".highlight .ge { font-style: italic } /* Generic.Emph */\n", + ".highlight .gr { color: #FF0000 } /* Generic.Error */\n", + ".highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */\n", + ".highlight .gi { color: #00A000 } /* Generic.Inserted */\n", + ".highlight .go { color: #888888 } /* Generic.Output */\n", + ".highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */\n", + ".highlight .gs { font-weight: bold } /* Generic.Strong */\n", + ".highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */\n", + ".highlight .gt { color: #0044DD } /* Generic.Traceback */\n", + ".highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */\n", + ".highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */\n", + ".highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */\n", + ".highlight .kp { color: #008000 } /* Keyword.Pseudo */\n", + ".highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */\n", + ".highlight .kt { color: #B00040 } /* Keyword.Type */\n", + ".highlight .m { color: #666666 } /* Literal.Number */\n", + ".highlight .s { color: #BA2121 } /* Literal.String */\n", + ".highlight .na { color: #7D9029 } /* Name.Attribute */\n", + ".highlight .nb { color: #008000 } /* Name.Builtin */\n", + ".highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */\n", + ".highlight .no { color: #880000 } /* Name.Constant */\n", + ".highlight .nd { color: #AA22FF } /* Name.Decorator */\n", + ".highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */\n", + ".highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */\n", + ".highlight .nf { color: #0000FF } /* Name.Function */\n", + ".highlight .nl { color: #A0A000 } /* Name.Label */\n", + ".highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */\n", + ".highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */\n", + ".highlight .nv { color: #19177C } /* Name.Variable */\n", + ".highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */\n", + ".highlight .w { color: #bbbbbb } /* Text.Whitespace */\n", + ".highlight .mb { color: #666666 } /* Literal.Number.Bin */\n", + ".highlight .mf { color: #666666 } /* Literal.Number.Float */\n", + ".highlight .mh { color: #666666 } /* Literal.Number.Hex */\n", + ".highlight .mi { color: #666666 } /* Literal.Number.Integer */\n", + ".highlight .mo { color: #666666 } /* Literal.Number.Oct */\n", + ".highlight .sa { color: #BA2121 } /* Literal.String.Affix */\n", + ".highlight .sb { color: #BA2121 } /* Literal.String.Backtick */\n", + ".highlight .sc { color: #BA2121 } /* Literal.String.Char */\n", + ".highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */\n", + ".highlight .sd { color: 
#BA2121; font-style: italic } /* Literal.String.Doc */\n", + ".highlight .s2 { color: #BA2121 } /* Literal.String.Double */\n", + ".highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */\n", + ".highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */\n", + ".highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */\n", + ".highlight .sx { color: #008000 } /* Literal.String.Other */\n", + ".highlight .sr { color: #BB6688 } /* Literal.String.Regex */\n", + ".highlight .s1 { color: #BA2121 } /* Literal.String.Single */\n", + ".highlight .ss { color: #19177C } /* Literal.String.Symbol */\n", + ".highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */\n", + ".highlight .fm { color: #0000FF } /* Name.Function.Magic */\n", + ".highlight .vc { color: #19177C } /* Name.Variable.Class */\n", + ".highlight .vg { color: #19177C } /* Name.Variable.Global */\n", + ".highlight .vi { color: #19177C } /* Name.Variable.Instance */\n", + ".highlight .vm { color: #19177C } /* Name.Variable.Magic */\n", + ".highlight .il { color: #666666 } /* Literal.Number.Integer.Long */</style><div class=\"highlight\"><pre><span></span><span class=\"p\">{</span>\n", + " <span class=\"nt\">&quot;type&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;report&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;id&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;report--2add14d6-bbf3-4308-bb8e-226d314a08e4&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;created&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-08T18:34:08.042Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;modified&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-08T18:34:08.042Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;name&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;The Crusades: Looking into the relentless infiltration of Israels digital infrastructure.&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;published&quot;</span><span class=\"p\">:</span> <span class=\"s2\">&quot;2017-05-08T10:24:11.011Z&quot;</span><span class=\"p\">,</span>\n", + " <span class=\"nt\">&quot;object_refs&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;malware--2daa14d6-cbf3-4308-bb8e-226d324a08e4&quot;</span>\n", + " <span class=\"p\">],</span>\n", + " <span class=\"nt\">&quot;labels&quot;</span><span class=\"p\">:</span> <span class=\"p\">[</span>\n", + " <span class=\"s2\">&quot;threat-report&quot;</span>\n", + " <span class=\"p\">]</span>\n", + "<span class=\"p\">}</span>\n", + "</pre></div>\n" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ @@ -280,17 +646,15 @@ "report = mem_2.get(\"report--2add14d6-bbf3-4308-bb8e-226d314a08e4\")\n", "\n", "# for visualpurposes\n", - "# Note: Since STIX content was added to MemoryStore as json,\n", - "# it is maintained as python dictionaries ( as opposed to STIX objects)\n", "print(report)" ] } ], "metadata": { "kernelspec": { - "display_name": "Python 2", + "display_name": "cti-python-stix2", "language": "python", - "name": "python2" + "name": "cti-python-stix2" }, "language_info": { "codemirror_mode": { diff --git a/docs/guide/taxii.ipynb b/docs/guide/taxii.ipynb index 2f8905b..b0f0cea 100644 --- 
a/docs/guide/taxii.ipynb +++ b/docs/guide/taxii.ipynb @@ -58,9 +58,9 @@ "source": [ "## TAXIICollection\n", "\n", - "The TAXIICollection suite contains [TAXIICollectionStore](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionStore), [TAXIICollectionSource](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSource), and [TAXIICollectionSink](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSink). [TAXIICollectionStore](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionStore) for pushing and retrieving STIX content to local/remote TAXII Collection(s). [TAXIICollectionSource](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSource) for retrieving STIX content to local/remote TAXII Collection(s). [TAXIICollectionSink](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSink) for pushing STIX content to local/remote TAXII Collection(s). Each of the interfaces is designed to be bound to a Collection from the [taxii2client](https://github.com/oasis-open/cti-taxii-client) library (taxii2client.Collection), where all [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API calls will be executed through that Collection instance.\n", + "The TAXIICollection suite contains [TAXIICollectionStore](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionStore), [TAXIICollectionSource](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSource), and [TAXIICollectionSink](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSink). [TAXIICollectionStore](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionStore) pushes and retrieves STIX content to local/remote TAXII Collection(s). [TAXIICollectionSource](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSource) retrieves STIX content from local/remote TAXII Collection(s). [TAXIICollectionSink](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSink) pushes STIX content to local/remote TAXII Collection(s). Each of the interfaces is designed to be bound to a Collection from the [taxii2client](https://github.com/oasis-open/cti-taxii-client) library (taxii2client.Collection), where all [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API calls will be executed through that Collection instance.\n", "\n", - "A note on TAXII2 searching/filtering of STIX content. TAXII2 server implementations natively support searching on the STIX2 object properties: id, type and version; API requests made to TAXII2 can contain filter arguments for those 3 properties. However, the [TAXIICollection](../api/sources/stix2.sources.taxii.rst) suite supports searching on all STIX2 common object properties (see [Filters](../api/sources/stix2.sources.filters.rst) documentation for full listing). This works simply by augmenting the filtering that is done remotely at the TAXII2 server instance. [TAXIICollection](../api/sources/stix2.sources.taxii.rst) will seperate any supplied queries into TAXII supported filters and non-supported filters. During a [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API call, TAXII2 supported filters get inserted into the TAXII2 server request (to be evaluated at the server). 
The rest of the filters are kept locally and then applied to the STIX2 content that is returned from the TAXII2 server, before being returned from the [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API call. \n", + "A note on TAXII2 searching/filtering of STIX content: TAXII2 server implementations natively support searching on the STIX2 object properties: id, type and version; API requests made to TAXII2 can contain filter arguments for those 3 properties. However, the [TAXIICollection](../api/sources/stix2.sources.taxii.rst) suite supports searching on all STIX2 common object properties (see [Filters](../api/sources/stix2.sources.filters.rst) documentation for full listing). This works simply by augmenting the filtering that is done remotely at the TAXII2 server instance. [TAXIICollection](../api/sources/stix2.sources.taxii.rst) will seperate any supplied queries into TAXII supported filters and non-supported filters. During a [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API call, TAXII2 supported filters get inserted into the TAXII2 server request (to be evaluated at the server). The rest of the filters are kept locally and then applied to the STIX2 content that is returned from the TAXII2 server, before being returned from the [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API call. \n", "\n", "### TAXIICollection API\n", "\n", diff --git a/stix2/sources/filesystem.py b/stix2/sources/filesystem.py index e92c525..f4311be 100644 --- a/stix2/sources/filesystem.py +++ b/stix2/sources/filesystem.py @@ -1,8 +1,6 @@ """ Python STIX 2.0 FileSystem Source/Sink -TODO: - Test everything """ import json @@ -22,7 +20,12 @@ class FileSystemStore(DataStore): Args: stix_dir (str): path to directory of STIX objects - bundlify (bool): Whether to wrap objects in bundles when saving them. + allow_custom (bool): whether to allow custom STIX content to be + pushed/retrieved. Defaults to True for FileSystemSource side(retrieving data) + and False for FileSystemSink side(pushing data). However, when + parameter is supplied, it will be applied to both FileSystemSource + and FileSystemSink. + bundlify (bool): whether to wrap objects in bundles when saving them. Default: False. Attributes: @@ -30,10 +33,16 @@ class FileSystemStore(DataStore): sink (FileSystemSink): FileSystemSink """ - def __init__(self, stix_dir, bundlify=False): + def __init__(self, stix_dir, allow_custom=None, bundlify=False): + if allow_custom is None: + allow_custom_source = True + allow_custom_sink = False + else: + allow_custom_sink = allow_custom_source = allow_custom + super(FileSystemStore, self).__init__( - source=FileSystemSource(stix_dir=stix_dir), - sink=FileSystemSink(stix_dir=stix_dir, bundlify=bundlify) + source=FileSystemSource(stix_dir=stix_dir, allow_custom=allow_custom_source), + sink=FileSystemSink(stix_dir=stix_dir, allow_custom=allow_custom_sink, bundlify=bundlify) ) @@ -46,13 +55,16 @@ class FileSystemSink(DataSink): Args: stix_dir (str): path to directory of STIX objects. + allow_custom (bool): Whether to allow custom STIX content to be + added to the FileSystemSource. Default: False bundlify (bool): Whether to wrap objects in bundles when saving them. Default: False. 
""" - def __init__(self, stix_dir, bundlify=False): + def __init__(self, stix_dir, allow_custom=False, bundlify=False): super(FileSystemSink, self).__init__() self._stix_dir = os.path.abspath(stix_dir) + self.allow_custom = allow_custom self.bundlify = bundlify if not os.path.exists(self._stix_dir): @@ -71,20 +83,18 @@ class FileSystemSink(DataSink): os.makedirs(os.path.dirname(path)) if self.bundlify: - stix_obj = Bundle(stix_obj) + stix_obj = Bundle(stix_obj, allow_custom=self.allow_custom) with open(path, "w") as f: f.write(str(stix_obj)) - def add(self, stix_data=None, allow_custom=False, version=None): + def add(self, stix_data=None, version=None): """Add STIX objects to file directory. Args: stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content in a STIX object (or list of), dict (or list of), or a STIX 2.0 json encoded string. - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -100,24 +110,24 @@ class FileSystemSink(DataSink): self._check_path_and_write(stix_data) elif isinstance(stix_data, (str, dict)): - stix_data = parse(stix_data, allow_custom=allow_custom, version=version) + stix_data = parse(stix_data, allow_custom=self.allow_custom, version=version) if stix_data["type"] == "bundle": # extract STIX objects for stix_obj in stix_data.get("objects", []): - self.add(stix_obj, allow_custom=allow_custom, version=version) + self.add(stix_obj, version=version) else: # adding json-formatted STIX - self._check_path_and_write(stix_data) + self._check_path_and_write(stix_data,) elif isinstance(stix_data, Bundle): # recursively add individual STIX objects for stix_obj in stix_data.get("objects", []): - self.add(stix_obj, allow_custom=allow_custom, version=version) + self.add(stix_obj, version=version) elif isinstance(stix_data, list): # recursively add individual STIX objects for stix_obj in stix_data: - self.add(stix_obj, allow_custom=allow_custom, version=version) + self.add(stix_obj, version=version) else: raise TypeError("stix_data must be a STIX object (or list of), " @@ -134,11 +144,14 @@ class FileSystemSource(DataSource): Args: stix_dir (str): path to directory of STIX objects + allow_custom (bool): Whether to allow custom STIX content to be + added to the FileSystemSink. Default: True """ - def __init__(self, stix_dir): + def __init__(self, stix_dir, allow_custom=True): super(FileSystemSource, self).__init__() self._stix_dir = os.path.abspath(stix_dir) + self.allow_custom = allow_custom if not os.path.exists(self._stix_dir): raise ValueError("directory path for STIX data does not exist: %s" % self._stix_dir) @@ -147,15 +160,13 @@ class FileSystemSource(DataSource): def stix_dir(self): return self._stix_dir - def get(self, stix_id, allow_custom=False, version=None, _composite_filters=None): + def get(self, stix_id, version=None, _composite_filters=None): """Retrieve STIX object from file directory via STIX ID. Args: stix_id (str): The STIX ID of the STIX object to be retrieved. _composite_filters (set): set of filters passed from the parent CompositeDataSource, not user supplied - allow_custom (bool): whether to retrieve custom objects/properties - or not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. 
@@ -167,7 +178,7 @@ class FileSystemSource(DataSource): """ query = [Filter("id", "=", stix_id)] - all_data = self.query(query=query, allow_custom=allow_custom, version=version, _composite_filters=_composite_filters) + all_data = self.query(query=query, version=version, _composite_filters=_composite_filters) if all_data: stix_obj = sorted(all_data, key=lambda k: k['modified'])[0] @@ -176,7 +187,7 @@ class FileSystemSource(DataSource): return stix_obj - def all_versions(self, stix_id, allow_custom=False, version=None, _composite_filters=None): + def all_versions(self, stix_id, version=None, _composite_filters=None): """Retrieve STIX object from file directory via STIX ID, all versions. Note: Since FileSystem sources/sinks don't handle multiple versions @@ -186,8 +197,6 @@ class FileSystemSource(DataSource): stix_id (str): The STIX ID of the STIX objects to be retrieved. _composite_filters (set): set of filters passed from the parent CompositeDataSource, not user supplied - allow_custom (bool): whether to retrieve custom objects/properties - or not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -197,9 +206,9 @@ class FileSystemSource(DataSource): a python STIX objects and then returned """ - return [self.get(stix_id=stix_id, allow_custom=allow_custom, version=version, _composite_filters=_composite_filters)] + return [self.get(stix_id=stix_id, version=version, _composite_filters=_composite_filters)] - def query(self, query=None, allow_custom=False, version=None, _composite_filters=None): + def query(self, query=None, version=None, _composite_filters=None): """Search and retrieve STIX objects based on the complete query. A "complete query" includes the filters from the query, the filters @@ -210,8 +219,6 @@ class FileSystemSource(DataSource): query (list): list of filters to search on _composite_filters (set): set of filters passed from the CompositeDataSource, not user supplied - allow_custom (bool): whether to retrieve custom objects/properties - or not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -221,6 +228,7 @@ class FileSystemSource(DataSource): parsed into a python STIX objects and then returned. """ + all_data = [] if query is None: @@ -304,7 +312,7 @@ class FileSystemSource(DataSource): all_data = deduplicate(all_data) # parse python STIX objects from the STIX object dicts - stix_objs = [parse(stix_obj_dict, allow_custom=allow_custom, version=version) for stix_obj_dict in all_data] + stix_objs = [parse(stix_obj_dict, allow_custom=self.allow_custom, version=version) for stix_obj_dict in all_data] return stix_objs diff --git a/stix2/sources/memory.py b/stix2/sources/memory.py index 308d0d0..5d08d7c 100644 --- a/stix2/sources/memory.py +++ b/stix2/sources/memory.py @@ -1,9 +1,6 @@ """ Python STIX 2.0 Memory Source/Sink -TODO: - Run through tests again, lot of changes. - TODO: Use deduplicate() calls only when memory corpus is dirty (been added to) can save a lot of time for successive queries @@ -24,7 +21,7 @@ from stix2.sources import DataSink, DataSource, DataStore from stix2.sources.filters import Filter, apply_common_filters -def _add(store, stix_data=None, allow_custom=False, version=None): +def _add(store, stix_data=None, version=None): """Add STIX objects to MemoryStore/Sink. Adds STIX objects to an in-memory dictionary for fast lookup. 
@@ -32,8 +29,6 @@ def _add(store, stix_data=None, allow_custom=False, version=None): Args: stix_data (list OR dict OR STIX object): STIX objects to be added - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -46,28 +41,19 @@ def _add(store, stix_data=None, allow_custom=False, version=None): if stix_data["type"] == "bundle": # adding a json bundle - so just grab STIX objects for stix_obj in stix_data.get("objects", []): - _add(store, stix_obj, allow_custom=allow_custom, version=version) + _add(store, stix_obj, version=version) else: # adding a json STIX object store._data[stix_data["id"]] = stix_data - elif isinstance(stix_data, str): - # adding json encoded string of STIX content - stix_data = parse(stix_data, allow_custom=allow_custom, version=version) - if stix_data["type"] == "bundle": - # recurse on each STIX object in bundle - for stix_obj in stix_data.get("objects", []): - _add(store, stix_obj, allow_custom=allow_custom, version=version) - else: - _add(store, stix_data, allow_custom=allow_custom, version=version) - elif isinstance(stix_data, list): # STIX objects are in a list- recurse on each object for stix_obj in stix_data: - _add(store, stix_obj, allow_custom=allow_custom, version=version) + _add(store, stix_obj, version=version) else: - raise TypeError("stix_data must be a STIX object (or list of), JSON formatted STIX (or list of), or a JSON formatted STIX bundle") + raise TypeError("stix_data expected to be a python-stix2 object (or list of), JSON formatted STIX (or list of)," + " or a JSON formatted STIX bundle. stix_data was of type: " + str(type(stix_data))) class MemoryStore(DataStore): @@ -81,8 +67,9 @@ class MemoryStore(DataStore): Args: stix_data (list OR dict OR STIX object): STIX content to be added - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. + allow_custom (bool): whether to allow custom STIX content. + Only applied when export/input functions called, i.e. + load_from_file() and save_to_file(). Defaults to True. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -92,11 +79,11 @@ class MemoryStore(DataStore): sink (MemorySink): MemorySink """ - def __init__(self, stix_data=None, allow_custom=False, version=None): + def __init__(self, stix_data=None, allow_custom=True, version=None): self._data = {} if stix_data: - _add(self, stix_data, allow_custom=allow_custom, version=version) + _add(self, stix_data, version=version) super(MemoryStore, self).__init__( source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True), @@ -109,8 +96,6 @@ class MemoryStore(DataStore): Args: file_path (str): file path to write STIX data to - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. """ return self.sink.save_to_file(*args, **kwargs) @@ -123,8 +108,6 @@ class MemoryStore(DataStore): Args: file_path (str): file path to load STIX data from - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -141,37 +124,39 @@ class MemorySink(DataSink): Args: stix_data (dict OR list): valid STIX 2.0 content in bundle or a list. 
- _store (bool): if the MemorySink is a part of a DataStore, + _store (bool): whether the MemorySink is a part of a DataStore, in which case "stix_data" is a direct reference to shared memory with DataSource. Not user supplied - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. + allow_custom (bool): whether to allow custom objects/properties + when exporting STIX content to file. + Default: True. Attributes: _data (dict): the in-memory dict that holds STIX objects. - If apart of a MemoryStore, dict is shared between with - a MemorySource + If part of a MemoryStore, the dict is shared with a MemorySource """ - def __init__(self, stix_data=None, allow_custom=False, version=None, _store=False): + def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False): super(MemorySink, self).__init__() self._data = {} + self.allow_custom = allow_custom if _store: self._data = stix_data elif stix_data: - _add(self, stix_data, allow_custom=allow_custom, version=version) + _add(self, stix_data, version=version) - def add(self, stix_data, allow_custom=False, version=None): - _add(self, stix_data, allow_custom=allow_custom, version=version) + def add(self, stix_data, version=None): + _add(self, stix_data, version=version) add.__doc__ = _add.__doc__ - def save_to_file(self, file_path, allow_custom=False): + def save_to_file(self, file_path): file_path = os.path.abspath(file_path) + if not os.path.exists(os.path.dirname(file_path)): os.makedirs(os.path.dirname(file_path)) with open(file_path, "w") as f: - f.write(str(Bundle(list(self._data.values()), allow_custom=allow_custom))) + f.write(str(Bundle(list(self._data.values()), allow_custom=self.allow_custom))) save_to_file.__doc__ = MemoryStore.save_to_file.__doc__ @@ -188,23 +173,24 @@ class MemorySource(DataSource): _store (bool): if the MemorySource is a part of a DataStore, in which case "stix_data" is a direct reference to shared memory with DataSink. Not user supplied - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. + allow_custom (bool): whether to allow custom objects/properties + when importing STIX content from file. + Default: True. Attributes: _data (dict): the in-memory dict that holds STIX objects. - If apart of a MemoryStore, dict is shared between with - a MemorySink + If part of a MemoryStore, the dict is shared with a MemorySink """ - def __init__(self, stix_data=None, allow_custom=False, version=None, _store=False): + def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False): super(MemorySource, self).__init__() self._data = {} + self.allow_custom = allow_custom if _store: self._data = stix_data elif stix_data: - _add(self, stix_data, allow_custom=allow_custom, version=version) + _add(self, stix_data, version=version) def get(self, stix_id, _composite_filters=None): """Retrieve STIX object from in-memory dict via STIX ID. 
@@ -260,6 +246,7 @@ class MemorySource(DataSource): is returned in the same form as it as added """ + return [self.get(stix_id=stix_id, _composite_filters=_composite_filters)] def query(self, query=None, _composite_filters=None): @@ -301,8 +288,12 @@ class MemorySource(DataSource): return all_data - def load_from_file(self, file_path, allow_custom=False, version=None): - file_path = os.path.abspath(file_path) - stix_data = json.load(open(file_path, "r")) - _add(self, stix_data, allow_custom=allow_custom, version=version) + def load_from_file(self, file_path, version=None): + stix_data = json.load(open(os.path.abspath(file_path), "r")) + + if stix_data["type"] == "bundle": + for stix_obj in stix_data["objects"]: + _add(self, stix_data=parse(stix_obj, allow_custom=self.allow_custom, version=stix_data["spec_version"])) + else: + _add(self, stix_data=parse(stix_obj, allow_custom=self.allow_custom, version=version)) load_from_file.__doc__ = MemoryStore.load_from_file.__doc__ diff --git a/stix2/sources/taxii.py b/stix2/sources/taxii.py index 8eb5069..2d54725 100644 --- a/stix2/sources/taxii.py +++ b/stix2/sources/taxii.py @@ -1,6 +1,7 @@ """ Python STIX 2.x TAXIICollectionStore """ +from requests.exceptions import HTTPError from stix2.base import _STIXBase from stix2.core import Bundle, parse @@ -18,11 +19,23 @@ class TAXIICollectionStore(DataStore): Args: collection (taxii2.Collection): TAXII Collection instance + allow_custom (bool): whether to allow custom STIX content to be + pushed/retrieved. Defaults to True for TAXIICollectionSource + side(retrieving data) and False for TAXIICollectionSink + side(pushing data). However, when parameter is supplied, it will + be applied to both TAXIICollectionSource/Sink. + """ - def __init__(self, collection): + def __init__(self, collection, allow_custom=None): + if allow_custom is None: + allow_custom_source = True + allow_custom_sink = False + else: + allow_custom_sink = allow_custom_source = allow_custom + super(TAXIICollectionStore, self).__init__( - source=TAXIICollectionSource(collection), - sink=TAXIICollectionSink(collection) + source=TAXIICollectionSource(collection, allow_custom=allow_custom_source), + sink=TAXIICollectionSink(collection, allow_custom=allow_custom_sink) ) @@ -32,48 +45,49 @@ class TAXIICollectionSink(DataSink): Args: collection (taxii2.Collection): TAXII2 Collection instance + allow_custom (bool): Whether to allow custom STIX content to be + added to the TAXIICollectionSink. Default: False """ - def __init__(self, collection): + def __init__(self, collection, allow_custom=False): super(TAXIICollectionSink, self).__init__() self.collection = collection + self.allow_custom = allow_custom - def add(self, stix_data, allow_custom=False, version=None): + def add(self, stix_data, version=None): """Add/push STIX content to TAXII Collection endpoint Args: stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content in a STIX object (or Bundle), STIX onject dict (or Bundle dict), or a STIX 2.0 json encoded string, or list of any of the following - allow_custom (bool): whether to allow custom objects/properties or - not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. 
""" if isinstance(stix_data, _STIXBase): # adding python STIX object - bundle = dict(Bundle(stix_data, allow_custom=allow_custom)) + bundle = dict(Bundle(stix_data, allow_custom=self.allow_custom)) elif isinstance(stix_data, dict): # adding python dict (of either Bundle or STIX obj) if stix_data["type"] == "bundle": bundle = stix_data else: - bundle = dict(Bundle(stix_data, allow_custom=allow_custom)) + bundle = dict(Bundle(stix_data, allow_custom=self.allow_custom)) elif isinstance(stix_data, list): # adding list of something - recurse on each for obj in stix_data: - self.add(obj, allow_custom=allow_custom, version=version) + self.add(obj, version=version) elif isinstance(stix_data, str): # adding json encoded string of STIX content - stix_data = parse(stix_data, allow_custom=allow_custom, version=version) + stix_data = parse(stix_data, allow_custom=self.allow_custom, version=version) if stix_data["type"] == "bundle": bundle = dict(stix_data) else: - bundle = dict(Bundle(stix_data, allow_custom=allow_custom)) + bundle = dict(Bundle(stix_data, allow_custom=self.allow_custom)) else: raise TypeError("stix_data must be as STIX object(or list of),json formatted STIX (or list of), or a json formatted STIX bundle") @@ -87,13 +101,16 @@ class TAXIICollectionSource(DataSource): Args: collection (taxii2.Collection): TAXII Collection instance + allow_custom (bool): Whether to allow custom STIX content to be + added to the FileSystemSink. Default: True """ - def __init__(self, collection): + def __init__(self, collection, allow_custom=True): super(TAXIICollectionSource, self).__init__() self.collection = collection + self.allow_custom = allow_custom - def get(self, stix_id, allow_custom=False, version=None, _composite_filters=None): + def get(self, stix_id, version=None, _composite_filters=None): """Retrieve STIX object from local/remote STIX Collection endpoint. @@ -101,8 +118,6 @@ class TAXIICollectionSource(DataSource): stix_id (str): The STIX ID of the STIX object to be retrieved. _composite_filters (set): set of filters passed from the parent CompositeDataSource, not user supplied - allow_custom (bool): whether to retrieve custom objects/properties - or not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. 
@@ -121,12 +136,16 @@ class TAXIICollectionSource(DataSource): # dont extract TAXII filters from query (to send to TAXII endpoint) # as directly retrieveing a STIX object by ID - stix_objs = self.collection.get_object(stix_id)["objects"] + try: + stix_objs = self.collection.get_object(stix_id)["objects"] + stix_obj = list(apply_common_filters(stix_objs, query)) - stix_obj = list(apply_common_filters(stix_objs, query)) + except HTTPError: + # if resource not found or access is denied from TAXII server, return None + stix_obj = [] if len(stix_obj): - stix_obj = parse(stix_obj[0], allow_custom=allow_custom, version=version) + stix_obj = parse(stix_obj[0], allow_custom=self.allow_custom, version=version) if stix_obj.id != stix_id: # check - was added to handle erroneous TAXII servers stix_obj = None @@ -135,7 +154,7 @@ class TAXIICollectionSource(DataSource): return stix_obj - def all_versions(self, stix_id, allow_custom=False, version=None, _composite_filters=None): + def all_versions(self, stix_id, version=None, _composite_filters=None): """Retrieve STIX object from local/remote TAXII Collection endpoint, all versions of it @@ -143,8 +162,6 @@ class TAXIICollectionSource(DataSource): stix_id (str): The STIX ID of the STIX objects to be retrieved. _composite_filters (set): set of filters passed from the parent CompositeDataSource, not user supplied - allow_custom (bool): whether to retrieve custom objects/properties - or not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. @@ -158,17 +175,17 @@ class TAXIICollectionSource(DataSource): Filter("match[version]", "=", "all") ] - all_data = self.query(query=query, allow_custom=allow_custom, _composite_filters=_composite_filters) + all_data = self.query(query=query, _composite_filters=_composite_filters) # parse STIX objects from TAXII returned json - all_data = [parse(stix_obj, allow_custom=allow_custom, version=version) for stix_obj in all_data] + all_data = [parse(stix_obj, allow_custom=self.allow_custom, version=version) for stix_obj in all_data] # check - was added to handle erroneous TAXII servers all_data_clean = [stix_obj for stix_obj in all_data if stix_obj.id == stix_id] return all_data_clean - def query(self, query=None, allow_custom=False, version=None, _composite_filters=None): + def query(self, query=None, version=None, _composite_filters=None): """Search and retreive STIX objects based on the complete query A "complete query" includes the filters from the query, the filters @@ -179,8 +196,6 @@ class TAXIICollectionSource(DataSource): query (list): list of filters to search on _composite_filters (set): set of filters passed from the CompositeDataSource, not user supplied - allow_custom (bool): whether to retrieve custom objects/properties - or not. Default: False. version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, use latest version. 
@@ -209,16 +224,21 @@ class TAXIICollectionSource(DataSource): taxii_filters = self._parse_taxii_filters(query) # query TAXII collection - all_data = self.collection.get_objects(filters=taxii_filters)["objects"] + try: + all_data = self.collection.get_objects(filters=taxii_filters)["objects"] + + # deduplicate data (before filtering as reduces wasted filtering) + all_data = deduplicate(all_data) - # deduplicate data (before filtering as reduces wasted filtering) - all_data = deduplicate(all_data) + # apply local (CompositeDataSource, TAXIICollectionSource and query filters) + all_data = list(apply_common_filters(all_data, query)) - # apply local (CompositeDataSource, TAXIICollectionSource and query filters) - all_data = list(apply_common_filters(all_data, query)) + except HTTPError: + # if resources not found or access is denied from TAXII server, return empty list + all_data = [] # parse python STIX objects from the STIX object dicts - stix_objs = [parse(stix_obj_dict, allow_custom=allow_custom, version=version) for stix_obj_dict in all_data] + stix_objs = [parse(stix_obj_dict, allow_custom=self.allow_custom, version=version) for stix_obj_dict in all_data] return stix_objs diff --git a/stix2/utils.py b/stix2/utils.py index 541e6d8..73337d0 100644 --- a/stix2/utils.py +++ b/stix2/utils.py @@ -1,5 +1,4 @@ """Utility functions and classes for the stix2 library.""" - from collections import Mapping import copy import datetime as dt @@ -16,6 +15,9 @@ from .exceptions import (InvalidValueError, RevokeError, # timestamps in a single object, the timestamps will vary by a few microseconds. NOW = object() +# STIX object properties that cannot be modified +STIX_UNMOD_PROPERTIES = ["created", "created_by_ref", "id", "type"] + class STIXdatetime(dt.datetime): def __new__(cls, *args, **kwargs): @@ -215,7 +217,7 @@ def new_version(data, **kwargs): properties_to_change = kwargs.keys() # Make sure certain properties aren't trying to change - for prop in ["created", "created_by_ref", "id", "type"]: + for prop in STIX_UNMOD_PROPERTIES: if prop in properties_to_change: unchangable_properties.append(prop) if unchangable_properties: @@ -227,8 +229,11 @@ def new_version(data, **kwargs): elif 'modified' in data: old_modified_property = parse_into_datetime(data.get('modified'), precision='millisecond') new_modified_property = parse_into_datetime(kwargs['modified'], precision='millisecond') - if new_modified_property < old_modified_property: - raise InvalidValueError(cls, 'modified', "The new modified datetime cannot be before the current modified datatime.") + if new_modified_property <= old_modified_property: + raise InvalidValueError(cls, 'modified', + "The new modified datetime cannot be before than or equal to the current modified datetime." + "It cannot be equal, as according to STIX 2 specification, objects that are different " + "but have the same id and modified timestamp do not have defined consumer behavior.") new_obj_inner.update(kwargs) # Exclude properties with a value of 'None' in case data is not an instance of a _STIXBase subclass return cls(**{k: v for k, v in new_obj_inner.items() if v is not None}) @@ -257,5 +262,68 @@ def get_class_hierarchy_names(obj): return names +def remove_custom_stix(stix_obj): + """remove any custom STIX objects or properties + + Warning: This function is a best effort utility, in that + it will remove custom objects and properties based on the + type names; i.e. if "x-" prefixes object types, and "x_" + prefixes property types. 
According to the STIX2 spec, + those naming conventions are a SHOULDs not MUSTs, meaning + that valid custom STIX content may ignore those conventions + and in effect render this utility function invalid when used + on that STIX content. + + Args: + stix_obj (dict OR python-stix obj): a single python-stix object + or dict of a STIX object + + Returns: + A new version of the object with any custom content removed + """ + + if stix_obj["type"].startswith("x-"): + # if entire object is custom, discard + return None + + custom_props = [] + for prop in stix_obj.items(): + if prop[0].startswith("x_"): + # for every custom property, record it and set value to None + # (so we can pass it to new_version() and it will be dropped) + custom_props.append((prop[0], None)) + + if custom_props: + # obtain set of object properties that can be transferred + # to a new object version. This is 1)custom props with their + # values set to None, and 2)any properties left that are not + # unmodifiable STIX properties or the "modified" property + + # set of properties that are not supplied to new_version() + # to be used for updating properties. This includes unmodifiable + # properties (properties that new_version() just re-uses from the + # existing STIX object) and the "modified" property. We dont supply the + # "modified" property so that new_version() creates a new datetime + # value for this property + non_supplied_props = STIX_UNMOD_PROPERTIES + ["modified"] + + props = [(prop, stix_obj[prop]) for prop in stix_obj if prop not in non_supplied_props] + + # add to set the custom properties we want to get rid of (with their value=None) + props.extend(custom_props) + + new_obj = new_version(stix_obj, **(dict(props))) + + while parse_into_datetime(new_obj["modified"]) == parse_into_datetime(stix_obj["modified"]): + # Prevents bug when fast computation allows multiple STIX object + # versions to be created in single unit of time + new_obj = new_version(stix_obj, **(dict(props))) + + return new_obj + + else: + return stix_obj + + def get_type_from_id(stix_id): return stix_id.split('--', 1)[0]
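For orientation after the long diff above, here is a minimal usage sketch of the behaviour the patch introduces: allow_custom is configured once on the store instead of being passed to every call, and the new stix2.utils.remove_custom_stix() helper strips custom content. The store choice and the campaign values are invented for illustration; only the API shape comes from the patch.

from stix2 import Campaign
from stix2.sources.memory import MemoryStore
from stix2.utils import remove_custom_stix

store = MemoryStore()                      # allow_custom now defaults to True for Memory stores
camp = Campaign(name="Hannibal",
                objective="Alpine crossing",
                x_empire="Carthage",       # custom property
                allow_custom=True)
store.add(camp)                            # no per-call allow_custom argument any more

clean = remove_custom_stix(store.get(camp.id))
assert "x_empire" not in clean             # custom property stripped from the new version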
Documentation: CompositeDataSource example seems wrong

First, the variable ta should probably be named something else, since "ta" suggests a Threat Actor while the ID being fetched is an intrusion set. Second, the shown 'output' is two indicators, neither of which has the id 'indicator--37a6a5de-a5b9-425a-903a-4ae9cbf1ff3f'. The example in question:

# get an object that is only in the filesystem
ta = cs.get('intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a')
print(ta)

# get an object that is only in the TAXII collection
ind = cs.get('indicator--37a6a5de-a5b9-425a-903a-4ae9cbf1ff3f')
print(ind)
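A hedged sketch of how the corrected guide example might read, assuming cs is built from a FileSystemSource and a TAXIICollectionSource as the guide describes. The directory path is a placeholder, `collection` is assumed to be a pre-configured taxii2client Collection, and add_data_sources reflects current stix2 naming (older releases may differ):

from stix2.sources import CompositeDataSource
from stix2.sources.filesystem import FileSystemSource
from stix2.sources.taxii import TAXIICollectionSource

fs = FileSystemSource("/path/to/stix2_content")   # hypothetical local STIX directory
ts = TAXIICollectionSource(collection)            # `collection`: existing taxii2client Collection

cs = CompositeDataSource()
cs.add_data_sources([fs, ts])

# get an object that is only in the filesystem (an intrusion set, so name the variable accordingly)
intrusion_set = cs.get('intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a')
print(intrusion_set)

# get an object that is only in the TAXII collection
ind = cs.get('indicator--37a6a5de-a5b9-425a-903a-4ae9cbf1ff3f')
print(ind)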
oasis-open/cti-python-stix2
diff --git a/stix2/test/test_filesystem.py b/stix2/test/test_filesystem.py index 68fc185..020fee5 100644 --- a/stix2/test/test_filesystem.py +++ b/stix2/test/test_filesystem.py @@ -364,7 +364,7 @@ def test_filesystem_object_with_custom_property(fs_store): fs_store.add(camp, True) - camp_r = fs_store.get(camp.id, allow_custom=True) + camp_r = fs_store.get(camp.id) assert camp_r.id == camp.id assert camp_r.x_empire == camp.x_empire @@ -376,9 +376,9 @@ def test_filesystem_object_with_custom_property_in_bundle(fs_store): allow_custom=True) bundle = Bundle(camp, allow_custom=True) - fs_store.add(bundle, allow_custom=True) + fs_store.add(bundle) - camp_r = fs_store.get(camp.id, allow_custom=True) + camp_r = fs_store.get(camp.id) assert camp_r.id == camp.id assert camp_r.x_empire == camp.x_empire @@ -391,9 +391,9 @@ def test_filesystem_custom_object(fs_store): pass newobj = NewObj(property1='something') - fs_store.add(newobj, allow_custom=True) + fs_store.add(newobj) - newobj_r = fs_store.get(newobj.id, allow_custom=True) + newobj_r = fs_store.get(newobj.id) assert newobj_r.id == newobj.id assert newobj_r.property1 == 'something' diff --git a/stix2/test/test_memory.py b/stix2/test/test_memory.py index a7d88a8..ad78611 100644 --- a/stix2/test/test_memory.py +++ b/stix2/test/test_memory.py @@ -203,63 +203,12 @@ def test_memory_store_save_load_file(mem_store): shutil.rmtree(os.path.dirname(filename)) -def test_memory_store_add_stix_object_str(mem_store): - # add stix object string - camp_id = "campaign--111111b6-1112-4fb0-111b-b111107ca70a" - camp_name = "Aurelius" - camp_alias = "Purple Robes" - camp = """{ - "name": "%s", - "type": "campaign", - "objective": "German and French Intelligence Services", - "aliases": ["%s"], - "id": "%s", - "created": "2017-05-31T21:31:53.197755Z" - }""" % (camp_name, camp_alias, camp_id) - - mem_store.add(camp) - - camp_r = mem_store.get(camp_id) - assert camp_r["id"] == camp_id - assert camp_r["name"] == camp_name - assert camp_alias in camp_r["aliases"] - - -def test_memory_store_add_stix_bundle_str(mem_store): - # add stix bundle string - camp_id = "campaign--133111b6-1112-4fb0-111b-b111107ca70a" - camp_name = "Atilla" - camp_alias = "Huns" - bund = """{ - "type": "bundle", - "id": "bundle--112211b6-1112-4fb0-111b-b111107ca70a", - "spec_version": "2.0", - "objects": [ - { - "name": "%s", - "type": "campaign", - "objective": "Bulgarian, Albanian and Romanian Intelligence Services", - "aliases": ["%s"], - "id": "%s", - "created": "2017-05-31T21:31:53.197755Z" - } - ] - }""" % (camp_name, camp_alias, camp_id) - - mem_store.add(bund) - - camp_r = mem_store.get(camp_id) - assert camp_r["id"] == camp_id - assert camp_r["name"] == camp_name - assert camp_alias in camp_r["aliases"] - - def test_memory_store_add_invalid_object(mem_store): ind = ('indicator', IND1) # tuple isn't valid with pytest.raises(TypeError) as excinfo: mem_store.add(ind) - assert 'stix_data must be' in str(excinfo.value) - assert 'a STIX object' in str(excinfo.value) + assert 'stix_data expected to be' in str(excinfo.value) + assert 'a python-stix2 object' in str(excinfo.value) assert 'JSON formatted STIX' in str(excinfo.value) assert 'JSON formatted STIX bundle' in str(excinfo.value) diff --git a/stix2/test/test_versioning.py b/stix2/test/test_versioning.py index 8695a30..233587e 100644 --- a/stix2/test/test_versioning.py +++ b/stix2/test/test_versioning.py @@ -88,11 +88,15 @@ def test_versioning_error_bad_modified_value(): assert excinfo.value.cls == stix2.Campaign assert excinfo.value.prop_name == 
"modified" - assert excinfo.value.reason == "The new modified datetime cannot be before the current modified datatime." + assert excinfo.value.reason == "The new modified datetime cannot be before than or equal to the current modified datetime." \ + "It cannot be equal, as according to STIX 2 specification, objects that are different " \ + "but have the same id and modified timestamp do not have defined consumer behavior." msg = "Invalid value for {0} '{1}': {2}" msg = msg.format(stix2.Campaign.__name__, "modified", - "The new modified datetime cannot be before the current modified datatime.") + "The new modified datetime cannot be before than or equal to the current modified datetime." + "It cannot be equal, as according to STIX 2 specification, objects that are different " + "but have the same id and modified timestamp do not have defined consumer behavior.") assert str(excinfo.value) == msg @@ -153,7 +157,9 @@ def test_versioning_error_dict_bad_modified_value(): assert excinfo.value.cls == dict assert excinfo.value.prop_name == "modified" - assert excinfo.value.reason == "The new modified datetime cannot be before the current modified datatime." + assert excinfo.value.reason == "The new modified datetime cannot be before than or equal to the current modified datetime." \ + "It cannot be equal, as according to STIX 2 specification, objects that are different " \ + "but have the same id and modified timestamp do not have defined consumer behavior." def test_versioning_error_dict_no_modified_value(): @@ -206,3 +212,33 @@ def test_revoke_invalid_cls(): stix2.utils.revoke(campaign_v1) assert 'cannot revoke object of this type' in str(excinfo.value) + + +def test_remove_custom_stix_property(): + mal = stix2.Malware(name="ColePowers", + labels=["rootkit"], + x_custom="armada", + allow_custom=True) + + mal_nc = stix2.utils.remove_custom_stix(mal) + + assert "x_custom" not in mal_nc + assert stix2.utils.parse_into_datetime(mal["modified"], precision="millisecond") < stix2.utils.parse_into_datetime(mal_nc["modified"], + precision="millisecond") + + +def test_remove_custom_stix_object(): + @stix2.CustomObject("x-animal", [ + ("species", stix2.properties.StringProperty(required=True)), + ("animal_class", stix2.properties.StringProperty()), + ]) + class Animal(object): + def __init__(self, animal_class=None, **kwargs): + if animal_class and animal_class not in ["mammal", "bird"]: + raise ValueError("Not a recognized class of animal") + + animal = Animal(species="lion", animal_class="mammal") + + nc = stix2.utils.remove_custom_stix(animal) + + assert nc is None
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 8 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 antlr4-python3-runtime==4.9.3 async-generator==1.10 attrs==22.2.0 Babel==2.11.0 bleach==4.1.0 bump2version==1.0.1 bumpversion==0.6.0 certifi==2021.5.30 cfgv==3.3.1 charset-normalizer==2.0.12 coverage==6.2 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.9 docutils==0.18.1 entrypoints==0.4 filelock==3.4.1 identify==2.4.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.2.3 iniconfig==1.1.1 ipython-genutils==0.2.0 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nbsphinx==0.8.8 nest-asyncio==1.6.0 nodeenv==1.6.0 packaging==21.3 pandocfilters==1.5.1 platformdirs==2.4.0 pluggy==1.0.0 pre-commit==2.17.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 pyzmq==25.1.2 requests==2.27.1 simplejson==3.20.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-prompt==1.5.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -e git+https://github.com/oasis-open/cti-python-stix2.git@c682838239fce1abd3b3e471fb3c601de8663501#egg=stix2 stix2-patterns==2.0.0 taxii2-client==2.3.0 testpath==0.6.0 toml==0.10.2 tomli==1.2.3 tornado==6.1 tox==3.28.0 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 webencodings==0.5.1 zipp==3.6.0
name: cti-python-stix2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - antlr4-python3-runtime==4.9.3 - async-generator==1.10 - attrs==22.2.0 - babel==2.11.0 - bleach==4.1.0 - bump2version==1.0.1 - bumpversion==0.6.0 - cfgv==3.3.1 - charset-normalizer==2.0.12 - coverage==6.2 - decorator==5.1.1 - defusedxml==0.7.1 - distlib==0.3.9 - docutils==0.18.1 - entrypoints==0.4 - filelock==3.4.1 - identify==2.4.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.2.3 - iniconfig==1.1.1 - ipython-genutils==0.2.0 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nbsphinx==0.8.8 - nest-asyncio==1.6.0 - nodeenv==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - platformdirs==2.4.0 - pluggy==1.0.0 - pre-commit==2.17.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - pyzmq==25.1.2 - requests==2.27.1 - simplejson==3.20.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-prompt==1.5.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - stix2-patterns==2.0.0 - taxii2-client==2.3.0 - testpath==0.6.0 - toml==0.10.2 - tomli==1.2.3 - tornado==6.1 - tox==3.28.0 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/cti-python-stix2
[ "stix2/test/test_filesystem.py::test_filesystem_object_with_custom_property", "stix2/test/test_filesystem.py::test_filesystem_object_with_custom_property_in_bundle", "stix2/test/test_memory.py::test_memory_store_add_invalid_object", "stix2/test/test_versioning.py::test_versioning_error_bad_modified_value", "stix2/test/test_versioning.py::test_versioning_error_dict_bad_modified_value", "stix2/test/test_versioning.py::test_remove_custom_stix_property", "stix2/test/test_versioning.py::test_remove_custom_stix_object" ]
[]
[ "stix2/test/test_filesystem.py::test_filesystem_source_nonexistent_folder", "stix2/test/test_filesystem.py::test_filesystem_sink_nonexistent_folder", "stix2/test/test_filesystem.py::test_filesytem_source_get_object", "stix2/test/test_filesystem.py::test_filesytem_source_get_nonexistent_object", "stix2/test/test_filesystem.py::test_filesytem_source_all_versions", "stix2/test/test_filesystem.py::test_filesytem_source_query_single", "stix2/test/test_filesystem.py::test_filesytem_source_query_multiple", "stix2/test/test_filesystem.py::test_filesystem_sink_add_python_stix_object", "stix2/test/test_filesystem.py::test_filesystem_sink_add_stix_object_dict", "stix2/test/test_filesystem.py::test_filesystem_sink_add_stix_bundle_dict", "stix2/test/test_filesystem.py::test_filesystem_sink_add_json_stix_object", "stix2/test/test_filesystem.py::test_filesystem_sink_json_stix_bundle", "stix2/test/test_filesystem.py::test_filesystem_sink_add_objects_list", "stix2/test/test_filesystem.py::test_filesystem_store_get_stored_as_bundle", "stix2/test/test_filesystem.py::test_filesystem_store_get_stored_as_object", "stix2/test/test_filesystem.py::test_filesystem_store_all_versions", "stix2/test/test_filesystem.py::test_filesystem_store_query", "stix2/test/test_filesystem.py::test_filesystem_store_query_single_filter", "stix2/test/test_filesystem.py::test_filesystem_store_empty_query", "stix2/test/test_filesystem.py::test_filesystem_store_query_multiple_filters", "stix2/test/test_filesystem.py::test_filesystem_store_query_dont_include_type_folder", "stix2/test/test_filesystem.py::test_filesystem_store_add", "stix2/test/test_filesystem.py::test_filesystem_store_add_as_bundle", "stix2/test/test_filesystem.py::test_filesystem_add_bundle_object", "stix2/test/test_filesystem.py::test_filesystem_store_add_invalid_object", "stix2/test/test_filesystem.py::test_filesystem_custom_object", "stix2/test/test_filesystem.py::test_relationships", "stix2/test/test_filesystem.py::test_relationships_by_type", "stix2/test/test_filesystem.py::test_relationships_by_source", "stix2/test/test_filesystem.py::test_relationships_by_target", "stix2/test/test_filesystem.py::test_relationships_by_target_and_type", "stix2/test/test_filesystem.py::test_relationships_by_target_and_source", "stix2/test/test_filesystem.py::test_related_to", "stix2/test/test_filesystem.py::test_related_to_by_source", "stix2/test/test_filesystem.py::test_related_to_by_target", "stix2/test/test_memory.py::test_memory_source_get", "stix2/test/test_memory.py::test_memory_source_get_nonexistant_object", "stix2/test/test_memory.py::test_memory_store_all_versions", "stix2/test/test_memory.py::test_memory_store_query", "stix2/test/test_memory.py::test_memory_store_query_single_filter", "stix2/test/test_memory.py::test_memory_store_query_empty_query", "stix2/test/test_memory.py::test_memory_store_query_multiple_filters", "stix2/test/test_memory.py::test_memory_store_save_load_file", "stix2/test/test_memory.py::test_memory_store_object_with_custom_property", "stix2/test/test_memory.py::test_memory_store_object_with_custom_property_in_bundle", "stix2/test/test_memory.py::test_memory_store_custom_object", "stix2/test/test_memory.py::test_relationships", "stix2/test/test_memory.py::test_relationships_by_type", "stix2/test/test_memory.py::test_relationships_by_source", "stix2/test/test_memory.py::test_relationships_by_target", "stix2/test/test_memory.py::test_relationships_by_target_and_type", "stix2/test/test_memory.py::test_relationships_by_target_and_source", 
"stix2/test/test_memory.py::test_related_to", "stix2/test/test_memory.py::test_related_to_by_source", "stix2/test/test_memory.py::test_related_to_by_target", "stix2/test/test_versioning.py::test_making_new_version", "stix2/test/test_versioning.py::test_making_new_version_with_unset", "stix2/test/test_versioning.py::test_making_new_version_with_embedded_object", "stix2/test/test_versioning.py::test_revoke", "stix2/test/test_versioning.py::test_versioning_error_invalid_property", "stix2/test/test_versioning.py::test_versioning_error_usetting_required_property", "stix2/test/test_versioning.py::test_versioning_error_new_version_of_revoked", "stix2/test/test_versioning.py::test_versioning_error_revoke_of_revoked", "stix2/test/test_versioning.py::test_making_new_version_dict", "stix2/test/test_versioning.py::test_versioning_error_dict_no_modified_value", "stix2/test/test_versioning.py::test_making_new_version_invalid_cls", "stix2/test/test_versioning.py::test_revoke_dict", "stix2/test/test_versioning.py::test_versioning_error_revoke_of_revoked_dict", "stix2/test/test_versioning.py::test_revoke_invalid_cls" ]
[]
BSD 3-Clause "New" or "Revised" License
2,220
[ "docs/guide/memory.ipynb", "docs/guide/filesystem.ipynb", "stix2/sources/taxii.py", "docs/guide/taxii.ipynb", "stix2/sources/memory.py", "docs/guide/datastore.ipynb", "stix2/sources/filesystem.py", "stix2/utils.py" ]
[ "docs/guide/memory.ipynb", "docs/guide/filesystem.ipynb", "stix2/sources/taxii.py", "docs/guide/taxii.ipynb", "stix2/sources/memory.py", "docs/guide/datastore.ipynb", "stix2/sources/filesystem.py", "stix2/utils.py" ]
pika__pika-980
7f81bc721807c5416726016838398804c07effbd
2018-02-26 22:59:43
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
vitaly-krugl: Ready for review
diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index b1fb7dc..be1db63 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -542,22 +542,24 @@ class BlockingConnection(object): self.add_timeout(0, user_callback) - def _on_connection_blocked(self, user_callback, method_frame): + def _on_connection_blocked(self, user_callback, _impl, method_frame): """Handle Connection.Blocked notification from RabbitMQ broker :param callable user_callback: callback passed to `add_on_connection_blocked_callback` + :param SelectConnection _impl: :param pika.frame.Method method_frame: method frame having `method` member of type `pika.spec.Connection.Blocked` """ self._ready_events.append( _ConnectionBlockedEvt(user_callback, method_frame)) - def _on_connection_unblocked(self, user_callback, method_frame): + def _on_connection_unblocked(self, user_callback, _impl, method_frame): """Handle Connection.Unblocked notification from RabbitMQ broker :param callable user_callback: callback passed to `add_on_connection_unblocked_callback` + :param SelectConnection _impl: :param pika.frame.Method method_frame: method frame having `method` member of type `pika.spec.Connection.Blocked` """ @@ -595,13 +597,14 @@ class BlockingConnection(object): See also `ConnectionParameters.blocked_connection_timeout`. :param method callback: Callback to call on `Connection.Blocked`, - having the signature `callback(pika.frame.Method)`, where the - method frame's `method` member is of type - `pika.spec.Connection.Blocked` + having the signature `callback(connection, pika.frame.Method)`, + where connection is the `BlockingConnection` instance and the method + frame's `method` member is of type `pika.spec.Connection.Blocked` """ self._impl.add_on_connection_blocked_callback( - functools.partial(self._on_connection_blocked, callback)) + functools.partial(self._on_connection_blocked, + functools.partial(callback, self))) def add_on_connection_unblocked_callback(self, callback): """Add a callback to be notified when RabbitMQ has sent a @@ -609,14 +612,15 @@ class BlockingConnection(object): to start publishing again. The callback will be passed the `Connection.Unblocked` method frame. - :param method callback: Callback to call on - `Connection.Unblocked`, having the signature - `callback(pika.frame.Method)`, where the method frame's - `method` member is of type `pika.spec.Connection.Unblocked` + :param method callback: Callback to call on Connection.Unblocked`, + having the signature `callback(connection, pika.frame.Method)`, + where connection is the `BlockingConnection` instance and the method + frame's `method` member is of type `pika.spec.Connection.Unblocked` """ self._impl.add_on_connection_unblocked_callback( - functools.partial(self._on_connection_unblocked, callback)) + functools.partial(self._on_connection_unblocked, + functools.partial(callback, self))) def add_timeout(self, deadline, callback): """Create a single-shot timer to fire after deadline seconds. 
Do not diff --git a/pika/connection.py b/pika/connection.py index c784cc2..25ec506 100644 --- a/pika/connection.py +++ b/pika/connection.py @@ -6,6 +6,7 @@ import ast import sys import collections import copy +import functools import logging import math import numbers @@ -1018,11 +1019,9 @@ class Connection(object): # pylint: disable=R0201 ON_CONNECTION_BACKPRESSURE = '_on_connection_backpressure' - ON_CONNECTION_BLOCKED = '_on_connection_blocked' ON_CONNECTION_CLOSED = '_on_connection_closed' ON_CONNECTION_ERROR = '_on_connection_error' ON_CONNECTION_OPEN = '_on_connection_open' - ON_CONNECTION_UNBLOCKED = '_on_connection_unblocked' CONNECTION_CLOSED = 0 CONNECTION_INIT = 1 CONNECTION_PROTOCOL = 2 @@ -1146,19 +1145,21 @@ class Connection(object): instead of relying on back pressure throttling. The callback will be passed the ``Connection.Blocked`` method frame. - TODO Also pass the connection as the callback's first arg - See also `ConnectionParameters.blocked_connection_timeout`. :param method callback: Callback to call on `Connection.Blocked`, - having the signature `callback(pika.frame.Method)`, where the - method frame's `method` member is of type + having the signature `callback(connection, pika.frame.Method)`, + where the method frame's `method` member is of type `pika.spec.Connection.Blocked` """ if not callable(callback): raise TypeError('callback should be a function or method.') - self.callbacks.add(0, spec.Connection.Blocked, callback, False) + + self.callbacks.add(0, + spec.Connection.Blocked, + functools.partial(callback, self), + one_shot=False) def add_on_connection_unblocked_callback(self, callback): """Add a callback to be notified when RabbitMQ has sent a @@ -1166,17 +1167,19 @@ class Connection(object): to start publishing again. The callback will be passed the ``Connection.Unblocked`` method frame. - TODO Also pass the connection as the callback's first arg - :param method callback: Callback to call on `Connection.Unblocked`, having the signature - `callback(pika.frame.Method)`, where the method frame's + `callback(connection, pika.frame.Method)`, where the method frame's `method` member is of type `pika.spec.Connection.Unblocked` """ if not callable(callback): raise TypeError('callback should be a function or method.') - self.callbacks.add(0, spec.Connection.Unblocked, callback, False) + + self.callbacks.add(0, + spec.Connection.Unblocked, + functools.partial(callback, self), + one_shot=False) def add_on_open_callback(self, callback): """Add a callback notification when the connection has opened. The @@ -1789,7 +1792,7 @@ class Connection(object): self._on_terminate(InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT, 'Blocked connection timeout expired') - def _on_connection_blocked(self, method_frame): + def _on_connection_blocked(self, _connection, method_frame): """Handle Connection.Blocked notification from RabbitMQ broker :param pika.frame.Method method_frame: method frame having `method` @@ -1808,7 +1811,7 @@ class Connection(object): self.params.blocked_connection_timeout, self._on_blocked_connection_timeout) - def _on_connection_unblocked(self, method_frame): + def _on_connection_unblocked(self, _connection, method_frame): """Handle Connection.Unblocked notification from RabbitMQ broker :param pika.frame.Method method_frame: method frame having `method`
Pass the connection instance as the first argument of the callbacks registered via add_on_connection_blocked_callback and add_on_connection_unblocked_callback.

Targeting 1.0.0.
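Under this change, callbacks registered for blocked/unblocked notifications receive the connection as their first argument, followed by the method frame. A minimal sketch against a local broker (the host is a placeholder):

import pika

def on_blocked(connection, method_frame):
    # method_frame.method is a pika.spec.Connection.Blocked instance
    print("broker blocked %r: %s" % (connection, method_frame.method.reason))

def on_unblocked(connection, method_frame):
    print("broker unblocked %r" % (connection,))

conn = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
conn.add_on_connection_blocked_callback(on_blocked)
conn.add_on_connection_unblocked_callback(on_unblocked)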
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index d27a625..0539596 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -445,9 +445,11 @@ class TestBlockedConnectionTimesOut(AsyncTestCase, AsyncAdapters): # pylint: di def begin(self, channel): # Simulate Connection.Blocked - channel.connection._on_connection_blocked(pika.frame.Method( - 0, - pika.spec.Connection.Blocked('Testing blocked connection timeout'))) + channel.connection._on_connection_blocked( + channel.connection, + pika.frame.Method(0, + pika.spec.Connection.Blocked( + 'Testing blocked connection timeout'))) def on_closed(self, connection, reply_code, reply_text): """called when the connection has finished closing""" @@ -471,15 +473,16 @@ class TestBlockedConnectionUnblocks(AsyncTestCase, AsyncAdapters): # pylint: di def begin(self, channel): # Simulate Connection.Blocked - channel.connection._on_connection_blocked(pika.frame.Method( - 0, - pika.spec.Connection.Blocked( - 'Testing blocked connection unblocks'))) + channel.connection._on_connection_blocked( + channel.connection, + pika.frame.Method(0, + pika.spec.Connection.Blocked( + 'Testing blocked connection unblocks'))) # Simulate Connection.Unblocked - channel.connection._on_connection_unblocked(pika.frame.Method( - 0, - pika.spec.Connection.Unblocked())) + channel.connection._on_connection_unblocked( + channel.connection, + pika.frame.Method(0, pika.spec.Connection.Unblocked())) # Schedule shutdown after blocked connection timeout would expire channel.connection.add_timeout(0.005, self.on_cleanup_timer) diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index 5623922..03b4681 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -401,27 +401,34 @@ class TestConnectionRegisterForBlockAndUnblock(BlockingTestCaseBase): # NOTE: I haven't figured out yet how to coerce RabbitMQ to emit # Connection.Block and Connection.Unblock from the test, so we'll - # just call the registration functions for now, to make sure that - # registration doesn't crash - - connection.add_on_connection_blocked_callback(lambda frame: None) + # just call the registration functions for now and simulate incoming + # blocked/unblocked frames blocked_buffer = [] - evt = blocking_connection._ConnectionBlockedEvt( - lambda f: blocked_buffer.append("blocked"), - pika.frame.Method(1, pika.spec.Connection.Blocked('reason'))) - repr(evt) - evt.dispatch() - self.assertEqual(blocked_buffer, ["blocked"]) + connection.add_on_connection_blocked_callback( + lambda conn, frame: blocked_buffer.append((conn, frame))) + # Simulate dispatch of blocked connection + blocked_frame = pika.frame.Method( + 0, + pika.spec.Connection.Blocked('reason')) + connection._impl._process_frame(blocked_frame) + connection.sleep(0) # facilitate dispatch of pending events + self.assertEqual(len(blocked_buffer), 1) + conn, frame = blocked_buffer[0] + self.assertIs(conn, connection) + self.assertIs(frame, blocked_frame) unblocked_buffer = [] - connection.add_on_connection_unblocked_callback(lambda frame: None) - evt = blocking_connection._ConnectionUnblockedEvt( - lambda f: unblocked_buffer.append("unblocked"), - pika.frame.Method(1, pika.spec.Connection.Unblocked())) - repr(evt) - evt.dispatch() - self.assertEqual(unblocked_buffer, ["unblocked"]) + connection.add_on_connection_unblocked_callback( + lambda conn, frame: 
unblocked_buffer.append((conn, frame))) + # Simulate dispatch of unblocked connection + unblocked_frame = pika.frame.Method(0, pika.spec.Connection.Unblocked()) + connection._impl._process_frame(unblocked_frame) + connection.sleep(0) # facilitate dispatch of pending events + self.assertEqual(len(unblocked_buffer), 1) + conn, frame = unblocked_buffer[0] + self.assertIs(conn, connection) + self.assertIs(frame, unblocked_frame) class TestBlockedConnectionTimeout(BlockingTestCaseBase): @@ -436,9 +443,11 @@ class TestBlockedConnectionTimeout(BlockingTestCaseBase): # simulate it for now # Simulate Connection.Blocked - conn._impl._on_connection_blocked(pika.frame.Method( - 0, - pika.spec.Connection.Blocked('TestBlockedConnectionTimeout'))) + conn._impl._on_connection_blocked( + conn._impl, + pika.frame.Method( + 0, + pika.spec.Connection.Blocked('TestBlockedConnectionTimeout'))) # Wait for connection teardown with self.assertRaises(pika.exceptions.ConnectionClosed) as excCtx: diff --git a/tests/unit/blocking_connection_tests.py b/tests/unit/blocking_connection_tests.py index f303023..5fda321 100644 --- a/tests/unit/blocking_connection_tests.py +++ b/tests/unit/blocking_connection_tests.py @@ -15,6 +15,19 @@ import pika.channel from pika.exceptions import AMQPConnectionError, ChannelClosed +# Disable protected-access +# pylint: disable=W0212 + +# Disable missing-docstring +# pylint: disable=C0111 + +# Disable invalid-name +# pylint: disable=C0103 + +# Disable no-self-use +# pylint: disable=R0201 + + class BlockingConnectionMockTemplate(blocking_connection.BlockingConnection): pass @@ -325,3 +338,25 @@ class BlockingConnectionTests(unittest.TestCase): # and the raised error must then looks like: self.assertEqual('Connection to 127.0.0.1:5672 failed: timeout', str(ctx.exception)) + + def test_connection_blocked_evt(self): + blocked_buffer = [] + frame = pika.frame.Method(0, pika.spec.Connection.Blocked('reason')) + evt = blocking_connection._ConnectionBlockedEvt( + blocked_buffer.append, + frame) + repr(evt) + evt.dispatch() + self.assertEqual(len(blocked_buffer), 1) + self.assertIs(blocked_buffer[0], frame) + + def test_connection_unblocked_evt(self): + unblocked_buffer = [] + frame = pika.frame.Method(0, pika.spec.Connection.Unblocked()) + evt = blocking_connection._ConnectionUnblockedEvt( + unblocked_buffer.append, + frame) + repr(evt) + evt.dispatch() + self.assertEqual(len(unblocked_buffer), 1) + self.assertIs(unblocked_buffer[0], frame) diff --git a/tests/unit/connection_tests.py b/tests/unit/connection_tests.py index 38db1d7..9d99314 100644 --- a/tests/unit/connection_tests.py +++ b/tests/unit/connection_tests.py @@ -678,6 +678,36 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 if frame_type == frame.Heartbeat: self.assertTrue(self.connection.heartbeat.received.called) + def test_add_on_connection_blocked_callback(self): + blocked_buffer = [] + self.connection.add_on_connection_blocked_callback( + lambda conn, frame: blocked_buffer.append((conn, frame))) + + # Simulate dispatch of blocked connection + blocked_frame = pika.frame.Method( + 0, + pika.spec.Connection.Blocked('reason')) + self.connection._process_frame(blocked_frame) + + self.assertEqual(len(blocked_buffer), 1) + conn, frame = blocked_buffer[0] + self.assertIs(conn, self.connection) + self.assertIs(frame, blocked_frame) + + def test_add_on_connection_unblocked_callback(self): + unblocked_buffer = [] + self.connection.add_on_connection_unblocked_callback( + lambda conn, frame: unblocked_buffer.append((conn, 
frame))) + + # Simulate dispatch of unblocked connection + unblocked_frame = pika.frame.Method(0, pika.spec.Connection.Unblocked()) + self.connection._process_frame(unblocked_frame) + + self.assertEqual(len(unblocked_buffer), 1) + conn, frame = unblocked_buffer[0] + self.assertIs(conn, self.connection) + self.assertIs(frame, unblocked_frame) + @mock.patch.object( connection.Connection, 'connect', @@ -714,6 +744,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 blocked_connection_timeout=60)) conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) # Check @@ -736,6 +767,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 # Simulate Connection.Blocked trigger conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) # Check @@ -748,6 +780,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 # Simulate Connection.Blocked trigger again conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) self.assertEqual(conn.add_timeout.call_count, 1) @@ -770,6 +803,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 blocked_connection_timeout=60)) conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) conn._on_blocked_connection_timeout() @@ -798,6 +832,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 blocked_connection_timeout=60)) conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) self.assertIsNotNone(conn._blocked_conn_timer) @@ -805,6 +840,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 timer = conn._blocked_conn_timer conn._on_connection_unblocked( + conn, mock.Mock(name='frame.Method(Connection.Unblocked)')) # Check @@ -829,6 +865,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 # Simulate Connection.Blocked conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) self.assertIsNotNone(conn._blocked_conn_timer) @@ -837,6 +874,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 # Simulate Connection.Unblocked conn._on_connection_unblocked( + conn, mock.Mock(name='frame.Method(Connection.Unblocked)')) # Check @@ -845,6 +883,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 # Simulate Connection.Unblocked again conn._on_connection_unblocked( + conn, mock.Mock(name='frame.Method(Connection.Unblocked)')) self.assertEqual(conn.remove_timeout.call_count, 1) @@ -872,6 +911,7 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 blocked_connection_timeout=60)) conn._on_connection_blocked( + conn, mock.Mock(name='frame.Method(Connection.Blocked)')) self.assertIsNotNone(conn._blocked_conn_timer)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[tornado,twisted]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "yapf", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 nose==1.3.7 packaging==21.3 -e git+https://github.com/pika/pika.git@7f81bc721807c5416726016838398804c07effbd#egg=pika pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 tornado==6.1 Twisted==15.3.0 typing_extensions==4.1.1 urllib3==1.26.20 yapf==0.32.0 zipp==3.6.0 zope.interface==5.5.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - tornado==6.1 - twisted==15.3.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - yapf==0.32.0 - zipp==3.6.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/pika
[ "tests/unit/connection_tests.py::ConnectionTests::test_add_on_connection_blocked_callback", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_connection_unblocked_callback", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_multiple_blocked_in_a_row_sets_timer_once", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_multiple_unblocked_in_a_row_removes_timer_once", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_on_terminate_removes_timer", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_timeout_teminates_connection", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_unblocked_removes_timer", "tests/unit/connection_tests.py::ConnectionTests::test_connection_blocked_sets_timer" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestLostConnectionResultsInIsClosedConnectionAndChannel::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddCallbackThreadsafeFromSameThread::test", "tests/acceptance/blocking_adapter_test.py::TestAddCallbackThreadsafeFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", 
"tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeWithAckFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorWithAckFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeInactivityTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorInterruptedByCancelFromBroker::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorCancelEncountersCancelFromBroker::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_channel", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close_with_channel_closed_exception", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_attempts_with_timeout", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_blocked_evt", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_unblocked_evt", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_constructor", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_no_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_user_initiated_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup_fails_with_open_error", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_sleep", "tests/unit/connection_tests.py::ConnectionTests::test_add_callbacks", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_close_callback", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_open_error_callback", "tests/unit/connection_tests.py::ConnectionTests::test_channel", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_closed_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_closing_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_init_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_protocol_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_start_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_tune_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties_default", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties_override", "tests/unit/connection_tests.py::ConnectionTests::test_close_bails_out_if_already_closed_or_closing", "tests/unit/connection_tests.py::ConnectionTests::test_close_calls_on_close_ready_when_no_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_closes_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_closes_opening_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_does_not_close_closing_channels", "tests/unit/connection_tests.py::ConnectionTests::test_connect_no_adapter_connect_from_constructor", "tests/unit/connection_tests.py::ConnectionTests::test_create_with_blocked_connection_timeout_config", 
"tests/unit/connection_tests.py::ConnectionTests::test_deliver_frame_to_channel_with_frame_for_unknown_channel", "tests/unit/connection_tests.py::ConnectionTests::test_new_conn_should_use_first_channel", "tests/unit/connection_tests.py::ConnectionTests::test_next_channel_number_returns_lowest_unused", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_closing_state_last_channel_calls_on_close_ready", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_closing_state_more_channels_no_on_close_ready", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_non_closing_state", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_with_closing_channels", "tests/unit/connection_tests.py::ConnectionTests::test_on_connect_timer", "tests/unit/connection_tests.py::ConnectionTests::test_on_connect_timer_reconnect", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_close_ok", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_start", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_tune", "tests/unit/connection_tests.py::ConnectionTests::test_on_data_available", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_cleans_up", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_access_denied_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_auth_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_connection_closed_callback", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_protocol_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_send_message_updates_frames_sent_and_bytes_sent", "tests/unit/connection_tests.py::ConnectionTests::test_set_backpressure_multiplier" ]
[]
BSD 3-Clause "New" or "Revised" License
2,221
[ "pika/adapters/blocking_connection.py", "pika/connection.py" ]
[ "pika/adapters/blocking_connection.py", "pika/connection.py" ]
NeurodataWithoutBorders__pynwb-381
a11f8e4edb84fa318b84035d0debe5c764bf7f31
2018-02-27 01:52:50
f749097718cf344f4c95de7771a1ef523f26762f
diff --git a/src/pynwb/ogen.py b/src/pynwb/ogen.py index d05832fd..a4a21f9c 100644 --- a/src/pynwb/ogen.py +++ b/src/pynwb/ogen.py @@ -5,6 +5,7 @@ from .form.utils import docval, popargs, fmt_docval_args from . import register_class, CORE_NAMESPACE from .base import TimeSeries, _default_resolution, _default_conversion from .core import NWBContainer +from .ecephys import Device @register_class('OptogeneticStimulusSite', CORE_NAMESPACE) @@ -19,7 +20,7 @@ class OptogeneticStimulusSite(NWBContainer): @docval({'name': 'name', 'type': str, 'doc': 'The name of this stimulus site'}, {'name': 'source', 'type': str, 'doc': 'the source of the data'}, - {'name': 'device', 'type': str, 'doc': 'Name of device in /general/devices'}, + {'name': 'device', 'type': Device, 'doc': 'the device that was used'}, {'name': 'description', 'type': str, 'doc': 'Description of site.'}, {'name': 'excitation_lambda', 'type': str, 'doc': 'Excitation wavelength.'}, {'name': 'location', 'type': str, 'doc': 'Location of stimulation site.'})
`nwbfile.create_ogen_site` device argument expects string the `device` argument of `nwbfile.create_ogen_site` takes a string that is the name of the device. I think it should also take the device object. This will make usage more consistent with `nwbfile.create_electrode_group` and `nwbfile.add_electrode` (with `electrode_group`) where the object is given, not the name of the object. Is there a reason the name of the device is used here?
NeurodataWithoutBorders/pynwb
diff --git a/tests/unit/pynwb_tests/test_ogen.py b/tests/unit/pynwb_tests/test_ogen.py index bee99992..b80243d4 100644 --- a/tests/unit/pynwb_tests/test_ogen.py +++ b/tests/unit/pynwb_tests/test_ogen.py @@ -1,14 +1,16 @@ import unittest from pynwb.ogen import OptogeneticSeries, OptogeneticStimulusSite +from pynwb.ecephys import Device class OptogeneticSeriesConstructor(unittest.TestCase): def test_init(self): - oS = OptogeneticStimulusSite('site1', 'a test source', 'device', 'description', 'excitation_lambda', 'location') + device = Device('name', 'source') + oS = OptogeneticStimulusSite('site1', 'a test source', device, 'description', 'excitation_lambda', 'location') self.assertEqual(oS.name, 'site1') - self.assertEqual(oS.device, 'device') + self.assertEqual(oS.device, device) self.assertEqual(oS.description, 'description') self.assertEqual(oS.excitation_lambda, 'excitation_lambda') self.assertEqual(oS.location, 'location')
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio", "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2017.11.5 chardet==3.0.4 coverage==6.2 execnet==1.9.0 h5py==2.7.1 idna==2.6 importlib-metadata==4.8.3 iniconfig==1.1.1 numpy==1.14.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 -e git+https://github.com/NeurodataWithoutBorders/pynwb.git@a11f8e4edb84fa318b84035d0debe5c764bf7f31#egg=pynwb pyparsing==3.1.4 pytest==7.0.1 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 python-dateutil==2.6.1 requests==2.18.4 ruamel.yaml==0.15.35 six==1.11.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.22 zipp==3.6.0
name: pynwb channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - certifi==2017.11.5 - chardet==3.0.4 - coverage==6.2 - execnet==1.9.0 - h5py==2.7.1 - idna==2.6 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - numpy==1.14.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - python-dateutil==2.6.1 - requests==2.18.4 - ruamel-yaml==0.15.35 - six==1.11.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.22 - zipp==3.6.0 prefix: /opt/conda/envs/pynwb
[ "tests/unit/pynwb_tests/test_ogen.py::OptogeneticSeriesConstructor::test_init" ]
[]
[]
[]
BSD-3-Clause
2,222
[ "src/pynwb/ogen.py" ]
[ "src/pynwb/ogen.py" ]
joblib__joblib-639
d3e478f2822dde4181927b9efb6064cf00b92996
2018-02-27 16:54:52
902fb6bbcf75c461d1b6703e5a01605fc592f214
codecov[bot]: # [Codecov](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=h1) Report > Merging [#639](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=desc) into [master](https://codecov.io/gh/joblib/joblib/commit/a0e1f69d2be31e9e6be1f5e346988bc04df7ff75?src=pr&el=desc) will **increase** coverage by `0.44%`. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/joblib/joblib/pull/639/graphs/tree.svg?height=150&width=650&token=gA6LF5DGTW&src=pr)](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #639 +/- ## ========================================== + Coverage 94.52% 94.96% +0.44% ========================================== Files 39 39 Lines 5403 5407 +4 ========================================== + Hits 5107 5135 +28 + Misses 296 272 -24 ``` | [Impacted Files](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [joblib/memory.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL21lbW9yeS5weQ==) | `94.98% <100%> (+0.02%)` | :arrow_up: | | [joblib/test/test\_memory.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL3Rlc3QvdGVzdF9tZW1vcnkucHk=) | `98.14% <100%> (ø)` | :arrow_up: | | [joblib/test/test\_parallel.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL3Rlc3QvdGVzdF9wYXJhbGxlbC5weQ==) | `96.27% <0%> (+0.35%)` | :arrow_up: | | [joblib/\_parallel\_backends.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL19wYXJhbGxlbF9iYWNrZW5kcy5weQ==) | `94.39% <0%> (+0.43%)` | :arrow_up: | | [joblib/numpy\_pickle\_utils.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL251bXB5X3BpY2tsZV91dGlscy5weQ==) | `93.29% <0%> (+0.63%)` | :arrow_up: | | [joblib/hashing.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL2hhc2hpbmcucHk=) | `92.98% <0%> (+0.87%)` | :arrow_up: | | [joblib/\_memmapping\_reducer.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL19tZW1tYXBwaW5nX3JlZHVjZXIucHk=) | `95% <0%> (+1.11%)` | :arrow_up: | | [joblib/logger.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL2xvZ2dlci5weQ==) | `86.84% <0%> (+1.31%)` | :arrow_up: | | [joblib/numpy\_pickle.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL251bXB5X3BpY2tsZS5weQ==) | `98.47% <0%> (+2.03%)` | :arrow_up: | | [joblib/pool.py](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree#diff-am9ibGliL3Bvb2wucHk=) | `91.37% <0%> (+2.58%)` | :arrow_up: | | ... and [2 more](https://codecov.io/gh/joblib/joblib/pull/639/diff?src=pr&el=tree-more) | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=footer). Last update [a0e1f69...4f88f5a](https://codecov.io/gh/joblib/joblib/pull/639?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). lesteve: Merging, thanks a lot @aabadie!
diff --git a/joblib/memory.py b/joblib/memory.py index 0f005dd..18b20f5 100644 --- a/joblib/memory.py +++ b/joblib/memory.py @@ -767,7 +767,7 @@ class Memory(Logger): The path of the base directory to use as a data store or None. If None is given, no caching is done and the Memory object is completely transparent. This option - replaces cachedir since version 0.11. + replaces cachedir since version 0.12. backend: str or 'local' Type of store backend for reading/writing cache files. @@ -776,8 +776,8 @@ class Memory(Logger): backend. cachedir: str or None - cachedir is deprecated since version 0.11 and will be - removed in 0.13. Please consider using location option instead. + cachedir is deprecated since version 0.12 and will be + removed in 0.14. Please consider using location option instead. The path of the base directory to use as a data store or None. If None is given, no caching is done and the Memory object is completely transparent. @@ -818,17 +818,18 @@ class Memory(Logger): if cachedir is not None: if location is None: warnings.warn("cachedir option is deprecated since version " - "0.10 and will be removed after version 0.12.\n" + "0.12 and will be removed in version 0.14.\n" "Use option location=<store location> " "instead.", DeprecationWarning, stacklevel=2) location = cachedir else: warnings.warn("You set both location and cachedir options." "cachedir is deprecated since version " - "0.10 and will be removed after version 0.12.\n" + "0.12 and will be removed in version 0.14.\n" "cachedir value will be ignored.", DeprecationWarning, stacklevel=2) + self.location = location if isinstance(location, _basestring): location = os.path.join(location, 'joblib') @@ -837,6 +838,14 @@ class Memory(Logger): backend_options=dict(compress=compress, mmap_mode=mmap_mode, **backend_options)) + @property + def cachedir(self): + warnings.warn("cachedir option is deprecated since version " + "0.12 and will be removed in version 0.14.\n" + "Use option location=<store location> " + "instead.", DeprecationWarning, stacklevel=2) + return self.location + def cache(self, func=None, ignore=None, verbose=None, mmap_mode=False): """ Decorates the given function func to only compute its return value for input arguments not cached on disk.
Memory object does not have a cachedir attribute in master Seems like an oversight from #397. The doc says that cachedir is deprecated but actually it is not set: https://github.com/joblib/joblib/blob/a0e1f69d2be31e9e6be1f5e346988bc04df7ff75/joblib/memory.py#L778-L783 It'd be a good idea to double-check that other similar things have not been overlooked. Snippet: ```python from joblib import Memory mem = Memory('/tmp/test') print(mem.cachedir) ```
joblib/joblib
diff --git a/joblib/test/test_memory.py b/joblib/test/test_memory.py index 7d74506..00f384c 100644 --- a/joblib/test/test_memory.py +++ b/joblib/test/test_memory.py @@ -383,6 +383,11 @@ def test_func_dir(tmpdir): location = os.path.join(g.store_backend.location, func_id) assert location == path assert os.path.exists(path) + assert memory.location == os.path.dirname(g.store_backend.location) + with warns(DeprecationWarning) as w: + assert memory.cachedir == os.path.dirname(g.store_backend.location) + assert len(w) == 1 + assert "cachedir option is deprecated since version" in str(w[-1].message) # Test that the code is stored. # For the following test to be robust to previous execution, we clear
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
0.11
{ "env_vars": null, "env_yml_path": [], "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-timeout", "codecov" ], "pre_install": [], "python": "3.6", "reqs_path": [ "continuous_integration/appveyor/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/joblib/joblib.git@d3e478f2822dde4181927b9efb6064cf00b92996#egg=joblib numpy==1.19.5 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-timeout==2.1.0 requests==2.27.1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: joblib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-timeout==2.1.0 - requests==2.27.1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/joblib
[ "joblib/test/test_memory.py::test_func_dir" ]
[]
[ "joblib/test/test_memory.py::test_memory_integration", "joblib/test/test_memory.py::test_no_memory", "joblib/test/test_memory.py::test_memory_kwarg", "joblib/test/test_memory.py::test_memory_lambda", "joblib/test/test_memory.py::test_memory_name_collision", "joblib/test/test_memory.py::test_memory_warning_lambda_collisions", "joblib/test/test_memory.py::test_memory_warning_collision_detection", "joblib/test/test_memory.py::test_memory_partial", "joblib/test/test_memory.py::test_memory_eval", "joblib/test/test_memory.py::test_argument_change", "joblib/test/test_memory.py::test_memory_numpy[None]", "joblib/test/test_memory.py::test_memory_numpy[r]", "joblib/test/test_memory.py::test_memory_numpy_check_mmap_mode", "joblib/test/test_memory.py::test_memory_exception", "joblib/test/test_memory.py::test_memory_ignore", "joblib/test/test_memory.py::test_partial_decoration[ignore0-100-r]", "joblib/test/test_memory.py::test_partial_decoration[ignore1-10-None]", "joblib/test/test_memory.py::test_persistence", "joblib/test/test_memory.py::test_call_and_shelve", "joblib/test/test_memory.py::test_memorized_pickling", "joblib/test/test_memory.py::test_memorized_repr", "joblib/test/test_memory.py::test_memory_file_modification", "joblib/test/test_memory.py::test_memory_in_memory_function_code_change", "joblib/test/test_memory.py::test_clear_memory_with_none_location", "joblib/test/test_memory.py::test_memory_func_with_kwonly_args", "joblib/test/test_memory.py::test_memory_func_with_signature", "joblib/test/test_memory.py::test__get_items", "joblib/test/test_memory.py::test__get_items_to_delete", "joblib/test/test_memory.py::test_memory_reduce_size", "joblib/test/test_memory.py::test_memory_clear", "joblib/test/test_memory.py::test_cached_function_race_condition_when_persisting_output", "joblib/test/test_memory.py::test_cached_function_race_condition_when_persisting_output_2", "joblib/test/test_memory.py::test_concurrency_safe_write[multiprocessing]", "joblib/test/test_memory.py::test_concurrency_safe_write[loky]", "joblib/test/test_memory.py::test_concurrency_safe_write[threading]", "joblib/test/test_memory.py::test_memory_recomputes_after_an_error_why_loading_results", "joblib/test/test_memory.py::test_cachedir_deprecation_warning", "joblib/test/test_memory.py::test_register_invalid_store_backends_key[None]", "joblib/test/test_memory.py::test_register_invalid_store_backends_key[invalid_prefix1]", "joblib/test/test_memory.py::test_register_invalid_store_backends_key[invalid_prefix2]", "joblib/test/test_memory.py::test_register_invalid_store_backends_object", "joblib/test/test_memory.py::test_memory_default_store_backend", "joblib/test/test_memory.py::test_instanciate_incomplete_store_backend", "joblib/test/test_memory.py::test_dummy_store_backend" ]
[]
BSD 3-Clause "New" or "Revised" License
2,224
[ "joblib/memory.py" ]
[ "joblib/memory.py" ]
ucfopen__canvasapi-159
c69f6a9801ac275fdad46d97fa95c77c25d6f953
2018-02-27 20:13:19
c69f6a9801ac275fdad46d97fa95c77c25d6f953
diff --git a/.travis.yml b/.travis.yml index 3f01729..51f296b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: python python: - 2.7 - - 3.3 - 3.4 - 3.5 - 3.6 diff --git a/CHANGELOG.md b/CHANGELOG.md index 1af344a..243fbaf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Change Log +## Unreleased + +### Deprecation Warnings + +- **_Dropped support for Python 3.3_** + - [Python 3.3 is end-of-life as of September 2017](https://www.python.org/dev/peps/pep-0398/#lifespan) + - Should continue to function in 3.3, but compatibility cannot be guaranteed going forward. +- Several methods in the `Course` and `Section` classes relating to assignments and submissions have been deprecated. + - Comparable methods have been implemented in the `Assignment` and `Submission` classes, as appropriate. + - The deprecated methods now include a warning in the documentation with reference to the replacement. Additionally, the deprecated methods will raise a `DeprecationWarning`. + - These methods will be removed in a future release. +- `Course.list_sections()` has been deprecated. Use `Course.get_sections()` instead. + ## [0.8.2] - 2018-01-24 ### Bugfixes diff --git a/canvasapi/assignment.py b/canvasapi/assignment.py index 90facfc..3dcec1d 100644 --- a/canvasapi/assignment.py +++ b/canvasapi/assignment.py @@ -3,7 +3,11 @@ from __future__ import absolute_import, division, print_function, unicode_litera from six import python_2_unicode_compatible from canvasapi.canvas_object import CanvasObject -from canvasapi.util import combine_kwargs +from canvasapi.exceptions import RequiredFieldMissing +from canvasapi.paginated_list import PaginatedList +from canvasapi.submission import Submission +from canvasapi.user import UserDisplay +from canvasapi.util import combine_kwargs, obj_or_id @python_2_unicode_compatible @@ -12,7 +16,7 @@ class Assignment(CanvasObject): def __str__(self): return "{} ({})".format(self.name, self.id) - def delete(self): + def delete(self, **kwargs): """ Delete this assignment. @@ -24,6 +28,7 @@ class Assignment(CanvasObject): response = self._requester.request( 'DELETE', 'courses/{}/assignments/{}'.format(self.course_id, self.id), + _kwargs=combine_kwargs(**kwargs) ) return Assignment(self._requester, response.json()) @@ -47,6 +52,98 @@ class Assignment(CanvasObject): return Assignment(self._requester, response.json()) + def get_gradeable_students(self, **kwargs): + """ + List students eligible to submit the assignment. + + :calls: `GET /api/v1/courses/:course_id/assignments/:assignment_id/gradeable_students \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.gradeable_students>`_ + + :rtype: :class:`canvasapi.paginated_list.PaginatedList` of + :class:`canvasapi.user.UserDisplay` + """ + return PaginatedList( + UserDisplay, + self._requester, + 'GET', + 'courses/{}/assignments/{}/gradeable_students'.format(self.course_id, self.id), + _kwargs=combine_kwargs(**kwargs) + ) + + def get_submission(self, user, **kwargs): + """ + Get a single submission, based on user id. 
+ + :calls: `GET /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.show>`_ + + :param user: The object or ID of the related user + :type user: :class:`canvasapi.user.User` or int + + :rtype: :class:`canvasapi.submission.Submission` + """ + from canvasapi.user import User + + user_id = obj_or_id(user, "user", (User,)) + + response = self._requester.request( + 'GET', + 'courses/{}/assignments/{}/submissions/{}'.format(self.course_id, self.id, user_id), + _kwargs=combine_kwargs(**kwargs) + ) + response_json = response.json() + response_json.update(course_id=self.course_id) + + return Submission(self._requester, response_json) + + def get_submissions(self, **kwargs): + """ + Get all existing submissions for this assignment. + + :calls: `GET /api/v1/courses/:course_id/assignments/:assignment_id/submissions \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.index>`_ + + :rtype: :class:`canvasapi.paginated_list.PaginatedList` of + :class:`canvasapi.submission.Submission` + """ + return PaginatedList( + Submission, + self._requester, + 'GET', + 'courses/{}/assignments/{}/submissions'.format(self.course_id, self.id), + {'course_id': self.course_id}, + _kwargs=combine_kwargs(**kwargs) + ) + + def submit(self, submission, **kwargs): + """ + Makes a submission for an assignment. + + :calls: `POST /api/v1/courses/:course_id/assignments/:assignment_id/submissions \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions.create>`_ + + :param submission: The attributes of the submission. + :type submission: dict + + :rtype: :class:`canvasapi.submission.Submission` + """ + if isinstance(submission, dict) and 'submission_type' in submission: + kwargs['submision'] = submission + else: + raise RequiredFieldMissing( + "Dictionary with key 'submission_type' is required." + ) + + response = self._requester.request( + 'POST', + 'courses/{}/assignments/{}/submissions'.format(self.course_id, self.id), + _kwargs=combine_kwargs(**kwargs) + ) + response_json = response.json() + response_json.update(course_id=self.course_id) + + return Submission(self._requester, response_json) + @python_2_unicode_compatible class AssignmentGroup(CanvasObject): diff --git a/canvasapi/course.py b/canvasapi/course.py index df55c63..73c55e6 100644 --- a/canvasapi/course.py +++ b/canvasapi/course.py @@ -14,7 +14,6 @@ from canvasapi.paginated_list import PaginatedList from canvasapi.tab import Tab from canvasapi.submission import Submission from canvasapi.upload import Uploader -from canvasapi.user import UserDisplay from canvasapi.util import combine_kwargs, is_multivalued, obj_or_id from canvasapi.rubric import Rubric @@ -25,7 +24,7 @@ warnings.simplefilter('always', DeprecationWarning) class Course(CanvasObject): def __str__(self): - return "{} {} ({})".format(self.course_code, self.name, self.id) + return '{} {} ({})'.format(self.course_code, self.name, self.id) def conclude(self): """ @@ -698,6 +697,10 @@ class Course(CanvasObject): """ Returns the list of sections for this course. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.course.Course.get_sections` instead. + :calls: `GET /api/v1/courses/:course_id/sections \ <https://canvas.instructure.com/doc/api/sections.html#method.sections.index>`_ @@ -1154,6 +1157,10 @@ class Course(CanvasObject): """ Makes a submission for an assignment. + .. warning:: + .. 
deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.submit` instead. + :calls: `POST /api/v1/courses/:course_id/assignments/:assignment_id/submissions \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions.create>`_ @@ -1167,29 +1174,29 @@ class Course(CanvasObject): """ from canvasapi.assignment import Assignment - assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + warnings.warn( + 'Course.submit_assignment() is deprecated and will be removed in ' + 'the future. Use Assignment.submit() instead.', + DeprecationWarning + ) - if isinstance(submission, dict) and 'submission_type' in submission: - kwargs['submision'] = submission - else: - raise RequiredFieldMissing( - "Dictionary with key 'submission_type' is required." - ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - response = self._requester.request( - 'POST', - 'courses/{}/assignments/{}/submissions'.format(self.id, assignment_id), - _kwargs=combine_kwargs(**kwargs) + assignment = Assignment( + self._requester, + {'course_id': self.id, 'id': assignment_id} ) - response_json = response.json() - response_json.update(course_id=self.id) - return Submission(self._requester, response_json) + return assignment.submit(submission, **kwargs) def list_submissions(self, assignment, **kwargs): """ Get all existing submissions for an assignment. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.get_submissions` instead. + :calls: `GET /api/v1/courses/:course_id/assignments/:assignment_id/submissions \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.index>`_ @@ -1201,17 +1208,21 @@ class Course(CanvasObject): """ from canvasapi.assignment import Assignment + warnings.warn( + 'Course.list_submissions() is deprecated and will be removed in ' + 'the future. Use Assignment.get_submissions() instead.', + DeprecationWarning + ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - return PaginatedList( - Submission, + assignment = Assignment( self._requester, - 'GET', - 'courses/{}/assignments/{}/submissions'.format(self.id, assignment_id), - {'course_id': self.id}, - _kwargs=combine_kwargs(**kwargs) + {'course_id': self.id, 'id': assignment_id} ) + return assignment.get_submissions(**kwargs) + def list_multiple_submissions(self, **kwargs): """ List submissions for multiple assignments. @@ -1240,6 +1251,10 @@ class Course(CanvasObject): """ Get a single submission, based on user id. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.get_submission` instead. + :calls: `GET /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.show>`_ @@ -1251,25 +1266,30 @@ class Course(CanvasObject): :rtype: :class:`canvasapi.submission.Submission` """ from canvasapi.assignment import Assignment - from canvasapi.user import User - assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - user_id = obj_or_id(user, "user", (User,)) + warnings.warn( + '`Course.get_submission()` is deprecated and will be removed in a ' + 'future version. 
Use `Assignment.get_submission()` instead', + DeprecationWarning + ) - response = self._requester.request( - 'GET', - 'courses/{}/assignments/{}/submissions/{}'.format(self.id, assignment_id, user_id), - _kwargs=combine_kwargs(**kwargs) + assignment_id = obj_or_id(assignment, 'assignment', (Assignment,)) + + assignment = Assignment( + self._requester, + {'course_id': self.id, 'id': assignment_id} ) - response_json = response.json() - response_json.update(course_id=self.id) - return Submission(self._requester, response_json) + return assignment.get_submission(user, **kwargs) def update_submission(self, assignment, user, **kwargs): """ Comment on and/or update the grading for a student's assignment submission. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.submission.Submission.edit` instead. + :calls: `PUT /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.update>`_ @@ -1283,29 +1303,31 @@ class Course(CanvasObject): from canvasapi.assignment import Assignment from canvasapi.user import User - assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - user_id = obj_or_id(user, "user", (User,)) - - response = self._requester.request( - 'PUT', - 'courses/{}/assignments/{}/submissions/{}'.format(self.id, assignment_id, user_id), - _kwargs=combine_kwargs(**kwargs) + warnings.warn( + '`Course.update_submission()` is deprecated and will be removed in a ' + 'future version. Use `Submission.edit()` instead', + DeprecationWarning ) - response_json = response.json() - response_json.update(course_id=self.id) - - submission = self.get_submission(assignment_id, user_id) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + user_id = obj_or_id(user, "user", (User,)) - if 'submission_type' in response_json: - super(Submission, submission).set_attributes(response_json) + submission = Submission(self._requester, { + 'course_id': self.id, + 'assignment_id': assignment_id, + 'user_id': user_id, + }) - return Submission(self._requester, response_json) + return submission.edit(**kwargs) - def list_gradeable_students(self, assignment): + def list_gradeable_students(self, assignment, **kwargs): """ List students eligible to submit the assignment. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.get_gradeable_students` instead. + :calls: `GET /api/v1/courses/:course_id/assignments/:assignment_id/gradeable_students \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.gradeable_students>`_ @@ -1313,23 +1335,33 @@ class Course(CanvasObject): :type assignment: :class:`canvasapi.assignment.Assignment` or int :rtype: :class:`canvasapi.paginated_list.PaginatedList` of - :class:`canvasapi.user.User` + :class:`canvasapi.user.UserDisplay` """ from canvasapi.assignment import Assignment - assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + warnings.warn( + '`Course.list_gradeable_students()` is deprecated and will be ' + 'removed in a future version. 
Use ' + '`Assignment.get_gradeable_students()` instead.', + DeprecationWarning + ) - return PaginatedList( - UserDisplay, + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + assignment = Assignment( self._requester, - 'GET', - 'courses/{}/assignments/{}/gradeable_students'.format(self.id, assignment_id) + {'id': assignment_id, 'course_id': self.id} ) - def mark_submission_as_read(self, assignment, user): + return assignment.get_gradeable_students(**kwargs) + + def mark_submission_as_read(self, assignment, user, **kwargs): """ Mark submission as read. No request fields are necessary. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.submission.Submission.mark_read` instead. + :calls: `PUT /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id/read \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.mark_submission_read>`_ @@ -1344,23 +1376,30 @@ class Course(CanvasObject): from canvasapi.assignment import Assignment from canvasapi.user import User + warnings.warn( + '`Course.mark_submission_as_read()` is deprecated and will be ' + 'removed in a future version. Use `Submission.mark_read()` instead.', + DeprecationWarning + ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) user_id = obj_or_id(user, "user", (User,)) - response = self._requester.request( - 'PUT', - 'courses/{}/assignments/{}/submissions/{}/read'.format( - self.id, - assignment_id, - user_id, - ) - ) - return response.status_code == 204 + submission = Submission(self._requester, { + 'course_id': self.id, + 'assignment_id': assignment_id, + 'user_id': user_id + }) + return submission.mark_read(**kwargs) - def mark_submission_as_unread(self, assignment, user): + def mark_submission_as_unread(self, assignment, user, **kwargs): """ Mark submission as unread. No request fields are necessary. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.submission.Submission.mark_unread` instead. + :calls: `DELETE /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id/read \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.mark_submission_unread>`_ @@ -1375,18 +1414,21 @@ class Course(CanvasObject): from canvasapi.assignment import Assignment from canvasapi.user import User + warnings.warn( + '`Course.mark_submission_as_unread()` is deprecated and will be ' + 'removed in a future version. 
Use `Submission.mark_unread()` instead.', + DeprecationWarning + ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) user_id = obj_or_id(user, "user", (User,)) - response = self._requester.request( - 'DELETE', - 'courses/{}/assignments/{}/submissions/{}/read'.format( - self.id, - assignment_id, - user_id, - ), - ) - return response.status_code == 204 + submission = Submission(self._requester, { + 'course_id': self.id, + 'assignment_id': assignment_id, + 'user_id': user_id + }) + return submission.mark_unread(**kwargs) def list_external_feeds(self): """ @@ -1781,7 +1823,7 @@ class Course(CanvasObject): """ response = self._requester.request( - "GET", + 'GET', 'courses/%s/grading_standards/%d' % (self.id, grading_standard_id), _kwargs=combine_kwargs(**kwargs) ) diff --git a/canvasapi/section.py b/canvasapi/section.py index f946006..707bb60 100644 --- a/canvasapi/section.py +++ b/canvasapi/section.py @@ -1,14 +1,15 @@ from __future__ import absolute_import, division, print_function, unicode_literals -from warnings import warn +import warnings from six import python_2_unicode_compatible from canvasapi.canvas_object import CanvasObject -from canvasapi.exceptions import RequiredFieldMissing from canvasapi.paginated_list import PaginatedList from canvasapi.submission import Submission from canvasapi.util import combine_kwargs, obj_or_id +warnings.simplefilter('always', DeprecationWarning) + @python_2_unicode_compatible class Section(CanvasObject): @@ -111,6 +112,10 @@ class Section(CanvasObject): """ Makes a submission for an assignment. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.submit` instead. + :calls: `POST /api/v1/sections/:section_id/assignments/:assignment_id/submissions \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions.create>`_ @@ -123,29 +128,28 @@ class Section(CanvasObject): """ from canvasapi.assignment import Assignment - assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - - if isinstance(submission, dict) and 'submission_type' in submission: - kwargs['submision'] = submission - else: - raise RequiredFieldMissing( - "Dictionary with key 'submission_type' is required." - ) - - response = self._requester.request( - 'POST', - 'sections/{}/assignments/{}/submissions'.format(self.id, assignment_id), - _kwargs=combine_kwargs(**kwargs) + warnings.warn( + 'Section.submit_assignment() is deprecated and will be removed ' + 'in the future. Use Assignment.submit() instead.', + DeprecationWarning ) - response_json = response.json() - response_json.update(section_id=self.id) - return Submission(self._requester, response_json) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + assignment = Assignment(self._requester, { + 'course_id': self.course_id, + 'section_id': self.id, + 'id': assignment_id + }) + return assignment.submit(submission, **kwargs) def list_submissions(self, assignment, **kwargs): """ Get all existing submissions for an assignment. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.get_submissions` instead. + :calls: `GET /api/v1/sections/:section_id/assignments/:assignment_id/submissions \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.index>`_ @@ -157,16 +161,20 @@ class Section(CanvasObject): """ from canvasapi.assignment import Assignment + warnings.warn( + 'Section.list_submissions() is deprecated and will be removed ' + 'in the future. 
Use Assignment.get_submissions() instead.', + DeprecationWarning + ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + assignment = Assignment(self._requester, { + 'course_id': self.course_id, + 'section_id': self.id, + 'id': assignment_id + }) - return PaginatedList( - Submission, - self._requester, - 'GET', - 'sections/{}/assignments/{}/submissions'.format(self.id, assignment_id), - {'section_id': self.id}, - _kwargs=combine_kwargs(**kwargs) - ) + return assignment.get_submissions(**kwargs) def list_multiple_submissions(self, **kwargs): """ @@ -180,7 +188,7 @@ class Section(CanvasObject): :class:`canvasapi.submission.Submission` """ if 'grouped' in kwargs: - warn('The `grouped` parameter must be empty. Removing kwarg `grouped`.') + warnings.warn('The `grouped` parameter must be empty. Removing kwarg `grouped`.') del kwargs['grouped'] return PaginatedList( @@ -196,6 +204,10 @@ class Section(CanvasObject): """ Get a single submission, based on user id. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.assignment.Assignment.get_submission` instead. + :calls: `GET /api/v1/sections/:section_id/assignments/:assignment_id/submissions/:user_id \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.show>`_ @@ -207,25 +219,31 @@ class Section(CanvasObject): :rtype: :class:`canvasapi.submission.Submission` """ from canvasapi.assignment import Assignment - from canvasapi.user import User + + warnings.warn( + 'Section.get_submission() is deprecated and will be removed ' + 'in the future. Use Assignment.get_submission() instead.', + DeprecationWarning + ) assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - user_id = obj_or_id(user, "user", (User,)) - response = self._requester.request( - 'GET', - 'sections/{}/assignments/{}/submissions/{}'.format(self.id, assignment_id, user_id), - _kwargs=combine_kwargs(**kwargs) - ) - response_json = response.json() - response_json.update(section_id=self.id) + assignment = Assignment(self._requester, { + 'course_id': self.course_id, + 'section_id': self.id, + 'id': assignment_id + }) - return Submission(self._requester, response_json) + return assignment.get_submission(user, **kwargs) def update_submission(self, assignment, user, **kwargs): """ Comment on and/or update the grading for a student's assignment submission. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.submission.Submission.edit` instead. + :calls: `PUT /api/v1/sections/:section_id/assignments/:assignment_id/submissions/:user_id \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.update>`_ @@ -239,29 +257,31 @@ class Section(CanvasObject): from canvasapi.assignment import Assignment from canvasapi.user import User - assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) - user_id = obj_or_id(user, "user", (User,)) - - response = self._requester.request( - 'PUT', - 'sections/{}/assignments/{}/submissions/{}'.format(self.id, assignment_id, user_id), - _kwargs=combine_kwargs(**kwargs) + warnings.warn( + 'Section.update_submission() is deprecated and will be removed ' + 'in the future. 
Use Submission.edit() instead.', + DeprecationWarning ) - submission = self.get_submission(assignment_id, user_id) - - response_json = response.json() - response_json.update(section_id=self.id) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) + user_id = obj_or_id(user, "user", (User,)) - if 'submission_type' in response_json: - super(Submission, submission).set_attributes(response_json) + submission = Submission(self._requester, { + 'course_id': self.course_id, + 'assignment_id': assignment_id, + 'user_id': user_id + }) - return Submission(self._requester, response_json) + return submission.edit(**kwargs) - def mark_submission_as_read(self, assignment, user): + def mark_submission_as_read(self, assignment, user, **kwargs): """ Mark submission as read. No request fields are necessary. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.submission.Submission.mark_read` instead. + :calls: `PUT /api/v1/sections/:section_id/assignments/:assignment_id/submissions/:user_id/read \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.mark_submission_read>`_ @@ -276,23 +296,30 @@ class Section(CanvasObject): from canvasapi.assignment import Assignment from canvasapi.user import User + warnings.warn( + 'Section.mark_submission_as_read() is deprecated and will be ' + 'removed in the future. Use Submission.mark_read() instead.', + DeprecationWarning + ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) user_id = obj_or_id(user, "user", (User,)) - response = self._requester.request( - 'PUT', - 'sections/{}/assignments/{}/submissions/{}/read'.format( - self.id, - assignment_id, - user_id, - ) - ) - return response.status_code == 204 + submission = Submission(self._requester, { + 'course_id': self.course_id, + 'assignment_id': assignment_id, + 'user_id': user_id + }) + return submission.mark_read(**kwargs) - def mark_submission_as_unread(self, assignment, user): + def mark_submission_as_unread(self, assignment, user, **kwargs): """ Mark submission as unread. No request fields are necessary. + .. warning:: + .. deprecated:: 0.9.0 + Use :func:`canvasapi.submission.Submission.mark_unread` instead. + :calls: `DELETE /api/v1/sections/:section_id/assignments/:assignment_id/submissions/:user_id/read \ <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.mark_submission_unread>`_ @@ -307,15 +334,18 @@ class Section(CanvasObject): from canvasapi.assignment import Assignment from canvasapi.user import User + warnings.warn( + 'Section.mark_submission_as_unread() is deprecated and will be ' + 'removed in the future. 
Use Submission.mark_unread() instead.', + DeprecationWarning + ) + assignment_id = obj_or_id(assignment, "assignment", (Assignment,)) user_id = obj_or_id(user, "user", (User,)) - response = self._requester.request( - 'DELETE', - 'sections/{}/assignments/{}/submissions/{}/read'.format( - self.id, - assignment_id, - user_id, - ), - ) - return response.status_code == 204 + submission = Submission(self._requester, { + 'course_id': self.course_id, + 'assignment_id': assignment_id, + 'user_id': user_id + }) + return submission.mark_unread(**kwargs) diff --git a/canvasapi/submission.py b/canvasapi/submission.py index eccc86c..eb15e84 100644 --- a/canvasapi/submission.py +++ b/canvasapi/submission.py @@ -4,13 +4,80 @@ from six import python_2_unicode_compatible from canvasapi.canvas_object import CanvasObject from canvasapi.upload import Uploader +from canvasapi.util import combine_kwargs @python_2_unicode_compatible class Submission(CanvasObject): def __str__(self): - return "{}".format(self.id) + return '{}-{}'.format(self.assignment_id, self.user_id) + + def edit(self, **kwargs): + """ + Comment on and/or update the grading for a student's assignment submission. + + :calls: `PUT /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.update>`_ + + :rtype: :class:`canvasapi.submission.Submission` + """ + response = self._requester.request( + 'PUT', + 'courses/{}/assignments/{}/submissions/{}'.format( + self.course_id, + self.assignment_id, + self.user_id + ), + _kwargs=combine_kwargs(**kwargs) + ) + response_json = response.json() + response_json.update(course_id=self.course_id) + + super(Submission, self).set_attributes(response_json) + return self + + def mark_read(self, **kwargs): + """ + Mark submission as read. No request fields are necessary. + + :calls: `PUT + /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id/read \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.mark_submission_read>`_ + + :returns: True if successfully marked as read. + :rtype: bool + """ + response = self._requester.request( + 'PUT', + 'courses/{}/assignments/{}/submissions/{}/read'.format( + self.course_id, + self.assignment_id, + self.user_id + ) + ) + return response.status_code == 204 + + def mark_unread(self, **kwargs): + """ + Mark submission as unread. No request fields are necessary. + + :calls: `DELETE + /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id/read \ + <https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.mark_submission_unread>`_ + + :returns: True if successfully marked as unread. + :rtype: bool + """ + response = self._requester.request( + 'DELETE', + 'courses/{}/assignments/{}/submissions/{}/read'.format( + self.course_id, + self.assignment_id, + self.user_id + ) + ) + return response.status_code == 204 def upload_comment(self, file, **kwargs): """ @@ -26,11 +93,6 @@ class Submission(CanvasObject): and the JSON response from the API. 
:rtype: tuple """ - from canvasapi.course import Course - - if not hasattr(self, 'course_id'): - raise ValueError('Must use a course to upload file comments.') - response = Uploader( self._requester, 'courses/{}/assignments/{}/submissions/{}/comments/files'.format( @@ -43,12 +105,7 @@ class Submission(CanvasObject): ).start() if response[0]: - # 1337 h@x: remove once lazy loading is completed - course = Course(self._requester, {'id': self.course_id}) - - course.update_submission( - self.assignment_id, - self.user_id, + self.edit( comment={ 'file_ids': [response[1]['id']] } diff --git a/setup.py b/setup.py index ab95a2a..b25f5cb 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,6 @@ setup( 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6',
Submission methods on wrong class Was working with assignment submissions recently and discovered that many of the methods relating to submissions are in the wrong class. For example, `list_submissions` appears in the `Course` and `Section` classes, but should probably just be in the `Assignment` class. Need to move them to the appropriate class.
ucfopen/canvasapi
diff --git a/tests/fixtures/assignment.json b/tests/fixtures/assignment.json index 564854e..47f38b6 100644 --- a/tests/fixtures/assignment.json +++ b/tests/fixtures/assignment.json @@ -11,9 +11,9 @@ }, "delete_assignment": { "method": "DELETE", - "endpoint": "courses/1/assignments/5", + "endpoint": "courses/1/assignments/1", "data": { - "id": 5, + "id": 1, "course_id": 1, "name": "Assignment 1", "description": "Delete this assignment" @@ -32,9 +32,9 @@ }, "edit_assignment": { "method": "PUT", - "endpoint": "courses/1/assignments/5", + "endpoint": "courses/1/assignments/1", "data": { - "id": 5, + "id": 1, "course_id": 1, "name": "New Name", "description": "Edit this assignment" @@ -72,5 +72,17 @@ } ], "status_code": 200 + }, + "submit": { + "method": "POST", + "endpoint": "courses/1/assignments/1/submissions", + "data": { + "id": 1, + "assignment_id": 1, + "user_id": 1, + "html_url": "http://example.com/courses/1/assignments/1/submissions/1", + "submission_type": "online_upload" + }, + "status_code": 200 } } diff --git a/tests/fixtures/course.json b/tests/fixtures/course.json index fa235c6..59b2fe9 100644 --- a/tests/fixtures/course.json +++ b/tests/fixtures/course.json @@ -41,7 +41,7 @@ "method": "POST", "endpoint": "courses/1/assignments", "data": { - "id": 5, + "id": 1, "course_id": 1, "name": "Newly Created Assignment", "description": "Do this assignment" @@ -119,17 +119,6 @@ "status_code": 200 }, "get_assignment_by_id": { - "method": "GET", - "endpoint": "courses/1/assignments/5", - "data": { - "id": 5, - "course_id": 1, - "name": "Assignment 1", - "description": "Do this assignment" - }, - "status_code": 200 - }, - "get_assignment_by_id_2": { "method": "GET", "endpoint": "courses/1/assignments/1", "data": { @@ -1262,72 +1251,6 @@ ], "status_code": 200 }, - "submit_assignment": { - "method": "POST", - "endpoint": "courses/1/assignments/1/submissions", - "data": { - "id": 1, - "assignment_id": 1, - "user_id": 1, - "html_url": "http://example.com/courses/1/assignments/1/submissions/1", - "submission_type": "online_upload" - }, - "status_code": 200 - }, - "submit_assignment_2": { - "method": "POST", - "endpoint": "courses/1/assignments/5/submissions", - "data": { - "id": 1, - "assignment_id": 1, - "user_id": 1, - "html_url": "http://example.com/courses/1/assignments/5/submissions/1", - "submission_type": "online_upload" - }, - "status_code": 200 - }, - "list_submissions": { - "method": "GET", - "endpoint": "courses/1/assignments/1/submissions", - "data": [ - { - "id": 1, - "assignments_id": 1, - "user_id": 1, - "html_url": "http://example.com/courses/1/assignments/1/submissions/1", - "submission_type": "online_upload" - }, - { - "id": 2, - "assignments_id": 1, - "user_id": 2, - "html_url": "http://example.com/courses/1/assignments/1/submissions/2", - "submission_type": "online_upload" - } - ], - "status_code": 200 - }, - "list_submissions_2": { - "method": "GET", - "endpoint": "courses/1/assignments/5/submissions", - "data": [ - { - "id": 1, - "assignments_id": 1, - "user_id": 1, - "html_url": "http://example.com/courses/1/assignments/5/submissions/1", - "submission_type": "online_upload" - }, - { - "id": 2, - "assignments_id": 1, - "user_id": 2, - "html_url": "http://example.com/courses/1/assignments/5/submissions/2", - "submission_type": "online_upload" - } - ], - "status_code": 200 - }, "list_multiple_submissions": { "method": "GET", "endpoint": "courses/1/students/submissions", @@ -1349,30 +1272,6 @@ ], "status_code": 200 }, - "get_submission": { - "method": "GET", - "endpoint": 
"courses/1/assignments/1/submissions/1", - "data": { - "id": 1, - "assignments_id": 1, - "user_id": 1, - "html_url": "http://example.com/courses/1/assignments/1/submissions/1", - "submission_type": "online_upload" - }, - "status_code": 200 - }, - "get_submission_2": { - "method": "GET", - "endpoint": "courses/1/assignments/5/submissions/1", - "data": { - "id": 1, - "assignments_id": 1, - "user_id": 1, - "html_url": "http://example.com/courses/1/assignments/5/submissions/1", - "submission_type": "online_upload" - }, - "status_code": 200 - }, "list_gradeable_students": { "method": "GET", "endpoint": "courses/1/assignments/1/gradeable_students", diff --git a/tests/fixtures/submission.json b/tests/fixtures/submission.json index bee0e6f..1d18224 100644 --- a/tests/fixtures/submission.json +++ b/tests/fixtures/submission.json @@ -23,6 +23,27 @@ }, "status_code": 200 }, + "list_submissions": { + "method": "GET", + "endpoint": "courses/1/assignments/1/submissions", + "data": [ + { + "id": 1, + "assignments_id": 1, + "user_id": 1, + "html_url": "http://example.com/courses/1/assignments/1/submissions/1", + "submission_type": "online_upload" + }, + { + "id": 2, + "assignments_id": 1, + "user_id": 2, + "html_url": "http://example.com/courses/1/assignments/1/submissions/2", + "submission_type": "online_upload" + } + ], + "status_code": 200 + }, "upload_comment": { "method": "POST", "endpoint": "courses/1/assignments/1/submissions/1/comments/files", @@ -42,11 +63,16 @@ "id": 1 } }, - "update_submission": { - "method": "ANY", + "edit": { + "method": "PUT", "endpoint": "courses/1/assignments/1/submissions/1", "data": { - "id": 1 + "id": 1, + "assignments_id": 1, + "user_id": 1, + "html_url": "http://example.com/courses/1/assignments/1/submissions/1", + "submission_type": "online_upload", + "excused": true }, "status_code": 200 } diff --git a/tests/test_assignment.py b/tests/test_assignment.py index b69cf86..e0adc34 100644 --- a/tests/test_assignment.py +++ b/tests/test_assignment.py @@ -5,6 +5,9 @@ import requests_mock from canvasapi import Canvas from canvasapi.assignment import Assignment, AssignmentGroup +from canvasapi.exceptions import RequiredFieldMissing +from canvasapi.submission import Submission +from canvasapi.user import UserDisplay from tests import settings from tests.util import register_uris @@ -19,7 +22,15 @@ class TestAssignment(unittest.TestCase): register_uris({'course': ['get_by_id', 'get_assignment_by_id']}, m) self.course = self.canvas.get_course(1) - self.assignment = self.course.get_assignment(5) + self.assignment = self.course.get_assignment(1) + + # delete() + def test_delete_assignments(self, m): + register_uris({'assignment': ['delete_assignment']}, m) + + deleted_assignment = self.assignment.delete() + + self.assertIsInstance(deleted_assignment, Assignment) # edit() def test_edit_assignment(self, m): @@ -32,13 +43,58 @@ class TestAssignment(unittest.TestCase): self.assertTrue(hasattr(edited_assignment, 'name')) self.assertEqual(edited_assignment.name, name) - # delete() - def test_delete_assignments(self, m): - register_uris({'assignment': ['delete_assignment']}, m) + # get_gradeable_students() + def test_get_gradeable_students(self, m): + register_uris({'course': ['list_gradeable_students']}, m) - deleted_assignment = self.assignment.delete() + students = self.assignment.get_gradeable_students() + student_list = [student for student in students] - self.assertIsInstance(deleted_assignment, Assignment) + self.assertEqual(len(student_list), 2) + 
self.assertIsInstance(student_list[0], UserDisplay) + + # get_submission() + def test_get_submission(self, m): + register_uris({ + 'submission': ['get_by_id_course'], + 'user': ['get_by_id'] + }, m) + + user_id = 1 + submission_by_id = self.assignment.get_submission(user_id) + self.assertIsInstance(submission_by_id, Submission) + self.assertTrue(hasattr(submission_by_id, 'submission_type')) + + user = self.canvas.get_user(user_id) + submission_by_obj = self.assignment.get_submission(user) + self.assertIsInstance(submission_by_obj, Submission) + self.assertTrue(hasattr(submission_by_obj, 'submission_type')) + + # get_submissions() + def test_get_submissions(self, m): + register_uris({'submission': ['list_submissions']}, m) + + submissions = self.assignment.get_submissions() + submission_list_by_id = [submission for submission in submissions] + + self.assertEqual(len(submission_list_by_id), 2) + self.assertIsInstance(submission_list_by_id[0], Submission) + + # submit() + def test_submit(self, m): + register_uris({'assignment': ['submit']}, m) + + sub_type = "online_upload" + sub_dict = {'submission_type': sub_type} + submission = self.assignment.submit(sub_dict) + + self.assertIsInstance(submission, Submission) + self.assertTrue(hasattr(submission, 'submission_type')) + self.assertEqual(submission.submission_type, sub_type) + + def test_submit_fail(self, m): + with self.assertRaises(RequiredFieldMissing): + self.assignment.submit({}) # __str__() def test__str__(self, m): diff --git a/tests/test_course.py b/tests/test_course.py index 04aa27b..b9fda7f 100644 --- a/tests/test_course.py +++ b/tests/test_course.py @@ -51,7 +51,7 @@ class TestCourse(unittest.TestCase): self.page = self.course.get_page('my-url') self.quiz = self.course.get_quiz(1) self.user = self.canvas.get_user(1) - self.assignment = self.course.get_assignment('5') + self.assignment = self.course.get_assignment(1) # __str__() def test__str__(self, m): @@ -333,7 +333,7 @@ class TestCourse(unittest.TestCase): self.assertIsInstance(assignment, Assignment) self.assertTrue(hasattr(assignment, 'name')) self.assertEqual(assignment.name, name) - self.assertEqual(assignment.id, 5) + self.assertEqual(assignment.id, 1) def test_create_assignment_fail(self, m): with self.assertRaises(RequiredFieldMissing): @@ -343,7 +343,7 @@ class TestCourse(unittest.TestCase): def test_get_assignment(self, m): register_uris({'course': ['get_assignment_by_id']}, m) - assignment_by_id = self.course.get_assignment('5') + assignment_by_id = self.course.get_assignment(1) self.assertIsInstance(assignment_by_id, Assignment) self.assertTrue(hasattr(assignment_by_id, 'name')) @@ -446,13 +446,15 @@ class TestCourse(unittest.TestCase): def test_list_sections(self, m): register_uris({'course': ['get_sections', 'get_sections_p2']}, m) - with warnings.catch_warnings(record=True) as w: + with warnings.catch_warnings(record=True) as warning_list: sections = self.course.list_sections() section_list = [sect for sect in sections] self.assertEqual(len(section_list), 4) self.assertIsInstance(section_list[0], Section) - self.assertTrue(issubclass(w[0].category, DeprecationWarning)) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) def test_create_course_section(self, m): register_uris({'course': ['create_section']}, m) @@ -714,43 +716,63 @@ class TestCourse(unittest.TestCase): # submit_assignment() def test_submit_assignment(self, m): - register_uris({'course': ['submit_assignment', 'submit_assignment_2']}, m) + 
register_uris({'assignment': ['submit']}, m) - assignment_id = 1 - sub_type = "online_upload" - sub_dict = {'submission_type': sub_type} - submission_by_id = self.course.submit_assignment(assignment_id, sub_dict) + with warnings.catch_warnings(record=True) as warning_list: + assignment_id = 1 + sub_type = "online_upload" + sub_dict = {'submission_type': sub_type} + submission_by_id = self.course.submit_assignment(assignment_id, sub_dict) - self.assertIsInstance(submission_by_id, Submission) - self.assertTrue(hasattr(submission_by_id, 'submission_type')) - self.assertEqual(submission_by_id.submission_type, sub_type) + self.assertIsInstance(submission_by_id, Submission) + self.assertTrue(hasattr(submission_by_id, 'submission_type')) + self.assertEqual(submission_by_id.submission_type, sub_type) - submission_by_obj = self.course.submit_assignment(self.assignment, sub_dict) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) - self.assertIsInstance(submission_by_obj, Submission) - self.assertTrue(hasattr(submission_by_obj, 'submission_type')) - self.assertEqual(submission_by_obj.submission_type, sub_type) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_obj = self.course.submit_assignment(self.assignment, sub_dict) - def test_subit_assignment_fail(self, m): - with self.assertRaises(RequiredFieldMissing): - self.course.submit_assignment(1, {}) + self.assertIsInstance(submission_by_obj, Submission) + self.assertTrue(hasattr(submission_by_obj, 'submission_type')) + self.assertEqual(submission_by_obj.submission_type, sub_type) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) + + def test_submit_assignment_fail(self, m): + with warnings.catch_warnings(record=True) as warning_list: + with self.assertRaises(RequiredFieldMissing): + self.course.submit_assignment(1, {}) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # list_submissions() def test_list_submissions(self, m): - register_uris({'course': ['list_submissions', 'list_submissions_2']}, m) + register_uris({'submission': ['list_submissions']}, m) - assignment_id = 1 - submissions_by_id = self.course.list_submissions(assignment_id) - submission_list_by_id = [submission for submission in submissions_by_id] + with warnings.catch_warnings(record=True) as warning_list: + assignment_id = 1 + submissions_by_id = self.course.list_submissions(assignment_id) + submission_list_by_id = [submission for submission in submissions_by_id] + + self.assertEqual(len(submission_list_by_id), 2) + self.assertIsInstance(submission_list_by_id[0], Submission) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) - self.assertEqual(len(submission_list_by_id), 2) - self.assertIsInstance(submission_list_by_id[0], Submission) + with warnings.catch_warnings(record=True) as warning_list: + submissions_by_obj = self.course.list_submissions(self.assignment) + submission_list_by_obj = [submission for submission in submissions_by_obj] - submissions_by_obj = self.course.list_submissions(self.assignment) - submission_list_by_obj = [submission for submission in submissions_by_obj] + self.assertEqual(len(submission_list_by_obj), 2) + self.assertIsInstance(submission_list_by_obj[0], Submission) - self.assertEqual(len(submission_list_by_obj), 2) - self.assertIsInstance(submission_list_by_obj[0], Submission) + 
self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # list_multiple_submission() def test_list_multiple_submissions(self, m): @@ -783,92 +805,139 @@ class TestCourse(unittest.TestCase): # get_submission() def test_get_submission(self, m): - register_uris({'course': ['get_assignment_by_id_2', 'get_submission']}, m) + register_uris({ + 'course': ['get_assignment_by_id'], + 'submission': ['get_by_id_course'] + }, m) assignment_for_id = 1 user_id = 1 - submission_by_id = self.course.get_submission(assignment_for_id, user_id) - self.assertIsInstance(submission_by_id, Submission) - self.assertTrue(hasattr(submission_by_id, 'submission_type')) - assignment_for_obj = self.course.get_assignment(1) - submission_by_obj = self.course.get_submission(assignment_for_obj, self.user) - self.assertIsInstance(submission_by_obj, Submission) - self.assertTrue(hasattr(submission_by_obj, 'submission_type')) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.course.get_submission(assignment_for_id, user_id) + self.assertIsInstance(submission_by_id, Submission) + self.assertTrue(hasattr(submission_by_id, 'submission_type')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) + + with warnings.catch_warnings(record=True) as warning_list: + assignment_for_obj = self.course.get_assignment(1) + submission_by_obj = self.course.get_submission(assignment_for_obj, self.user) + self.assertIsInstance(submission_by_obj, Submission) + self.assertTrue(hasattr(submission_by_obj, 'submission_type')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # update_submission() def test_update_submission(self, m): - register_uris( - { - 'course': [ - 'get_assignment_by_id_2', - 'update_submission', - 'get_submission' - ] - }, m) + register_uris({ + 'course': ['get_assignment_by_id'], + 'submission': ['edit', 'get_by_id_course'] + }, m) assignment_for_id = 1 user_id = 1 - submission = self.course.update_submission( - assignment_for_id, - user_id, - submission={'excuse': True} - ) - self.assertIsInstance(submission, Submission) - self.assertTrue(hasattr(submission, 'excused')) + with warnings.catch_warnings(record=True) as warning_list: + submission = self.course.update_submission( + assignment_for_id, + user_id, + submission={'excuse': True} + ) + self.assertIsInstance(submission, Submission) + self.assertTrue(hasattr(submission, 'excused')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) assignment_for_obj = self.course.get_assignment(1) - submission = self.course.update_submission( - assignment_for_obj, - self.user, - submission={'excuse': True} - ) - self.assertIsInstance(submission, Submission) - self.assertTrue(hasattr(submission, 'excused')) + with warnings.catch_warnings(record=True) as warning_list: + submission = self.course.update_submission( + assignment_for_obj, + self.user, + submission={'excuse': True} + ) + self.assertIsInstance(submission, Submission) + self.assertTrue(hasattr(submission, 'excused')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # list_gradeable_students() def test_list_gradeable_students(self, m): - register_uris({'course': ['get_assignment_by_id_2', 'list_gradeable_students']}, m) + register_uris({'course': ['get_assignment_by_id', 'list_gradeable_students']}, m) assignment_for_id 
= 1 - students_by_id = self.course.list_gradeable_students(assignment_for_id) - student_list_by_id = [student for student in students_by_id] + with warnings.catch_warnings(record=True) as warning_list: + students_by_id = self.course.list_gradeable_students(assignment_for_id) + student_list_by_id = [student for student in students_by_id] - self.assertEqual(len(student_list_by_id), 2) - self.assertIsInstance(student_list_by_id[0], UserDisplay) + self.assertEqual(len(student_list_by_id), 2) + self.assertIsInstance(student_list_by_id[0], UserDisplay) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) assignment_for_obj = self.course.get_assignment(1) - students_by_id = self.course.list_gradeable_students(assignment_for_obj) - student_list_by_id = [student for student in students_by_id] + with warnings.catch_warnings(record=True) as warning_list: + students_by_id = self.course.list_gradeable_students(assignment_for_obj) + student_list_by_id = [student for student in students_by_id] - self.assertEqual(len(student_list_by_id), 2) - self.assertIsInstance(student_list_by_id[0], UserDisplay) + self.assertEqual(len(student_list_by_id), 2) + self.assertIsInstance(student_list_by_id[0], UserDisplay) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # mark_submission_as_read def test_mark_submission_as_read(self, m): - register_uris({'course': ['get_assignment_by_id_2', 'mark_submission_as_read']}, m) + register_uris({'course': ['get_assignment_by_id', 'mark_submission_as_read']}, m) assignment_for_id = 1 user_for_id = 1 - submission_by_id = self.course.mark_submission_as_read(assignment_for_id, user_for_id) - self.assertTrue(submission_by_id) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.course.mark_submission_as_read(assignment_for_id, user_for_id) + self.assertTrue(submission_by_id) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) assignment_for_obj = self.course.get_assignment(1) - submission_by_obj = self.course.mark_submission_as_read(assignment_for_obj, self.user) - self.assertTrue(submission_by_obj) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_obj = self.course.mark_submission_as_read(assignment_for_obj, self.user) + self.assertTrue(submission_by_obj) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # mark_submission_as_unread def test_mark_submission_as_unread(self, m): - register_uris({'course': ['get_assignment_by_id_2', 'mark_submission_as_unread']}, m) + register_uris({'course': ['get_assignment_by_id', 'mark_submission_as_unread']}, m) assignment_for_id = 1 user_for_id = 1 - submission_by_id = self.course.mark_submission_as_unread(assignment_for_id, user_for_id) - self.assertTrue(submission_by_id) + + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.course.mark_submission_as_unread( + assignment_for_id, + user_for_id + ) + self.assertTrue(submission_by_id) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) assignment_for_obj = self.course.get_assignment(1) - submission_by_obj = self.course.mark_submission_as_unread(assignment_for_obj, self.user) - self.assertTrue(submission_by_obj) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_obj = 
self.course.mark_submission_as_unread( + assignment_for_obj, + self.user + ) + self.assertTrue(submission_by_obj) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # list_external_feeds() def test_list_external_feeds(self, m): diff --git a/tests/test_section.py b/tests/test_section.py index b5c24bf..0711226 100644 --- a/tests/test_section.py +++ b/tests/test_section.py @@ -79,7 +79,7 @@ class TestSection(unittest.TestCase): def test_submit_assignment(self, m): register_uris( { - 'section': ['submit_assignment'], + 'assignment': ['submit'], 'submission': ['get_by_id_section'], 'user': ['get_by_id', 'get_user_assignments'] }, m) @@ -87,49 +87,67 @@ class TestSection(unittest.TestCase): assignment_id = 1 sub_type = "online_upload" sub_dict = {'submission_type': sub_type} - assignment_by_id = self.section.submit_assignment(assignment_id, sub_dict) + with warnings.catch_warnings(record=True) as warning_list: + assignment_by_id = self.section.submit_assignment(assignment_id, sub_dict) + + self.assertIsInstance(assignment_by_id, Submission) + self.assertTrue(hasattr(assignment_by_id, 'submission_type')) + self.assertEqual(assignment_by_id.submission_type, sub_type) - self.assertIsInstance(assignment_by_id, Submission) - self.assertTrue(hasattr(assignment_by_id, 'submission_type')) - self.assertEqual(assignment_by_id.submission_type, sub_type) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) user_obj = self.canvas.get_user(1) assignments_obj = user_obj.get_assignments(1) sub_type = "online_upload" sub_dict = {'submission_type': sub_type} - assignment_by_obj = self.section.submit_assignment(assignments_obj[0], sub_dict) + with warnings.catch_warnings(record=True) as warning_list: + assignment_by_obj = self.section.submit_assignment(assignments_obj[0], sub_dict) + + self.assertIsInstance(assignment_by_obj, Submission) + self.assertTrue(hasattr(assignment_by_obj, 'submission_type')) + self.assertEqual(assignment_by_obj.submission_type, sub_type) - self.assertIsInstance(assignment_by_obj, Submission) - self.assertTrue(hasattr(assignment_by_obj, 'submission_type')) - self.assertEqual(assignment_by_obj.submission_type, sub_type) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) def test_subit_assignment_fail(self, m): - with self.assertRaises(RequiredFieldMissing): - self.section.submit_assignment(1, {}) + with warnings.catch_warnings(record=True) as warning_list: + with self.assertRaises(RequiredFieldMissing): + self.section.submit_assignment(1, {}) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # list_submissions() def test_list_submissions(self, m): register_uris( { - 'section': ['list_submissions'], - 'submission': ['get_by_id_section'], + 'submission': ['list_submissions'], 'user': ['get_by_id', 'get_user_assignments'] }, m) assignment_id = 1 - submissions_by_id = self.section.list_submissions(assignment_id) - submission_list_by_id = [submission for submission in submissions_by_id] + with warnings.catch_warnings(record=True) as warning_list: + submissions_by_id = self.section.list_submissions(assignment_id) + submission_list_by_id = [submission for submission in submissions_by_id] + + self.assertEqual(len(submission_list_by_id), 2) + self.assertIsInstance(submission_list_by_id[0], Submission) - self.assertEqual(len(submission_list_by_id), 2) - 
self.assertIsInstance(submission_list_by_id[0], Submission) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) user_obj = self.canvas.get_user(1) assignments_obj = user_obj.get_assignments(1) - submissions_by_obj = self.section.list_submissions(assignments_obj[0]) - submission_list_by_obj = [submission for submission in submissions_by_obj] + with warnings.catch_warnings(record=True) as warning_list: + submissions_by_obj = self.section.list_submissions(assignments_obj[0]) + submission_list_by_obj = [submission for submission in submissions_by_obj] + + self.assertEqual(len(submission_list_by_obj), 2) + self.assertIsInstance(submission_list_by_obj[0], Submission) - self.assertEqual(len(submission_list_by_obj), 2) - self.assertIsInstance(submission_list_by_obj[0], Submission) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # list_multiple_submission() def test_list_multiple_submissions(self, m): @@ -164,93 +182,126 @@ class TestSection(unittest.TestCase): def test_get_submission(self, m): register_uris( { - 'section': ['get_submission'], - 'submission': ['get_by_id_section'], + 'submission': ['get_by_id_course'], 'user': ['get_by_id', 'get_user_assignments'] }, m) assignment_id = 1 user_id = 1 - submission_by_id = self.section.get_submission(assignment_id, user_id) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.section.get_submission(assignment_id, user_id) - self.assertIsInstance(submission_by_id, Submission) - self.assertTrue(hasattr(submission_by_id, 'submission_type')) + self.assertIsInstance(submission_by_id, Submission) + self.assertTrue(hasattr(submission_by_id, 'submission_type')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) user_obj = self.canvas.get_user(1) assignments_obj = user_obj.get_assignments(1) - submission_by_obj = self.section.get_submission(assignments_obj[0], user_obj) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_obj = self.section.get_submission(assignments_obj[0], user_obj) - self.assertIsInstance(submission_by_obj, Submission) - self.assertTrue(hasattr(submission_by_obj, 'submission_type')) + self.assertIsInstance(submission_by_obj, Submission) + self.assertTrue(hasattr(submission_by_obj, 'submission_type')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # update_submission() def test_update_submission(self, m): register_uris( { - 'section': ['update_submission', 'get_submission'], - 'submission': ['get_by_id_section'], + 'submission': ['get_by_id_section', 'edit'], 'user': ['get_by_id', 'get_user_assignments'] }, m) assignment_id = 1 user_id = 1 - submission_by_id = self.section.update_submission( - assignment_id, - user_id, - submission={'excuse': True} - ) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.section.update_submission( + assignment_id, + user_id, + submission={'excuse': True} + ) + + self.assertIsInstance(submission_by_id, Submission) + self.assertTrue(hasattr(submission_by_id, 'excused')) - self.assertIsInstance(submission_by_id, Submission) - self.assertTrue(hasattr(submission_by_id, 'excused')) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) user_obj = self.canvas.get_user(1) assignments_obj = user_obj.get_assignments(1) - submission_by_obj = 
self.section.update_submission( - assignments_obj[0], - user_obj, - submission={'excuse': True} - ) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_obj = self.section.update_submission( + assignments_obj[0], + user_obj, + submission={'excuse': True} + ) - self.assertIsInstance(submission_by_obj, Submission) - self.assertTrue(hasattr(submission_by_obj, 'excused')) + self.assertIsInstance(submission_by_obj, Submission) + self.assertTrue(hasattr(submission_by_obj, 'excused')) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # mark_submission_as_read def test_mark_submission_as_read(self, m): register_uris( { - 'section': ['mark_submission_as_read'], + 'course': ['mark_submission_as_read'], 'submission': ['get_by_id_section'], 'user': ['get_by_id', 'get_user_assignments'] }, m) submission_id = 1 user_id = 1 - submission_by_id = self.section.mark_submission_as_read(submission_id, user_id) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.section.mark_submission_as_read(submission_id, user_id) - self.assertTrue(submission_by_id) + self.assertTrue(submission_by_id) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) user_obj = self.canvas.get_user(1) - assignments_obj = user_obj.get_assignments(1) - submission_by_obj = self.section.mark_submission_as_read(assignments_obj[0], user_obj) + with warnings.catch_warnings(record=True) as warning_list: + assignments_obj = user_obj.get_assignments(1) + submission_by_obj = self.section.mark_submission_as_read(assignments_obj[0], user_obj) + + self.assertTrue(submission_by_obj) - self.assertTrue(submission_by_obj) + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) # mark_submission_as_unread def test_mark_submission_as_unread(self, m): register_uris( { - 'section': ['mark_submission_as_unread'], + 'course': ['mark_submission_as_unread'], 'submission': ['get_by_id_section'], 'user': ['get_by_id', 'get_user_assignments'] }, m) user_id = 1 assignment_id = 1 - submission_by_id = self.section.mark_submission_as_unread(assignment_id, user_id) - self.assertTrue(submission_by_id) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_id = self.section.mark_submission_as_unread(assignment_id, user_id) + self.assertTrue(submission_by_id) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) user_obj = self.canvas.get_user(1) assignments_obj = user_obj.get_assignments(1) - submission_by_obj = self.section.mark_submission_as_unread(assignments_obj[0], user_obj) - self.assertTrue(submission_by_obj) + with warnings.catch_warnings(record=True) as warning_list: + submission_by_obj = self.section.mark_submission_as_unread( + assignments_obj[0], + user_obj + ) + self.assertTrue(submission_by_obj) + + self.assertEqual(len(warning_list), 1) + self.assertEqual(warning_list[-1].category, DeprecationWarning) diff --git a/tests/test_submission.py b/tests/test_submission.py index ed1c693..354e04a 100644 --- a/tests/test_submission.py +++ b/tests/test_submission.py @@ -1,10 +1,12 @@ from __future__ import absolute_import, division, print_function, unicode_literals import unittest import uuid +import warnings import requests_mock from canvasapi import Canvas +from canvasapi.submission import Submission from tests import settings from tests.util import cleanup_file, 
register_uris @@ -17,27 +19,47 @@ class TestSubmission(unittest.TestCase): with requests_mock.Mocker() as m: register_uris({ - 'course': ['get_by_id'], + 'course': ['get_by_id', 'get_assignment_by_id'], 'section': ['get_by_id'], 'submission': ['get_by_id_course', 'get_by_id_section'] }, m) - self.course = self.canvas.get_course(1) - self.submission_course = self.course.get_submission(1, 1) - self.section = self.canvas.get_section(1) - self.submission_section = self.section.get_submission(1, 1) + with warnings.catch_warnings(record=True) as warning_list: + self.course = self.canvas.get_course(1) + self.submission_course = self.course.get_submission(1, 1) + + self.section = self.canvas.get_section(1) + self.submission_section = self.section.get_submission(1, 1) + + self.assertEqual(len(warning_list), 2) + self.assertEqual(warning_list[0].category, DeprecationWarning) + self.assertEqual(warning_list[1].category, DeprecationWarning) # __str__() def test__str__(self, m): string = str(self.submission_course) self.assertIsInstance(string, str) + # edit() + def test_edit(self, m): + register_uris({ + 'submission': ['edit'] + }, m) + + self.assertFalse(hasattr(self.submission_course, 'excused')) + + self.submission_course.edit(submission={'excuse': True}) + + self.assertIsInstance(self.submission_course, Submission) + self.assertTrue(hasattr(self.submission_course, 'excused')) + self.assertTrue(self.submission_course.excused) + # upload_comment() def test_upload_comment(self, m): register_uris({'submission': [ 'upload_comment', 'upload_comment_final', - 'update_submission', + 'edit', ]}, m) filename = 'testfile_submission_{}'.format(uuid.uuid4().hex) @@ -53,6 +75,20 @@ class TestSubmission(unittest.TestCase): cleanup_file(filename) def test_upload_comment_section(self, m): - # Sections do not support uploading file comments - with self.assertRaises(ValueError): - self.submission_section.upload_comment('fakefilename.txt') + register_uris({'submission': [ + 'upload_comment', + 'upload_comment_final', + 'edit', + ]}, m) + + filename = 'testfile_submission_{}'.format(uuid.uuid4().hex) + + try: + with open(filename, 'w+') as file: + response = self.submission_section.upload_comment(file) + + self.assertTrue(response[0]) + self.assertIsInstance(response[1], dict) + self.assertIn('url', response[1]) + finally: + cleanup_file(filename) diff --git a/tests/test_user.py b/tests/test_user.py index c43b98f..eb5fbd8 100644 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -345,11 +345,12 @@ class TestUserDisplay(unittest.TestCase): with requests_mock.Mocker() as m: register_uris({ - 'course': ['get_by_id', 'list_gradeable_students'] + 'course': ['get_by_id', 'get_assignment_by_id', 'list_gradeable_students'] }, m) self.course = self.canvas.get_course(1) - self.userDisplays = self.course.list_gradeable_students(1) + self.assignment = self.course.get_assignment(1) + self.userDisplays = self.assignment.get_gradeable_students() self.userDisplayList = [ud for ud in self.userDisplays] self.userDisplay = self.userDisplayList[0]
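To make the intent of the rewritten canvasapi tests easier to follow, here is the assignment-level workflow they exercise. This is only a sketch inferred from the test code above; the Canvas URL, API key, and numeric IDs are placeholders, and only the method names are taken from the tests.

from canvasapi import Canvas

# Placeholder credentials; only the calls below come from the tests above.
canvas = Canvas('https://example.instructure.com', 'API_KEY')
course = canvas.get_course(1)
assignment = course.get_assignment(1)

# New assignment-level entry points exercised in tests/test_assignment.py
submission = assignment.submit({'submission_type': 'online_upload'})
single = assignment.get_submission(1)            # by user id or User object
all_subs = list(assignment.get_submissions())
students = list(assignment.get_gradeable_students())

# Submission.edit() is what the new 'edit' fixture covers
submission.edit(submission={'excuse': True})

# The old course-level helpers keep working but now emit DeprecationWarning,
# which is exactly what the warnings.catch_warnings blocks in test_course.py
# and test_section.py assert.
course.submit_assignment(1, {'submission_type': 'online_upload'})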
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 7 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "flake8", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt", "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 -e git+https://github.com/ucfopen/canvasapi.git@c69f6a9801ac275fdad46d97fa95c77c25d6f953#egg=canvasapi certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 docutils==0.17.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytz==2025.2 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==4.3.2 sphinx-rtd-theme==1.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: canvasapi channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - docutils==0.17.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytz==2025.2 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-rtd-theme==1.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/canvasapi
[ "tests/test_assignment.py::TestAssignment::test_get_gradeable_students", "tests/test_assignment.py::TestAssignment::test_get_submission", "tests/test_assignment.py::TestAssignment::test_get_submissions", "tests/test_assignment.py::TestAssignment::test_submit", "tests/test_assignment.py::TestAssignment::test_submit_fail", "tests/test_user.py::TestUserDisplay::test__str__" ]
[ "tests/test_course.py::TestCourse::test_get_submission", "tests/test_course.py::TestCourse::test_list_gradeable_students", "tests/test_course.py::TestCourse::test_list_sections", "tests/test_course.py::TestCourse::test_list_submissions", "tests/test_course.py::TestCourse::test_mark_submission_as_read", "tests/test_course.py::TestCourse::test_mark_submission_as_unread", "tests/test_course.py::TestCourse::test_submit_assignment", "tests/test_course.py::TestCourse::test_submit_assignment_fail", "tests/test_course.py::TestCourse::test_update_submission", "tests/test_section.py::TestSection::test_get_submission", "tests/test_section.py::TestSection::test_list_submissions", "tests/test_section.py::TestSection::test_mark_submission_as_read", "tests/test_section.py::TestSection::test_mark_submission_as_unread", "tests/test_section.py::TestSection::test_subit_assignment_fail", "tests/test_section.py::TestSection::test_submit_assignment", "tests/test_section.py::TestSection::test_update_submission", "tests/test_submission.py::TestSubmission::test__str__", "tests/test_submission.py::TestSubmission::test_edit", "tests/test_submission.py::TestSubmission::test_upload_comment", "tests/test_submission.py::TestSubmission::test_upload_comment_section" ]
[ "tests/test_assignment.py::TestAssignment::test__str__", "tests/test_assignment.py::TestAssignment::test_delete_assignments", "tests/test_assignment.py::TestAssignment::test_edit_assignment", "tests/test_assignment.py::TestAssignmentGroup::test__str__", "tests/test_assignment.py::TestAssignmentGroup::test_delete_assignment_group", "tests/test_assignment.py::TestAssignmentGroup::test_edit_assignment_group", "tests/test_course.py::TestCourse::test__str__", "tests/test_course.py::TestCourse::test_add_grading_standards", "tests/test_course.py::TestCourse::test_add_grading_standards_empty_list", "tests/test_course.py::TestCourse::test_add_grading_standards_missing_name_key", "tests/test_course.py::TestCourse::test_add_grading_standards_missing_value_key", "tests/test_course.py::TestCourse::test_add_grading_standards_non_dict_list", "tests/test_course.py::TestCourse::test_conclude", "tests/test_course.py::TestCourse::test_course_files", "tests/test_course.py::TestCourse::test_create_assignment", "tests/test_course.py::TestCourse::test_create_assignment_fail", "tests/test_course.py::TestCourse::test_create_assignment_group", "tests/test_course.py::TestCourse::test_create_course_section", "tests/test_course.py::TestCourse::test_create_discussion_topic", "tests/test_course.py::TestCourse::test_create_external_feed", "tests/test_course.py::TestCourse::test_create_external_tool", "tests/test_course.py::TestCourse::test_create_folder", "tests/test_course.py::TestCourse::test_create_group_category", "tests/test_course.py::TestCourse::test_create_module", "tests/test_course.py::TestCourse::test_create_module_fail", "tests/test_course.py::TestCourse::test_create_page", "tests/test_course.py::TestCourse::test_create_page_fail", "tests/test_course.py::TestCourse::test_create_quiz", "tests/test_course.py::TestCourse::test_create_quiz_fail", "tests/test_course.py::TestCourse::test_delete", "tests/test_course.py::TestCourse::test_delete_external_feed", "tests/test_course.py::TestCourse::test_edit_front_page", "tests/test_course.py::TestCourse::test_enroll_user", "tests/test_course.py::TestCourse::test_get_assignment", "tests/test_course.py::TestCourse::test_get_assignment_group", "tests/test_course.py::TestCourse::test_get_assignments", "tests/test_course.py::TestCourse::test_get_course_level_assignment_data", "tests/test_course.py::TestCourse::test_get_course_level_participation_data", "tests/test_course.py::TestCourse::test_get_course_level_student_summary_data", "tests/test_course.py::TestCourse::test_get_discussion_topic", "tests/test_course.py::TestCourse::test_get_discussion_topics", "tests/test_course.py::TestCourse::test_get_enrollments", "tests/test_course.py::TestCourse::test_get_external_tool", "tests/test_course.py::TestCourse::test_get_external_tools", "tests/test_course.py::TestCourse::test_get_file", "tests/test_course.py::TestCourse::test_get_folder", "tests/test_course.py::TestCourse::test_get_full_discussion_topic", "tests/test_course.py::TestCourse::test_get_grading_standards", "tests/test_course.py::TestCourse::test_get_module", "tests/test_course.py::TestCourse::test_get_modules", "tests/test_course.py::TestCourse::test_get_outcome_group", "tests/test_course.py::TestCourse::test_get_outcome_groups_in_context", "tests/test_course.py::TestCourse::test_get_outcome_links_in_context", "tests/test_course.py::TestCourse::test_get_outcome_result_rollups", "tests/test_course.py::TestCourse::test_get_outcome_results", "tests/test_course.py::TestCourse::test_get_page", 
"tests/test_course.py::TestCourse::test_get_pages", "tests/test_course.py::TestCourse::test_get_quiz", "tests/test_course.py::TestCourse::test_get_quiz_fail", "tests/test_course.py::TestCourse::test_get_quizzes", "tests/test_course.py::TestCourse::test_get_recent_students", "tests/test_course.py::TestCourse::test_get_root_outcome_group", "tests/test_course.py::TestCourse::test_get_rubric", "tests/test_course.py::TestCourse::test_get_section", "tests/test_course.py::TestCourse::test_get_sections", "tests/test_course.py::TestCourse::test_get_settings", "tests/test_course.py::TestCourse::test_get_single_grading_standard", "tests/test_course.py::TestCourse::test_get_user", "tests/test_course.py::TestCourse::test_get_user_id_type", "tests/test_course.py::TestCourse::test_get_user_in_a_course_level_assignment_data", "tests/test_course.py::TestCourse::test_get_user_in_a_course_level_messaging_data", "tests/test_course.py::TestCourse::test_get_user_in_a_course_level_participation_data", "tests/test_course.py::TestCourse::test_get_users", "tests/test_course.py::TestCourse::test_list_assignment_groups", "tests/test_course.py::TestCourse::test_list_external_feeds", "tests/test_course.py::TestCourse::test_list_folders", "tests/test_course.py::TestCourse::test_list_group_categories", "tests/test_course.py::TestCourse::test_list_groups", "tests/test_course.py::TestCourse::test_list_multiple_submissions", "tests/test_course.py::TestCourse::test_list_multiple_submissions_grouped_param", "tests/test_course.py::TestCourse::test_list_rubrics", "tests/test_course.py::TestCourse::test_list_tabs", "tests/test_course.py::TestCourse::test_preview_html", "tests/test_course.py::TestCourse::test_reorder_pinned_topics", "tests/test_course.py::TestCourse::test_reorder_pinned_topics_comma_separated_string", "tests/test_course.py::TestCourse::test_reorder_pinned_topics_invalid_input", "tests/test_course.py::TestCourse::test_reorder_pinned_topics_tuple", "tests/test_course.py::TestCourse::test_reset", "tests/test_course.py::TestCourse::test_show_front_page", "tests/test_course.py::TestCourse::test_update", "tests/test_course.py::TestCourse::test_update_settings", "tests/test_course.py::TestCourse::test_update_tab", "tests/test_course.py::TestCourse::test_upload", "tests/test_course.py::TestCourseNickname::test__str__", "tests/test_course.py::TestCourseNickname::test_remove", "tests/test_section.py::TestSection::test__str__", "tests/test_section.py::TestSection::test_cross_list_section", "tests/test_section.py::TestSection::test_decross_list_section", "tests/test_section.py::TestSection::test_delete", "tests/test_section.py::TestSection::test_edit", "tests/test_section.py::TestSection::test_get_enrollments", "tests/test_section.py::TestSection::test_list_multiple_submissions", "tests/test_section.py::TestSection::test_list_multiple_submissions_grouped_param", "tests/test_user.py::TestUser::test__str__", "tests/test_user.py::TestUser::test_add_observee", "tests/test_user.py::TestUser::test_add_observee_with_credentials", "tests/test_user.py::TestUser::test_create_folder", "tests/test_user.py::TestUser::test_edit", "tests/test_user.py::TestUser::test_get_avatars", "tests/test_user.py::TestUser::test_get_color", "tests/test_user.py::TestUser::test_get_colors", "tests/test_user.py::TestUser::test_get_courses", "tests/test_user.py::TestUser::test_get_file", "tests/test_user.py::TestUser::test_get_folder", "tests/test_user.py::TestUser::test_get_missing_submissions", "tests/test_user.py::TestUser::test_get_page_views", 
"tests/test_user.py::TestUser::test_get_profile", "tests/test_user.py::TestUser::test_list_calendar_events_for_user", "tests/test_user.py::TestUser::test_list_communication_channels", "tests/test_user.py::TestUser::test_list_enrollments", "tests/test_user.py::TestUser::test_list_folders", "tests/test_user.py::TestUser::test_list_observees", "tests/test_user.py::TestUser::test_list_user_logins", "tests/test_user.py::TestUser::test_merge_into_id", "tests/test_user.py::TestUser::test_merge_into_user", "tests/test_user.py::TestUser::test_remove_observee", "tests/test_user.py::TestUser::test_show_observee", "tests/test_user.py::TestUser::test_update_color", "tests/test_user.py::TestUser::test_update_color_no_hashtag", "tests/test_user.py::TestUser::test_update_settings", "tests/test_user.py::TestUser::test_upload", "tests/test_user.py::TestUser::test_user_files", "tests/test_user.py::TestUser::test_user_get_assignments" ]
[]
MIT License
2,225
[ "canvasapi/submission.py", "setup.py", "CHANGELOG.md", "canvasapi/course.py", ".travis.yml", "canvasapi/assignment.py", "canvasapi/section.py" ]
[ "canvasapi/submission.py", "setup.py", "CHANGELOG.md", "canvasapi/course.py", ".travis.yml", "canvasapi/assignment.py", "canvasapi/section.py" ]
akolar__ogn-lib-18
fb920126739e2ac5dba17b6dd14b718b1952090f
2018-02-28 06:21:43
695f77174bd5a1ef7bdba53c03e3e905aaaec521
diff --git a/ogn_lib/parser.py b/ogn_lib/parser.py
index 4ae680a..f3b51ae 100644
--- a/ogn_lib/parser.py
+++ b/ogn_lib/parser.py
@@ -773,7 +773,7 @@ class LiveTrack24(Parser):
 
 
 class Capturs(Parser):
-    __destto__ = ['OGLT24', 'OGLT24-1']
+    __destto__ = ['OGCAPT', 'OGCAPT-1']
 
     @staticmethod
     def _preprocess_message(message):
Capturs parser has invalid callsign
Capturs parser is registered using OGLT24 instead of OGCAPT.
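The one-line patch above matters because parsers are looked up by the APRS destination callsign. The sketch below illustrates that dispatch; the registry and header-splitting code are assumptions made for illustration (ogn-lib's own Parser base class handles the real registration), and only the __destto__ values and the sample message come from the patch and the new test.

# Hypothetical registry keyed on the destination callsign of an APRS header.
_parsers = {}

class ParserBase:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        for callsign in getattr(cls, '__destto__', []):
            _parsers[callsign] = cls

class Capturs(ParserBase):
    # Before the fix this read ['OGLT24', 'OGLT24-1'], so traffic addressed
    # to OGCAPT never reached this parser.
    __destto__ = ['OGCAPT', 'OGCAPT-1']

def parser_for(message):
    # Header form: "FLRDDEEF1>OGCAPT,qAS,CAPTURS:/..."; the destination
    # callsign sits between '>' and the first ','.
    destto = message.split('>', 1)[1].split(',', 1)[0]
    return _parsers[destto]

assert parser_for("FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065144h4837.56N/00233.80E'000/000/") is Capturs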
akolar/ogn-lib
diff --git a/tests/test_parser.py b/tests/test_parser.py index c1c0a9c..937c193 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -426,6 +426,14 @@ class TestAPRS: assert data['aircraft_type'] is constants.AirplaneType.glider assert data['address_type'] is constants.AddressType.flarm + def test_registered(self, mocker): + mocker.spy(parser.APRS, '_parse_protocol_specific') + parser.Parser("FLRDDA5BA>APRS,qAS,LFMX:/165829h4415.41N/00600.03E'342/" + "049/A=005524 id0ADDA5BA -454fpm -1.1rot 8.8dB 0e " + "+51.2kHz gps4x5") + parser.APRS._parse_protocol_specific.assert_called_once_with( + 'id0ADDA5BA -454fpm -1.1rot 8.8dB 0e +51.2kHz gps4x5') + class TestNaviter: @@ -459,6 +467,13 @@ class TestNaviter: assert data['aircraft_type'] is constants.AirplaneType.paraglider assert data['address_type'] is constants.AddressType.naviter + def test_registered(self, mocker): + mocker.spy(parser.Naviter, '_parse_protocol_specific') + parser.Parser("NAV04220E>OGNAVI,qAS,NAVITER:/140748h4552.27N/01155.61E" + "'090/012/A=006562 !W81! id044004220E +060fpm +1.2rot") + parser.Naviter._parse_protocol_specific.assert_called_once_with( + '!W81! id044004220E +060fpm +1.2rot') + class TestSpot: def test_parse_protocol_specific(self): @@ -471,6 +486,13 @@ class TestSpot: with pytest.raises(exceptions.ParseError): parser.Spot._parse_protocol_specific('id0-2860357 SPOT3') + def test_registered(self, mocker): + mocker.spy(parser.Spot, '_parse_protocol_specific') + parser.Parser("ICA3E7540>OGSPOT,qAS,SPOT:/161427h1448.35S/04610.86W'" + "000/000/A=008677 id0-2860357 SPOT3 GOOD") + parser.Spot._parse_protocol_specific.assert_called_once_with( + 'id0-2860357 SPOT3 GOOD') + class TestSpider: def test_parse_protocol_specific(self): @@ -485,6 +507,13 @@ class TestSpider: with pytest.raises(exceptions.ParseError): parser.Spider._parse_protocol_specific('id300234010617040 +19dB') + def test_registered(self, mocker): + mocker.spy(parser.Spider, '_parse_protocol_specific') + parser.Parser("FLRDDF944>OGSPID,qAS,SPIDER:/190930h3322.78S/07034.60W'" + "000/000/A=002263 id300234010617040 +19dB LWE 3D") + parser.Spider._parse_protocol_specific.assert_called_once_with( + 'id300234010617040 +19dB LWE 3D') + class TestSkylines: def test_parse_protocol_specific(self): @@ -500,6 +529,13 @@ class TestSkylines: with pytest.raises(exceptions.ParseError): parser.Skylines._parse_protocol_specific('id1111') + def test_registered(self, mocker): + mocker.spy(parser.Skylines, '_parse_protocol_specific') + parser.Parser("FLRDDDD78>OGSKYL,qAS,SKYLINES:/134403h4225.90N/00144.8" + "3E'000/000/A=008438 id2816 +000fpm") + parser.Skylines._parse_protocol_specific.assert_called_once_with( + 'id2816 +000fpm') + class TestLT24: def test_parse_protocol_specific(self): @@ -517,6 +553,13 @@ class TestLT24: with pytest.raises(exceptions.ParseError): parser.LiveTrack24._parse_protocol_specific('id11111 GPS') + def test_registered(self, mocker): + mocker.spy(parser.LiveTrack24, '_parse_protocol_specific') + parser.Parser("FLRDDE48A>OGLT24,qAS,LT24:/102606h4030.47N/00338.38W'" + "000/018/A=002267 id25387 +000fpm GPS") + parser.LiveTrack24._parse_protocol_specific.assert_called_once_with( + 'id25387 +000fpm GPS') + class TestCapturs: def test_process(self): @@ -530,6 +573,12 @@ class TestCapturs: assert msg == msg_original[:-1] + def test_registered(self, mocker): + mocker.spy(parser.Capturs, '_preprocess_message') + msg = ("FLRDDEEF1>OGCAPT,qAS,CAPTURS:/065144h4837.56N/00233.80E'000/000/") + parser.Parser(msg) + 
parser.Capturs._preprocess_message.assert_called_once_with(msg) + class TestServerParser:
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 1 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work -e git+https://github.com/akolar/ogn-lib.git@fb920126739e2ac5dba17b6dd14b718b1952090f#egg=ogn_lib packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work pytest-mock==3.14.0 tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
name: ogn-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - pytest-mock==3.14.0 prefix: /opt/conda/envs/ogn-lib
[ "tests/test_parser.py::TestLT24::test_registered", "tests/test_parser.py::TestCapturs::test_registered" ]
[]
[ "tests/test_parser.py::TestParserBase::test_new_no_id", "tests/test_parser.py::TestParserBase::test_new_single_id", "tests/test_parser.py::TestParserBase::test_new_multi_id", "tests/test_parser.py::TestParserBase::test_no_destto", "tests/test_parser.py::TestParserBase::test_new_wrong_id", "tests/test_parser.py::TestParserBase::test_set_default", "tests/test_parser.py::TestParserBase::test_call", "tests/test_parser.py::TestParserBase::test_call_server", "tests/test_parser.py::TestParserBase::test_call_no_parser", "tests/test_parser.py::TestParserBase::test_call_default", "tests/test_parser.py::TestParserBase::test_call_failed", "tests/test_parser.py::TestParser::test_pattern_header", "tests/test_parser.py::TestParser::test_pattern_header_matches_all", "tests/test_parser.py::TestParser::test_pattern_location", "tests/test_parser.py::TestParser::test_pattern_location_matches_all", "tests/test_parser.py::TestParser::test_pattern_comment_common", "tests/test_parser.py::TestParser::test_pattern_comment_common_matches_all", "tests/test_parser.py::TestParser::test_pattern_all", "tests/test_parser.py::TestParser::test_pattern_all_matches_all", "tests/test_parser.py::TestParser::test_parse_msg_no_match", "tests/test_parser.py::TestParser::test_parse_msg_calls", "tests/test_parser.py::TestParser::test_parse_msg", "tests/test_parser.py::TestParser::test_parse_msg_full", "tests/test_parser.py::TestParser::test_parse_msg_delete_update", "tests/test_parser.py::TestParser::test_parse_msg_comment", "tests/test_parser.py::TestParser::test_preprocess_message", "tests/test_parser.py::TestParser::test_parse_digipeaters", "tests/test_parser.py::TestParser::test_parse_digipeaters_relayed", "tests/test_parser.py::TestParser::test_parse_digipeaters_unknown_format", "tests/test_parser.py::TestParser::test_parse_heading_speed", "tests/test_parser.py::TestParser::test_parse_heading_speed_both_missing", "tests/test_parser.py::TestParser::test_parse_heading_speed_null_input", "tests/test_parser.py::TestParser::test_parse_altitude", "tests/test_parser.py::TestParser::test_parse_altitude_missing", "tests/test_parser.py::TestParser::test_parse_attrs", "tests/test_parser.py::TestParser::test_parse_timestamp_h", "tests/test_parser.py::TestParser::test_parse_timestamp_z", "tests/test_parser.py::TestParser::test_parse_time_past", "tests/test_parser.py::TestParser::test_parse_time_future", "tests/test_parser.py::TestParser::test_parse_datetime", "tests/test_parser.py::TestParser::test_parse_location_sign", "tests/test_parser.py::TestParser::test_parse_location_value", "tests/test_parser.py::TestParser::test_parse_protocol_specific", "tests/test_parser.py::TestParser::test_conv_fpm_to_ms", "tests/test_parser.py::TestParser::test_conv_fpm_to_ms_sign", "tests/test_parser.py::TestParser::test_get_location_update_func", "tests/test_parser.py::TestParser::test_update_location_decimal_same", "tests/test_parser.py::TestParser::test_update_location_decimal_positive", "tests/test_parser.py::TestParser::test_update_location_decimal_negative", "tests/test_parser.py::TestParser::test_call", "tests/test_parser.py::TestParser::test_update_data", "tests/test_parser.py::TestParser::test_update_data_missing", "tests/test_parser.py::TestAPRS::test_parse_protocol_specific", "tests/test_parser.py::TestAPRS::test_parse_id_string", "tests/test_parser.py::TestAPRS::test_registered", "tests/test_parser.py::TestNaviter::test_parse_protocol_specific", "tests/test_parser.py::TestNaviter::test_parse_id_string", 
"tests/test_parser.py::TestNaviter::test_registered", "tests/test_parser.py::TestSpot::test_parse_protocol_specific", "tests/test_parser.py::TestSpot::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSpot::test_registered", "tests/test_parser.py::TestSpider::test_parse_protocol_specific", "tests/test_parser.py::TestSpider::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSpider::test_registered", "tests/test_parser.py::TestSkylines::test_parse_protocol_specific", "tests/test_parser.py::TestSkylines::test_parse_protocol_specific_fail", "tests/test_parser.py::TestSkylines::test_registered", "tests/test_parser.py::TestLT24::test_parse_protocol_specific", "tests/test_parser.py::TestLT24::test_parse_protocol_specific_fail", "tests/test_parser.py::TestCapturs::test_process", "tests/test_parser.py::TestCapturs::test_preprocess", "tests/test_parser.py::TestServerParser::test_parse_message_beacon", "tests/test_parser.py::TestServerParser::test_parse_message_status", "tests/test_parser.py::TestServerParser::test_parse_beacon_comment" ]
[]
MIT License
2,226
[ "ogn_lib/parser.py" ]
[ "ogn_lib/parser.py" ]
nipy__nipype-2471
6ca791d9c5ec0efb9f56cc9e44758d6e53ffb800
2018-02-28 18:35:59
704b97dee7848283692bac38f04541c5af2a87b5
diff --git a/.zenodo.json b/.zenodo.json
index 1058e3b15..29e7f047a 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -558,7 +558,7 @@
       "name": "Flandin, Guillaume"
     },
     {
-      "affiliation": "Stereotaxy Core, Brain & Spine Institute",
+      "affiliation": "University College London",
       "name": "P\u00e9rez-Garc\u00eda, Fernando",
       "orcid": "0000-0001-9090-3024"
     },
diff --git a/nipype/interfaces/niftyreg/regutils.py b/nipype/interfaces/niftyreg/regutils.py
index 7c3ed28ea..0910b7d65 100644
--- a/nipype/interfaces/niftyreg/regutils.py
+++ b/nipype/interfaces/niftyreg/regutils.py
@@ -122,7 +122,7 @@ warpfield.nii -res im2_res.nii.gz'
     # Need this overload to properly constraint the interpolation type input
     def _format_arg(self, name, spec, value):
         if name == 'inter_val':
-            inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 5}
+            inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 4}
             return spec.argstr % inter_val[value]
         else:
             return super(RegResample, self)._format_arg(name, spec, value)
@@ -295,6 +295,15 @@ class RegToolsInputSpec(NiftyRegCommandInputSpec):
         desc=desc,
         argstr='-smoG %f %f %f')
 
+    # Interpolation type
+    inter_val = traits.Enum(
+        'NN',
+        'LIN',
+        'CUB',
+        'SINC',
+        desc='Interpolation order to use to warp the floating image',
+        argstr='-interp %d')
+
 
 class RegToolsOutputSpec(TraitedSpec):
     """ Output Spec for RegTools. """
@@ -326,6 +335,14 @@ class RegTools(NiftyRegCommand):
     output_spec = RegToolsOutputSpec
     _suffix = '_tools'
 
+    # Need this overload to properly constraint the interpolation type input
+    def _format_arg(self, name, spec, value):
+        if name == 'inter_val':
+            inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 4}
+            return spec.argstr % inter_val[value]
+        else:
+            return super(RegTools, self)._format_arg(name, spec, value)
NiftyReg's RegTools is missing interpolation order argument
nipy/nipype
diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py b/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py index 97ea5e6c9..f0f66083c 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py @@ -24,6 +24,7 @@ def test_RegTools_inputs(): argstr='-in %s', mandatory=True, ), + inter_val=dict(argstr='-interp %d', ), iso_flag=dict(argstr='-iso', ), mask_file=dict(argstr='-nan %s', ), mul_val=dict(argstr='-mul %s', ),
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 click==8.0.4 configparser==5.2.0 decorator==4.4.2 funcsigs==1.0.2 future==1.0.0 importlib-metadata==4.8.3 iniconfig==1.1.1 isodate==0.6.1 lxml==5.3.1 mock==5.2.0 networkx==2.5.1 nibabel==3.2.2 -e git+https://github.com/nipy/nipype.git@6ca791d9c5ec0efb9f56cc9e44758d6e53ffb800#egg=nipype numpy==1.19.5 packaging==21.3 pluggy==1.0.0 prov==1.5.0 py==1.11.0 pydot==1.4.2 pydotplus==2.0.2 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 rdflib==5.0.0 scipy==1.5.4 simplejson==3.20.1 six==1.17.0 tomli==1.2.3 traits==6.4.1 typing_extensions==4.1.1 zipp==3.6.0
name: nipype channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==8.0.4 - configparser==5.2.0 - decorator==4.4.2 - funcsigs==1.0.2 - future==1.0.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isodate==0.6.1 - lxml==5.3.1 - mock==5.2.0 - networkx==2.5.1 - nibabel==3.2.2 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - prov==1.5.0 - py==1.11.0 - pydot==1.4.2 - pydotplus==2.0.2 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - rdflib==5.0.0 - scipy==1.5.4 - simplejson==3.20.1 - six==1.17.0 - tomli==1.2.3 - traits==6.4.1 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/nipype
[ "nipype/interfaces/niftyreg/tests/test_auto_RegTools.py::test_RegTools_inputs" ]
[]
[ "nipype/interfaces/niftyreg/tests/test_auto_RegTools.py::test_RegTools_outputs" ]
[]
Apache License 2.0
2,227
[ "nipype/interfaces/niftyreg/regutils.py", ".zenodo.json" ]
[ "nipype/interfaces/niftyreg/regutils.py", ".zenodo.json" ]
poliastro__poliastro-330
60d463f8043de496eb377e2bfb798560910fb679
2018-03-01 00:27:02
050339c63c6eab6a1a58adc286f423f5265e0cac
diff --git a/src/poliastro/plotting.py b/src/poliastro/plotting.py index 48bc1e9f..ad3af768 100644 --- a/src/poliastro/plotting.py +++ b/src/poliastro/plotting.py @@ -131,6 +131,12 @@ class OrbitPlotter(object): self.ax.add_patch(mpl.patches.Circle((0, 0), radius, lw=0, color=color)) + def _project(self, rr): + rr_proj = rr - rr.dot(self._frame[2])[:, None] * self._frame[2] + x = rr_proj.dot(self._frame[0]) + y = rr_proj.dot(self._frame[1]) + return x, y + def plot(self, orbit, label=None, color=None): """Plots state and osculating orbit in their plane. @@ -156,15 +162,15 @@ class OrbitPlotter(object): # Project on OrbitPlotter frame # x_vec, y_vec, z_vec = self._frame - rr_proj = rr - rr.dot(self._frame[2])[:, None] * self._frame[2] - x = rr_proj.dot(self._frame[0]) - y = rr_proj.dot(self._frame[1]) + x, y = self._project(rr) + x0, y0 = self._project(orbit.r[None]) # Plot current position - l, = self.ax.plot(x[0].to(u.km).value, y[0].to(u.km).value, + l, = self.ax.plot(x0.to(u.km).value, y0.to(u.km).value, 'o', mew=0, color=color) lines.append(l) + # Plot trajectory l, = self.ax.plot(x.to(u.km).value, y.to(u.km).value, '--', color=l.get_color()) lines.append(l) diff --git a/src/poliastro/twobody/orbit.py b/src/poliastro/twobody/orbit.py index d2ab7542..82c241c0 100644 --- a/src/poliastro/twobody/orbit.py +++ b/src/poliastro/twobody/orbit.py @@ -298,20 +298,25 @@ class Orbit(object): # first sample eccentric anomaly, then transform into true anomaly # why sampling eccentric anomaly uniformly to minimize error in the apocenter, see # http://www.dtic.mil/dtic/tr/fulltext/u2/a605040.pdf + # Start from pericenter E_values = np.linspace(0, 2 * np.pi, values) * u.rad nu_values = E_to_nu(E_values, self.ecc) else: # Select a sensible limiting value for non-closed orbits - # This corresponds to r = 3p - nu_limit = np.arccos(-(1 - 1 / 3.) / self.ecc) + # This corresponds to max(r = 3p, r = self.r) + # We have to wrap nu in [-180, 180) to compare it with the output of + # the arc cosine, which is in the range [0, 180) + # Start from -nu_limit + wrapped_nu = self.nu if self.nu < 180 * u.deg else self.nu - 360 * u.deg + nu_limit = max(np.arccos(-(1 - 1 / 3.) / self.ecc), wrapped_nu) nu_values = np.linspace(-nu_limit, nu_limit, values) - nu_values = np.insert(nu_values, 0, self.ecc) return self.sample(nu_values, method) elif hasattr(values, "unit") and values.unit in ('rad', 'deg'): values = self._generate_time_values(values) - return (values, self._sample(values, method)) + + return values, self._sample(values, method) def _sample(self, time_values, method=mean_motion): values = np.zeros((len(time_values), 3)) * self.r.unit @@ -322,7 +327,8 @@ class Orbit(object): return CartesianRepresentation(values, xyz_axis=1) def _generate_time_values(self, nu_vals): - M_vals = nu_to_M(nu_vals, self.ecc) + # Subtract current anomaly to start from the desired point + M_vals = nu_to_M(nu_vals, self.ecc) - nu_to_M(self.nu, self.ecc) time_values = self.epoch + (M_vals / self.n).decompose() return time_values
"New Horizons launch" example won't run on master <!--- Provide a general summary of the issue in the Title above --> 🐞 **Problem** <!--- Provide a detailed description of the change or addition you are proposing --> <!--- If it is a feature or a bug, what problem is it solving--> [Exploring the new Horizons launch](https://github.com/poliastro/poliastro/blob/d32f3ab802f782a03582e6c183a28c12c6abb2d7/docs/source/examples/Exploring%20the%20New%20Horizons%20launch.ipynb) example fails on `In [5]` when poliastro is installed from master (d32f3ab802f782a03582e6c183a28c12c6abb2d7). <details> ```python /usr/local/lib/python3.6/site-packages/astropy/units/quantity.py:639: RuntimeWarning: invalid value encountered in log --------------------------------------------------------------------------- ValueError Traceback (most recent call last) /usr/local/lib/python3.6/site-packages/astropy/time/core.py in __add__(self, other) 1369 try: -> 1370 other = TimeDelta(other) 1371 except Exception: /usr/local/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, copy) 1539 -> 1540 self._init_from_vals(val, val2, format, scale, copy) 1541 /usr/local/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt) 329 self._time = self._get_time_fmt(val, val2, format, scale, --> 330 precision, in_subfmt, out_subfmt) 331 self._format = self._time.name /usr/local/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt) 373 else: --> 374 raise ValueError('Input values did not match {0}'.format(err_msg)) 375 ValueError: Input values did not match the format class jd During handling of the above exception, another exception occurred: OperandTypeError Traceback (most recent call last) <ipython-input-5-f1d654cf6abc> in <module>() 2 3 op.plot(parking) ----> 4 op.plot(exit) 5 6 plt.xlim(-8000, 8000) /usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/plotting.py in plot(self, orbit, label, color) 152 lines = [] 153 --> 154 _, positions = orbit.sample(self.num_points) 155 rr = positions.get_xyz().transpose() 156 /usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/twobody/orbit.py in sample(self, values, function) 308 nu_values = np.insert(nu_values, 0, self.ecc) 309 --> 310 return self.sample(nu_values, function) 311 312 elif hasattr(values, "unit") and values.unit in ('rad', 'deg'): /usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/twobody/orbit.py in sample(self, values, function) 311 312 elif hasattr(values, "unit") and values.unit in ('rad', 'deg'): --> 313 values = self._generate_time_values(values) 314 return (values, self._sample(values, function)) 315 /usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/twobody/orbit.py in _generate_time_values(self, nu_vals) 324 def _generate_time_values(self, nu_vals): 325 M_vals = nu_to_M(nu_vals, self.ecc) --> 326 time_values = self.epoch + (M_vals / self.n).decompose() 327 return time_values 328 /usr/local/lib/python3.6/site-packages/astropy/time/core.py in __add__(self, other) 1370 other = TimeDelta(other) 1371 except Exception: -> 1372 raise OperandTypeError(self, other, '+') 1373 1374 # Tdelta + something is dealt with in TimeDelta, so we have OperandTypeError: Unsupported operand type(s) for +: 'Time' and 'Quantity' ``` </details><br> 🖥 **Please paste the output of following 
commands** (Installed poliastro without conda) ``` $ python3 -V Python 3.6.4 $ uname -a Linux linux 4.13.0-32-generic #35~16.04.1-Ubuntu SMP Thu Jan 25 10:13:43 UTC 2018 x86_64 x86_64 x86_64 GNU/Linux ``` <details> <summary>Running tests</summary> ``` $ python3 -c "import poliastro.testing; poliastro.testing.test()" ======================================= test session starts ======================================== platform linux -- Python 3.6.4, pytest-3.4.1, py-1.5.2, pluggy-0.6.0 rootdir: /usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro, inifile: plugins: rerunfailures-4.0, remotedata-0.2.0, openfiles-0.2.0, mock-1.6.3, doctestplus-0.1.2, cov-2.5.1, arraydiff-0.2, hypothesis-3.45.0 collected 140 items ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_bodies.py . [ 0%] ..... [ 4%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_coordinates.py . [ 5%] .. [ 6%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_hyper.py . [ 7%] .......... [ 14%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_iod.py . [ 15%] ........ [ 20%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_jit.py . [ 21%] .. [ 22%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_maneuver.py . [ 23%] ..... [ 27%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_patched_conics.py . [ 27%] . [ 28%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_plotting.py . [ 29%] ....... [ 34%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_plotting3d.py . [ 35%] ..... [ 38%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_stumpff.py . [ 39%] .. [ 40%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_twobody.py . [ 41%] . [ 42%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/test_util.py . [ 42%] ...... [ 47%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_neos/test_dastcom5.py . [ 47%] ........ [ 53%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_neos/test_neos_neows.py . [ 54%] ..... [ 57%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_twobody/test_angles.py . [ 58%] ...... [ 62%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_twobody/test_decorators.py . [ 63%] . [ 64%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_twobody/test_orbit.py . [ 65%] .................. [ 77%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_twobody/test_propagation.py . [ 78%] ........ [ 84%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_twobody/test_sample.py . [ 85%] ........... [ 92%] ../../usr/local/lib/python3.6/site-packages/poliastro-0.9.dev0-py3.6.egg/poliastro/tests/tests_twobody/test_states.py . [ 93%] ......... 
[100%] =================================== 140 passed in 23.95 seconds ==================================== ``` </details><br> Looks like 27318862c266caf51a96857864eaf3045f613dae (was later on modified by 92b6c9dbcfa2d5fcf707094642821725465b131e) caused this regression. 🎯 **Goal** <!--- Why is this change important to you? How would you use it? --> <!--- How can it benefit other users? --> 💡 **Possible solutions** <!--- Not obligatory, but suggest an idea for implementing addition or change --> 📋 **Steps to solve the problem** * Comment below about what you've started working on. * Add, commit, push your changes * Submit a pull request and add this in comments - `Addresses #<put issue number here>` * Ask for a review in comments section of pull request * Celebrate your contribution to this project 🎉
poliastro/poliastro
diff --git a/src/poliastro/tests/tests_twobody/test_orbit.py b/src/poliastro/tests/tests_twobody/test_orbit.py index 14b0835d..1649570c 100644 --- a/src/poliastro/tests/tests_twobody/test_orbit.py +++ b/src/poliastro/tests/tests_twobody/test_orbit.py @@ -181,7 +181,7 @@ def test_sample_with_time_value(): ss = Orbit.from_classical(_body, _d, _, _a, _a, _a, _a) expected_r = [ss.r] - _, positions = ss.sample(values=[360] * u.deg) + _, positions = ss.sample(values=ss.nu + [360] * u.deg) r = positions.get_xyz().transpose() assert_quantity_allclose(r, expected_r, rtol=1.e-7) @@ -195,18 +195,21 @@ def test_sample_with_nu_value(): ss = Orbit.from_classical(_body, _d, _, _a, _a, _a, _a) expected_r = [ss.r] - _, positions = ss.sample(values=[360] * u.deg) + _, positions = ss.sample(values=ss.nu + [360] * u.deg) r = positions.get_xyz().transpose() assert_quantity_allclose(r, expected_r, rtol=1.e-7) -def test_nu_value_check(): - _d = [1.197659243752796E+09, -4.443716685978071E+09, -1.747610548576734E+09] * u.km - _v = [5.540549267188614E+00, -1.251544669134140E+01, -4.848892572767733E+00] * u.km / u.s - ss = Orbit.from_vectors(Sun, _d, _v, Time('2015-07-14 07:59', scale='tdb')) +def test_hyperbolic_nu_value_check(): + # A custom hyperbolic orbit + r = [1.197659243752796E+09, -4.443716685978071E+09, -1.747610548576734E+09] * u.km + v = [5.540549267188614E+00, -1.251544669134140E+01, -4.848892572767733E+00] * u.km / u.s + + ss = Orbit.from_vectors(Sun, r, v, Time('2015-07-14 07:59', scale='tdb')) + values, positions = ss.sample(100) assert isinstance(positions, CartesianRepresentation) assert isinstance(values, Time) - assert len(positions) == len(values) == 101 + assert len(positions) == len(values) == 100 diff --git a/src/poliastro/tests/tests_twobody/test_sample.py b/src/poliastro/tests/tests_twobody/test_sample.py index 051f86c9..ae1b978c 100644 --- a/src/poliastro/tests/tests_twobody/test_sample.py +++ b/src/poliastro/tests/tests_twobody/test_sample.py @@ -9,6 +9,8 @@ from poliastro.twobody import Orbit from poliastro.twobody.propagation import kepler, mean_motion, cowell import numpy as np +from poliastro.util import norm + def test_sample_angle_zero_returns_same(): # Data from Vallado, example 2.4 @@ -17,7 +19,7 @@ def test_sample_angle_zero_returns_same(): ss0 = Orbit.from_vectors(Earth, r0, v0) nu_values = [0] * u.deg - _, rr = ss0.sample(nu_values) + _, rr = ss0.sample(ss0.nu + nu_values) assert_quantity_allclose(rr[0].get_xyz(), ss0.r) @@ -71,7 +73,8 @@ def test_sample_nu_values(): _, rr = ss0.sample(nu_values) assert len(rr) == len(nu_values) - assert_quantity_allclose(rr[-1].get_xyz(), expected_ss.r) + assert_quantity_allclose(norm(rr[0].get_xyz()), expected_ss.r_p) + assert_quantity_allclose(norm(rr[-1].get_xyz()), expected_ss.r_a) @pytest.mark.parametrize("num_points", [3, 5, 7, 9, 11, 101]) @@ -81,9 +84,10 @@ def test_sample_num_points(num_points): v0 = [-5.64305, 4.30333, 2.42879] * u.km / u.s ss0 = Orbit.from_vectors(Earth, r0, v0) - expected_ss = ss0.propagate(ss0.period / 2) + # TODO: Test against the perigee and apogee + # expected_ss = ss0.propagate(ss0.period / 2) _, rr = ss0.sample(num_points) assert len(rr) == num_points - assert_quantity_allclose(rr[num_points // 2].get_xyz(), expected_ss.r) + # assert_quantity_allclose(rr[num_points // 2].get_xyz(), expected_ss.r)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_git_commit_hash", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 2 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 argon2-cffi==21.3.0 argon2-cffi-bindings==21.2.0 astropy==3.2.3 async-generator==1.10 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work Babel==2.11.0 backcall==0.2.0 beautifulsoup4==4.12.3 bleach==4.1.0 CALLHORIZONS==1.1.1 certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 comm==0.1.4 coverage==6.2 cycler==0.11.0 dataclasses==0.8 decorator==5.1.1 defusedxml==0.7.1 docutils==0.16 entrypoints==0.4 idna==3.10 imagesize==1.4.1 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work ipykernel==5.5.6 ipython==7.16.3 ipython-genutils==0.2.0 ipywidgets==7.8.5 jedi==0.17.2 Jinja2==3.0.3 jplephem==2.22 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 jupyterlab_widgets==1.1.11 kiwisolver==1.3.1 llvmlite==0.36.0 MarkupSafe==2.0.1 matplotlib==3.3.4 mistune==0.8.4 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nbsphinx==0.3.5 nest-asyncio==1.6.0 notebook==6.4.10 numba==0.53.1 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 Pillow==8.4.0 plotly==5.18.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work -e git+https://github.com/poliastro/poliastro.git@60d463f8043de496eb377e2bfb798560910fb679#egg=poliastro prometheus-client==0.17.1 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pycodestyle==2.10.0 pycparser==2.21 Pygments==2.14.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyrsistent==0.18.0 pytest==6.2.4 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 pyzmq==25.1.2 requests==2.27.1 scipy==1.5.4 Send2Trash==1.8.3 six==1.17.0 snowballstemmer==2.2.0 soupsieve==2.3.2.post1 Sphinx==1.5.6 sphinx-rtd-theme==0.5.2 tenacity==8.2.2 terminado==0.12.1 testpath==0.6.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 tornado==6.1 traitlets==4.3.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 wcwidth==0.2.13 webencodings==0.5.1 widgetsnbextension==3.6.10 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: poliastro channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - argon2-cffi==21.3.0 - argon2-cffi-bindings==21.2.0 - astropy==3.2.3 - async-generator==1.10 - babel==2.11.0 - backcall==0.2.0 - beautifulsoup4==4.12.3 - bleach==4.1.0 - callhorizons==1.1.1 - cffi==1.15.1 - charset-normalizer==2.0.12 - comm==0.1.4 - coverage==6.2 - cycler==0.11.0 - dataclasses==0.8 - decorator==5.1.1 - defusedxml==0.7.1 - docutils==0.16 - entrypoints==0.4 - idna==3.10 - imagesize==1.4.1 - ipykernel==5.5.6 - ipython==7.16.3 - ipython-genutils==0.2.0 - ipywidgets==7.8.5 - jedi==0.17.2 - jinja2==3.0.3 - jplephem==2.22 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - jupyterlab-widgets==1.1.11 - kiwisolver==1.3.1 - llvmlite==0.36.0 - markupsafe==2.0.1 - matplotlib==3.3.4 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nbsphinx==0.3.5 - nest-asyncio==1.6.0 - notebook==6.4.10 - numba==0.53.1 - numpy==1.19.5 - pandas==1.1.5 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pillow==8.4.0 - plotly==5.18.0 - prometheus-client==0.17.1 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - pycodestyle==2.10.0 - pycparser==2.21 - pygments==2.14.0 - pyrsistent==0.18.0 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyzmq==25.1.2 - requests==2.27.1 - scipy==1.5.4 - send2trash==1.8.3 - six==1.17.0 - snowballstemmer==2.2.0 - soupsieve==2.3.2.post1 - sphinx==1.5.6 - sphinx-rtd-theme==0.5.2 - tenacity==8.2.2 - terminado==0.12.1 - testpath==0.6.0 - tomli==1.2.3 - tornado==6.1 - traitlets==4.3.3 - urllib3==1.26.20 - wcwidth==0.2.13 - webencodings==0.5.1 - widgetsnbextension==3.6.10 prefix: /opt/conda/envs/poliastro
[ "src/poliastro/tests/tests_twobody/test_orbit.py::test_sample_with_time_value", "src/poliastro/tests/tests_twobody/test_orbit.py::test_sample_with_nu_value", "src/poliastro/tests/tests_twobody/test_orbit.py::test_hyperbolic_nu_value_check", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_angle_zero_returns_same" ]
[]
[ "src/poliastro/tests/tests_twobody/test_orbit.py::test_default_time_for_new_state", "src/poliastro/tests/tests_twobody/test_orbit.py::test_state_raises_unitserror_if_elements_units_are_wrong", "src/poliastro/tests/tests_twobody/test_orbit.py::test_state_raises_unitserror_if_rv_units_are_wrong", "src/poliastro/tests/tests_twobody/test_orbit.py::test_parabolic_elements_fail_early", "src/poliastro/tests/tests_twobody/test_orbit.py::test_bad_inclination_raises_exception", "src/poliastro/tests/tests_twobody/test_orbit.py::test_bad_hyperbolic_raises_exception", "src/poliastro/tests/tests_twobody/test_orbit.py::test_apply_maneuver_changes_epoch", "src/poliastro/tests/tests_twobody/test_orbit.py::test_orbit_from_ephem_with_no_epoch_is_today", "src/poliastro/tests/tests_twobody/test_orbit.py::test_from_ephem_raises_warning_if_time_is_not_tdb_with_proper_time", "src/poliastro/tests/tests_twobody/test_orbit.py::test_circular_has_proper_semimajor_axis", "src/poliastro/tests/tests_twobody/test_orbit.py::test_geosync_has_proper_period", "src/poliastro/tests/tests_twobody/test_orbit.py::test_parabolic_has_proper_eccentricity", "src/poliastro/tests/tests_twobody/test_orbit.py::test_parabolic_has_zero_energy", "src/poliastro/tests/tests_twobody/test_orbit.py::test_pqw_for_circular_equatorial_orbit", "src/poliastro/tests/tests_twobody/test_orbit.py::test_orbit_representation", "src/poliastro/tests/tests_twobody/test_orbit.py::test_sample_numpoints", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_small_deltas[kepler-time_of_flight0]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_small_deltas[kepler-time_of_flight1]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_small_deltas[mean_motion-time_of_flight0]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_small_deltas[mean_motion-time_of_flight1]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_small_deltas[cowell-time_of_flight0]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_small_deltas[cowell-time_of_flight1]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_big_deltas[kepler-time_of_flight0]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_big_deltas[kepler-time_of_flight1]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_big_deltas[mean_motion-time_of_flight0]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_big_deltas[mean_motion-time_of_flight1]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_big_deltas[cowell-time_of_flight0]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_one_point_equals_propagation_big_deltas[cowell-time_of_flight1]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_nu_values", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_num_points[3]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_num_points[5]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_num_points[7]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_num_points[9]", "src/poliastro/tests/tests_twobody/test_sample.py::test_sample_num_points[11]", 
"src/poliastro/tests/tests_twobody/test_sample.py::test_sample_num_points[101]" ]
[]
MIT License
2,229
[ "src/poliastro/plotting.py", "src/poliastro/twobody/orbit.py" ]
[ "src/poliastro/plotting.py", "src/poliastro/twobody/orbit.py" ]
hasgeek__coaster-178
8864fb07e070b40886eecf1ed29b683ca538c16d
2018-03-01 08:13:20
8864fb07e070b40886eecf1ed29b683ca538c16d
diff --git a/.gitignore b/.gitignore index 5e6c4a0..f1aea8c 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ error.log __pycache__ .pytest_cache .cache +.vscode diff --git a/coaster/sqlalchemy/statemanager.py b/coaster/sqlalchemy/statemanager.py index de2f75e..a2a5f0e 100644 --- a/coaster/sqlalchemy/statemanager.py +++ b/coaster/sqlalchemy/statemanager.py @@ -67,6 +67,18 @@ control state change via transitions. Sample usage:: # A transition can do additional housekeeping self.datetime = datetime.utcnow() + # If AbortTransition is raised in this method, the state wont be changed. + # AbortTransition will return anything passed to it as return value of the method + # E.g. + # success, message = post.publish() + + success = False + message = "A transition is not desirable right now" + raise AbortTransition(success, message) + + # Any other exception will be raised + raise RandomError("This will be raised") + # A transition can use a conditional state. The condition is evaluated # before the transition can proceed @state.transition(state.RECENT, state.PENDING) @@ -204,7 +216,7 @@ from ..utils import is_collection, NameTitle from ..signals import coaster_signals from .roles import RoleMixin -__all__ = ['StateManager', 'StateTransitionError', +__all__ = ['StateManager', 'StateTransitionError', 'AbortTransition', 'transition_error', 'transition_before', 'transition_after', 'transition_exception'] @@ -230,6 +242,21 @@ class StateTransitionError(BadRequest, TypeError): pass +class AbortTransition(Exception): + """ + Transitions may raise :exc:`AbortTransition` to return without changing + state. The parameter to this exception is returned as the transition's + result. + + This exception is a signal to :class:`StateTransition` and will not be + raised to the transition's caller. + + :param result: Value to return to the transition's caller + """ + def __init__(self, result=None): + super(AbortTransition, self).__init__(result) + + # --- Classes ----------------------------------------------------------------- class ManagedState(object): @@ -458,6 +485,7 @@ class StateTransition(object): def __set_name__(self, owner, name): # pragma: no cover self.name = name + self.data['name'] = name # Make the transition a non-data descriptor def __get__(self, obj, cls=None): @@ -521,19 +549,23 @@ class StateTransitionWrapper(object): label=repr(state_invalid[2]) )) - # Raise a transition-before signal + # Send a transition-before signal transition_before.send(self.obj, transition=self.statetransition) - # Call the transition function + # Call the transition method try: result = self.statetransition.func(self.obj, *args, **kwargs) + except AbortTransition as e: + transition_exception.send(self.obj, transition=self.statetransition, exception=e) + return e.args[0] except Exception as e: transition_exception.send(self.obj, transition=self.statetransition, exception=e) raise + # Change the state for each of the state managers for statemanager, conditions in self.statetransition.transitions.items(): if conditions['to'] is not None: # Allow to=None for the @requires decorator statemanager._set(self.obj, conditions['to'].value) # Change state - # Raise a transition-after signal + # Send a transition-after signal transition_after.send(self.obj, transition=self.statetransition) return result @@ -677,11 +709,12 @@ class StateManager(object): def transition(self, from_, to, if_=None, **data): """ - Decorates a function to transition from one state to another. 
The - decorated function can accept any necessary parameters and perform + Decorates a method to transition from one state to another. The + decorated method can accept any necessary parameters and perform additional processing, or raise an exception to abort the transition. If it returns without an error, the state value is updated - automatically. + automatically. Transitions may also abort without raising an exception + using :exc:`AbortTransition`. :param from_: Required state to allow this transition (can be a state group) :param to: The state of the object after this transition (automatically set if no exception is raised)
StateManager needs an AbortTransition exception If a transition does `raise AbortTransition(result)`, the state should not change, and the `result` should be returned to the caller as the return value of the transition method. The anticipated use case is that callers will use something like `success = obj.transition(…)` or `status, message = obj.transition(…)`. All exceptions except `AbortTransition` should pass through uncaught.
hasgeek/coaster
diff --git a/tests/test_statemanager.py b/tests/test_statemanager.py index ad61af4..51ab10f 100644 --- a/tests/test_statemanager.py +++ b/tests/test_statemanager.py @@ -9,7 +9,7 @@ from flask_sqlalchemy import SQLAlchemy from coaster.utils import LabeledEnum from coaster.auth import add_auth_attribute from coaster.sqlalchemy import (with_roles, BaseMixin, - StateManager, StateTransitionError) + StateManager, StateTransitionError, AbortTransition) from coaster.sqlalchemy.statemanager import ManagedStateWrapper @@ -113,6 +113,17 @@ class MyPost(BaseMixin, db.Model): def rewind(self): self.datetime = datetime.utcnow() - timedelta(hours=2) + @with_roles(call={'author'}) + @state.transition(state.UNPUBLISHED, state.PUBLISHED, message=u"Abort this transition") + @reviewstate.transition(reviewstate.UNLOCKED, reviewstate.PENDING, title="Publish") + def abort(self, success=False, empty_abort=False): + if not success: + if empty_abort: + raise AbortTransition() + else: + raise AbortTransition((success, 'failed')) + return success, 'passed' + def roles_for(self, actor, anchors=()): roles = super(MyPost, self).roles_for(actor, anchors) # Cheap hack for the sake of testing, using strings instead of objects @@ -509,6 +520,26 @@ class TestStateManager(unittest.TestCase): self.assertFalse(self.post.reviewstate.LOCKED) self.assertTrue(self.post.state.PENDING) + def test_transition_abort(self): + """Transitions can abort without changing state or raising an exception""" + self.assertTrue(self.post.state.DRAFT) + + # A transition can abort returning a value (a 2-tuple here) + success, message = self.post.abort(success=False) + self.assertEqual(success, False) + self.assertEqual(message, "failed") + self.assertTrue(self.post.state.DRAFT) # state has not changed + + # A transition can abort without returning a value + result = self.post.abort(success=False, empty_abort=True) + self.assertEqual(result, None) + self.assertTrue(self.post.state.DRAFT) # state has not changed + + success, message = self.post.abort(success=True) + self.assertEqual(success, True) + self.assertEqual(message, 'passed') + self.assertTrue(self.post.state.PUBLISHED) # state has changed + def test_transition_is_available(self): """A transition's is_available property is reliable""" self.assertTrue(self.post.state.DRAFT)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "coverage", "coveralls", "psycopg2" ], "pre_install": [], "python": "3.6", "reqs_path": [ "test_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alembic==1.7.7 attrs==22.2.0 bcrypt==4.0.1 bleach==4.1.0 blinker==1.5 certifi==2021.5.30 charset-normalizer==2.0.12 click==8.0.4 -e git+https://github.com/hasgeek/coaster.git@8864fb07e070b40886eecf1ed29b683ca538c16d#egg=coaster coverage==6.2 coveralls==3.3.1 dataclasses==0.8 docflow==0.3.3 docopt==0.6.2 filelock==3.4.1 Flask==2.0.3 Flask-Assets==2.1.0 Flask-Migrate==4.0.1 Flask-Script==2.0.6 Flask-SQLAlchemy==2.5.1 greenlet==2.0.2 html2text==2020.1.16 html5lib==1.1 idna==3.10 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 isoweek==1.3.3 itsdangerous==2.0.1 Jinja2==3.0.3 joblib==1.1.1 Mako==1.1.6 Markdown==3.3.7 MarkupSafe==2.0.1 nltk==3.6.7 packaging==21.3 pluggy==1.0.0 psycopg2==2.7.7 py==1.11.0 PyExecJS==1.5.1 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytz==2025.2 regex==2023.8.8 requests==2.27.1 requests-file==2.1.0 semantic-version==2.10.0 shortuuid==1.0.13 simplejson==3.20.1 six==1.17.0 SQLAlchemy==1.4.54 SQLAlchemy-Utils==0.41.1 tldextract==3.1.2 tomli==1.2.3 tqdm==4.64.1 typing_extensions==4.1.1 UgliPyJS==0.2.5 Unidecode==1.3.8 urllib3==1.26.20 webassets==2.0 webencodings==0.5.1 Werkzeug==2.0.3 zipp==3.6.0
name: coaster channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alembic==1.7.7 - attrs==22.2.0 - bcrypt==4.0.1 - bleach==4.1.0 - blinker==1.5 - charset-normalizer==2.0.12 - click==8.0.4 - coverage==6.2 - coveralls==3.3.1 - dataclasses==0.8 - docflow==0.3.3 - docopt==0.6.2 - filelock==3.4.1 - flask==2.0.3 - flask-assets==2.1.0 - flask-migrate==4.0.1 - flask-script==2.0.6 - flask-sqlalchemy==2.5.1 - greenlet==2.0.2 - html2text==2020.1.16 - html5lib==1.1 - idna==3.10 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - isoweek==1.3.3 - itsdangerous==2.0.1 - jinja2==3.0.3 - joblib==1.1.1 - mako==1.1.6 - markdown==3.3.7 - markupsafe==2.0.1 - nltk==3.6.7 - packaging==21.3 - pluggy==1.0.0 - psycopg2==2.7.7 - py==1.11.0 - pyexecjs==1.5.1 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytz==2025.2 - regex==2023.8.8 - requests==2.27.1 - requests-file==2.1.0 - semantic-version==2.10.0 - shortuuid==1.0.13 - simplejson==3.20.1 - six==1.17.0 - sqlalchemy==1.4.54 - sqlalchemy-utils==0.41.1 - tldextract==3.1.2 - tomli==1.2.3 - tqdm==4.64.1 - typing-extensions==4.1.1 - uglipyjs==0.2.5 - unidecode==1.3.8 - urllib3==1.26.20 - webassets==2.0 - webencodings==0.5.1 - werkzeug==2.0.3 - zipp==3.6.0 prefix: /opt/conda/envs/coaster
[ "tests/test_statemanager.py::TestStateManager::test_added_regular_state_transition", "tests/test_statemanager.py::TestStateManager::test_added_state_group", "tests/test_statemanager.py::TestStateManager::test_added_state_transition", "tests/test_statemanager.py::TestStateManager::test_available_transitions", "tests/test_statemanager.py::TestStateManager::test_available_transitions_for", "tests/test_statemanager.py::TestStateManager::test_bestmatch_state", "tests/test_statemanager.py::TestStateManager::test_change_state_invalid", "tests/test_statemanager.py::TestStateManager::test_conditional_state", "tests/test_statemanager.py::TestStateManager::test_conditional_state_label", "tests/test_statemanager.py::TestStateManager::test_conditional_state_unmanaged_state", "tests/test_statemanager.py::TestStateManager::test_current_states", "tests/test_statemanager.py::TestStateManager::test_currently_available_transitions", "tests/test_statemanager.py::TestStateManager::test_duplicate_transition", "tests/test_statemanager.py::TestStateManager::test_group_by_state", "tests/test_statemanager.py::TestStateManager::test_has_nonstate", "tests/test_statemanager.py::TestStateManager::test_has_state", "tests/test_statemanager.py::TestStateManager::test_is_state", "tests/test_statemanager.py::TestStateManager::test_managed_state_wrapper", "tests/test_statemanager.py::TestStateManager::test_readonly", "tests/test_statemanager.py::TestStateManager::test_requires", "tests/test_statemanager.py::TestStateManager::test_reviewstate_also_changes", "tests/test_statemanager.py::TestStateManager::test_role_proxy_transitions", "tests/test_statemanager.py::TestStateManager::test_sql_query_added_state", "tests/test_statemanager.py::TestStateManager::test_sql_query_multi_value", "tests/test_statemanager.py::TestStateManager::test_sql_query_single_value", "tests/test_statemanager.py::TestStateManager::test_sql_query_state_group", "tests/test_statemanager.py::TestStateManager::test_state_already_exists", "tests/test_statemanager.py::TestStateManager::test_state_group_invalid", "tests/test_statemanager.py::TestStateManager::test_state_labels", "tests/test_statemanager.py::TestStateManager::test_transition_abort", "tests/test_statemanager.py::TestStateManager::test_transition_data", "tests/test_statemanager.py::TestStateManager::test_transition_data_name_invalid", "tests/test_statemanager.py::TestStateManager::test_transition_from_none", "tests/test_statemanager.py::TestStateManager::test_transition_invalid_from_to", "tests/test_statemanager.py::TestStateManager::test_transition_is_available", "tests/test_statemanager.py::TestStateManager::test_transition_publish_datetime", "tests/test_statemanager.py::TestStateManager::test_transition_publish_invalid", "tests/test_statemanager.py::TestStateManager::test_transition_state_lock", "tests/test_statemanager.py::TestStateManager::test_transition_submit" ]
[]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,230
[ ".gitignore", "coaster/sqlalchemy/statemanager.py" ]
[ ".gitignore", "coaster/sqlalchemy/statemanager.py" ]
pydoit__doit-246
62335b05473193303d6ecb5524d65bf1a5a50d7e
2018-03-01 15:17:29
50f0c7eaa8084e3a54c88bfec02e477e419f4c12
diff --git a/doit/doit_cmd.py b/doit/doit_cmd.py index e1be34d..ce6ed15 100644 --- a/doit/doit_cmd.py +++ b/doit/doit_cmd.py @@ -32,6 +32,8 @@ def reset_vars(): _CMDLINE_VARS = {} def get_var(name, default=None): + if _CMDLINE_VARS is None: + return None return _CMDLINE_VARS.get(name, default) def set_var(name, value):
'get_var' fails while the multiprocess execution if any task uses delayed creation. @schettino72 , hey! :) So, i found strange behaviour while trying to implement some stuff. ```.py #! /usr/bin/doit -f # -*- coding: utf-8 -*- from doit import get_var from doit import create_after get_var( 'A', None ) def task_foo() : return { 'actions': [ 'echo foo' ], 'task_dep': [ 'bar' ], } @create_after( executed = 'baz' ) def task_bar() : for i in range( 10 ) : yield { 'name': 'bar_{}'.format( i ), 'actions': [ 'echo bar_{}'.format( i ) ] } def task_baz() : for i in range( 10 ) : yield { 'name': 'baz_{}'.format( i ), 'actions': [ 'echo baz_{}'.format( i ) ] } ``` This code works fine while being executed in single-process mode. But it fails in multi process mode. ``` /root>GetVar.py -n 2 Traceback (most recent call last): File "<string>", line 1, in <module> File "/py3.4/lib/multiprocessing/spawn.py", line 106, in spawn_main exitcode = _main(fd) File "/py3.4/lib/multiprocessing/spawn.py", line 116, in _main self = pickle.load(from_parent) File "/root/GetVar.py", line 7, in <module> get_var( 'A', None ) File "/py3.4/lib/site-packages/doit/doit_cmd.py", line 35, in get_var return _CMDLINE_VARS.get(name, default) AttributeError: 'NoneType' object has no attribute 'get' Traceback (most recent call last): File "<string>", line 1, in <module> File "/py3.4/lib/multiprocessing/spawn.py", line 106, in spawn_main exitcode = _main(fd) File "/py3.4/lib/multiprocessing/spawn.py", line 116, in _main self = pickle.load(from_parent) File "/root/GetVar.py", line 7, in <module> get_var( 'A', None ) File "/py3.4/lib/site-packages/doit/doit_cmd.py", line 35, in get_var return _CMDLINE_VARS.get(name, default) AttributeError: 'NoneType' object has no attribute 'get' ``` But if i remove the `create_after` decorator, all became works fine even in multi process mode. So, i found it ambiguous. I had seen no notifications in docs about such behaviour. Can you confirm this is the doit error or not? Currently i use the last (0.30.0) version of doit.
pydoit/doit
diff --git a/tests/test_doit_cmd.py b/tests/test_doit_cmd.py index 90d51a2..84c740a 100644 --- a/tests/test_doit_cmd.py +++ b/tests/test_doit_cmd.py @@ -45,6 +45,17 @@ class TestRun(object): cmd_main(['x=1', 'y=abc']) assert '1' == doit_cmd.get_var('x') assert 'abc' == doit_cmd.get_var('y') + assert None is doit_cmd.get_var('z') + + def test_cmdline_novars(self, monkeypatch): + mock_run = Mock() + monkeypatch.setattr(Run, "execute", mock_run) + cmd_main(['x=1']) + + # Simulate the variable below not being initialized by a subprocess on + # Windows. See https://github.com/pydoit/doit/issues/164. + doit_cmd._CMDLINE_VARS = None + assert None is doit_cmd.get_var('x') def test_cmdline_vars_not_opts(self, monkeypatch): mock_run = Mock()
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.31
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
cloudpickle==3.1.1 configclass==0.2.0 coverage==7.8.0 -e git+https://github.com/pydoit/doit.git@62335b05473193303d6ecb5524d65bf1a5a50d7e#egg=doit doit-py==0.5.0 exceptiongroup==1.2.2 importlib_metadata==8.6.1 iniconfig==2.1.0 mergedict==1.0.0 packaging==24.2 pluggy==1.5.0 pyflakes==3.3.1 pyinotify==0.9.6 pytest==8.3.5 pytest-ignore-flaky==2.2.1 tomli==2.2.1 zipp==3.21.0
name: doit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - cloudpickle==3.1.1 - configclass==0.2.0 - coverage==7.8.0 - doit-py==0.5.0 - exceptiongroup==1.2.2 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - mergedict==1.0.0 - packaging==24.2 - pluggy==1.5.0 - pyflakes==3.3.1 - pyinotify==0.9.6 - pytest==8.3.5 - pytest-ignore-flaky==2.2.1 - tomli==2.2.1 - zipp==3.21.0 prefix: /opt/conda/envs/doit
[ "tests/test_doit_cmd.py::TestRun::test_cmdline_novars" ]
[]
[ "tests/test_doit_cmd.py::TestRun::test_version", "tests/test_doit_cmd.py::TestRun::test_usage", "tests/test_doit_cmd.py::TestRun::test_run_is_default", "tests/test_doit_cmd.py::TestRun::test_run_other_subcommand", "tests/test_doit_cmd.py::TestRun::test_cmdline_vars", "tests/test_doit_cmd.py::TestRun::test_cmdline_vars_not_opts", "tests/test_doit_cmd.py::TestRun::test_task_loader_has_cmd_list", "tests/test_doit_cmd.py::TestErrors::test_interrupt", "tests/test_doit_cmd.py::TestErrors::test_user_error", "tests/test_doit_cmd.py::TestErrors::test_internal_error", "tests/test_doit_cmd.py::TestConfig::test_no_ini_config_file", "tests/test_doit_cmd.py::TestConfig::test_load_plugins_command", "tests/test_doit_cmd.py::TestConfig::test_merge_api_ini_config", "tests/test_doit_cmd.py::TestConfig::test_execute_command_plugin" ]
[]
MIT License
2,232
[ "doit/doit_cmd.py" ]
[ "doit/doit_cmd.py" ]
springload__draftjs_exporter-90
764d377659f6c6b48be826c7a7d98cfdbc152015
2018-03-01 15:21:59
764d377659f6c6b48be826c7a7d98cfdbc152015
diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f70acb..2db755a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ ## Unreleased +### Added + +* Give block rendering components access to the current `block`, when the component is rendered for a block, and the `blocks` list. +* Give text decorators renderers access to the current `block` and `blocks` list. +* Give style rendering components access to the current `block`, `blocks` list, and current style type as `inline_style_range.style` ([#87](https://github.com/springload/draftjs_exporter/issues/87)). + ### Changed * Performance improvements for text-only (no inline styles, no entities) blocks. diff --git a/draftjs_exporter/composite_decorators.py b/draftjs_exporter/composite_decorators.py index 8080fd5..5f167f9 100644 --- a/draftjs_exporter/composite_decorators.py +++ b/draftjs_exporter/composite_decorators.py @@ -21,7 +21,7 @@ def get_decorations(decorators, text): return decorations -def apply_decorators(decorators, text, block): +def apply_decorators(decorators, text, block, blocks): decorations = get_decorations(decorators, text) pointer = 0 @@ -31,11 +31,8 @@ def apply_decorators(decorators, text, block): yield DOM.create_element(decorator['component'], { 'match': match, - 'block': { - 'type': block['type'], - 'depth': block['depth'], - 'data': block.get('data', {}), - } + 'block': block, + 'blocks': blocks, }, match.group(0)) pointer = end @@ -43,8 +40,8 @@ def apply_decorators(decorators, text, block): yield text[pointer:] -def render_decorators(decorators, text, block): - decorated_children = list(apply_decorators(decorators, text, block)) +def render_decorators(decorators, text, block, blocks): + decorated_children = list(apply_decorators(decorators, text, block, blocks)) if len(decorated_children) == 1: decorated_node = decorated_children[0] diff --git a/draftjs_exporter/dom.py b/draftjs_exporter/dom.py index 967253b..9d67333 100644 --- a/draftjs_exporter/dom.py +++ b/draftjs_exporter/dom.py @@ -76,7 +76,9 @@ class DOM(object): # Never render those attributes on a raw tag. props.pop('children', None) props.pop('block', None) + props.pop('blocks', None) props.pop('entity', None) + props.pop('inline_style_range', None) # Convert style object to style string, like the DOM would do. if 'style' in props and isinstance(props['style'], dict): diff --git a/draftjs_exporter/html.py b/draftjs_exporter/html.py index e527c2b..5267bd7 100644 --- a/draftjs_exporter/html.py +++ b/draftjs_exporter/html.py @@ -34,12 +34,13 @@ class HTML: if content_state is None: content_state = {} - wrapper_state = WrapperState(self.block_map) + blocks = content_state.get('blocks', []) + wrapper_state = WrapperState(self.block_map, blocks) document = DOM.create_element() entity_map = content_state.get('entityMap', {}) min_depth = 0 - for block in content_state.get('blocks', []): + for block in blocks: depth = block['depth'] elt = self.render_block(block, entity_map, wrapper_state) @@ -70,11 +71,11 @@ class HTML: # Decorators are not rendered inside entities. 
if text and entity_state.has_no_entity() and len(self.composite_decorators) > 0: - decorated_node = render_decorators(self.composite_decorators, text, block) + decorated_node = render_decorators(self.composite_decorators, text, block, wrapper_state.blocks) else: decorated_node = text - styled_node = style_state.render_styles(decorated_node) + styled_node = style_state.render_styles(decorated_node, block, wrapper_state.blocks) entity_node = entity_state.render_entities(styled_node) if entity_node is not None: @@ -84,7 +85,7 @@ class HTML: # Fast track for blocks which do not contain styles nor entities, which is very common. else: if len(self.composite_decorators) > 0: - decorated_node = render_decorators(self.composite_decorators, block['text'], block) + decorated_node = render_decorators(self.composite_decorators, block['text'], block, wrapper_state.blocks) else: decorated_node = block['text'] diff --git a/draftjs_exporter/style_state.py b/draftjs_exporter/style_state.py index d6fad23..e0c5a35 100644 --- a/draftjs_exporter/style_state.py +++ b/draftjs_exporter/style_state.py @@ -23,12 +23,18 @@ class StyleState: def is_empty(self): return not self.styles - def render_styles(self, text_node): - node = text_node + def render_styles(self, decorated_node, block, blocks): + node = decorated_node if not self.is_empty(): # Nest the tags. - for s in sorted(self.styles, reverse=True): - opt = Options.for_style(self.style_map, s) - node = DOM.create_element(opt.element, opt.props, node) + for style in sorted(self.styles, reverse=True): + opt = Options.for_style(self.style_map, style) + props = dict(opt.props) + props['block'] = block + props['blocks'] = blocks + props['inline_style_range'] = { + 'style': style, + } + node = DOM.create_element(opt.element, props, node) return node diff --git a/draftjs_exporter/wrapper_state.py b/draftjs_exporter/wrapper_state.py index e566550..ac5312b 100644 --- a/draftjs_exporter/wrapper_state.py +++ b/draftjs_exporter/wrapper_state.py @@ -79,8 +79,9 @@ class WrapperState: It adds a wrapper element around elements, if required. """ - def __init__(self, block_map): + def __init__(self, block_map, blocks): self.block_map = block_map + self.blocks = blocks self.stack = WrapperStack() def __str__(self): @@ -89,14 +90,10 @@ class WrapperState: def element_for(self, block, block_content): type_ = block['type'] depth = block['depth'] - data = block.get('data', {}) options = Options.for_block(self.block_map, type_) props = dict(options.props) - props['block'] = { - 'type': type_, - 'depth': depth, - 'data': data, - } + props['block'] = block + props['blocks'] = self.blocks # Make an element from the options specified in the block map. elt = DOM.create_element(options.element, props, block_content) @@ -146,6 +143,8 @@ class WrapperState: 'depth': depth, 'data': {}, } + props['blocks'] = self.blocks + wrapper_parent = DOM.create_element(options.element, props) DOM.append_child(self.stack.head().elt, wrapper_parent) else: diff --git a/example.py b/example.py index 6fa442b..8fc9022 100644 --- a/example.py +++ b/example.py @@ -11,7 +11,7 @@ from pstats import Stats from bs4 import BeautifulSoup # draftjs_exporter provides default configurations and predefined constants for reuse. 
-from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES +from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES from draftjs_exporter.defaults import BLOCK_MAP, STYLE_MAP from draftjs_exporter.dom import DOM from draftjs_exporter.html import HTML @@ -127,6 +127,12 @@ def entity_fallback(props): return DOM.create_element('span', {'class': 'missing-entity'}, props['children']) +def style_fallback(props): + type_ = props['inline_style_range']['style'] + logging.warn('Missing config for "%s". Deleting style.' % type_) + return props['children'] + + if __name__ == '__main__': config = { # `block_map` is a mapping from Draft.js block types to a definition of their HTML representation. @@ -158,6 +164,7 @@ if __name__ == '__main__': 'KBD': 'kbd', # The `style` prop can be defined as a dict, that will automatically be converted to a string. 'HIGHLIGHT': {'element': 'strong', 'props': {'style': {'textDecoration': 'underline'}}}, + INLINE_STYLES.FALLBACK: style_fallback, }), 'entity_decorators': { # Map entities to components so they can be rendered with their data. @@ -565,7 +572,11 @@ if __name__ == '__main__': "text": "Optionally, define your custom components.", "type": "ordered-list-item", "depth": 1, - "inlineStyleRanges": [], + "inlineStyleRanges": [{ + "offset": 0, + "length": 10, + "style": "EXAMPLE_DISCARD" + }], "entityRanges": [], "data": {} }, {
style_map components should be given data on render At the moment, `style_map` components do not receive any data beyond the text to style (as `props['children]`). https://github.com/springload/draftjs_exporter/blob/209631a107be74837b1be2f7c0697f77d64d994b/draftjs_exporter/style_state.py#L26-L32 This is ok for common use cases (`BOLD`, `ITALIC`, etc), but it makes the style_map fallback rather useless – there is no way to know what style needs the fallback, or have any other information about the context to adjust the fallback behavior. Here's what the `block_map` fallback has access to for comparison: ```py props['block'] = { 'type': type_, 'depth': depth, 'data': data, } ``` In retrospect I think this could've been all of the block's attributes, not just a cherry-picked shortlist, so for inline styles we could pass the following exhaustive `props`: ```py { # The style range to render. "range": "offset": 10, "length": 17, "style": "BOLD" }, # The full block data, eg. "block": { "key": "t7k7", "text": "Unstyled test test test test test", "type": "unstyled", "depth": 0, "inlineStyleRanges": [ { "offset": 10, "length": 17, "style": "BOLD" } ], "entityRanges": [ { "offset": 0, "length": 4, "key": 6 } ], "data": {} }, } ``` Here's the approximative change: ```diff - def render_styles(self, text_node): + def render_styles(self, text_node, block): node = text_node if not self.is_empty(): # Nest the tags. for s in sorted(self.styles, reverse=True): opt = Options.for_style(self.style_map, s) + props['block'] = block + props['range'] = s node = DOM.create_element(opt.element, opt.props, node) return node ``` --- Ideally I'd like entities and blocks to also be given more data (enough data to recreate the whole ContentState, thus making the exporter usable to create content migrations), but that's a separate issue.
springload/draftjs_exporter
diff --git a/tests/test_composite_decorators.py b/tests/test_composite_decorators.py index aeb029c..e375ebe 100644 --- a/tests/test_composite_decorators.py +++ b/tests/test_composite_decorators.py @@ -68,13 +68,37 @@ class TestBR(unittest.TestCase): class TestCompositeDecorators(unittest.TestCase): def test_render_decorators_empty(self): - self.assertEqual(render_decorators([], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0}), 'test https://www.example.com#hash #hashtagtest') + self.assertEqual(render_decorators([], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0}, []), 'test https://www.example.com#hash #hashtagtest') def test_render_decorators_single(self): - self.assertEqual(DOM.render(render_decorators([LINKIFY_DECORATOR], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0})), 'test <a href="https://www.example.com#hash">https://www.example.com#hash</a> #hashtagtest') + self.assertEqual(DOM.render(render_decorators([LINKIFY_DECORATOR], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0}, [])), 'test <a href="https://www.example.com#hash">https://www.example.com#hash</a> #hashtagtest') def test_render_decorators_conflicting_order_one(self): - self.assertEqual(DOM.render(render_decorators([LINKIFY_DECORATOR, HASHTAG_DECORATOR], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0})), 'test <a href="https://www.example.com#hash">https://www.example.com#hash</a> <span class="hashtag">#hashtagtest</span>') + self.assertEqual(DOM.render(render_decorators([LINKIFY_DECORATOR, HASHTAG_DECORATOR], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0}, [])), 'test <a href="https://www.example.com#hash">https://www.example.com#hash</a> <span class="hashtag">#hashtagtest</span>') def test_render_decorators_conflicting_order_two(self): - self.assertEqual(DOM.render(render_decorators([HASHTAG_DECORATOR, LINKIFY_DECORATOR], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0})), 'test https://www.example.com<span class="hashtag">#hash</span> <span class="hashtag">#hashtagtest</span>') + self.assertEqual(DOM.render(render_decorators([HASHTAG_DECORATOR, LINKIFY_DECORATOR], 'test https://www.example.com#hash #hashtagtest', {'type': BLOCK_TYPES.UNSTYLED, 'depth': 0}, [])), 'test https://www.example.com<span class="hashtag">#hash</span> <span class="hashtag">#hashtagtest</span>') + + def test_render_decorators_data(self): + blocks = [ + { + 'key': '5s7g9', + 'text': 'test', + 'type': 'unstyled', + 'depth': 0, + 'inlineStyleRanges': [], + 'entityRanges': [], + }, + ] + + def component(props): + self.assertEqual(props['blocks'], blocks) + self.assertEqual(props['block'], blocks[0]) + return None + + render_decorators([ + { + 'strategy': LINKIFY_RE, + 'component': component, + }, + ], 'test https://www.example.com#hash #hashtagtest', blocks[0], blocks) diff --git a/tests/test_style_state.py b/tests/test_style_state.py index 44b3120..20b36ac 100644 --- a/tests/test_style_state.py +++ b/tests/test_style_state.py @@ -55,41 +55,67 @@ class TestStyleState(unittest.TestCase): self.assertEqual(self.style_state.is_empty(), False) def test_render_styles_unstyled(self): - self.assertEqual(self.style_state.render_styles('Test text'), 'Test text') + self.assertEqual(self.style_state.render_styles('Test text', {}, []), 'Test text') def 
test_render_styles_unicode(self): - self.assertEqual(self.style_state.render_styles('🍺'), '🍺') + self.assertEqual(self.style_state.render_styles('🍺', {}, []), '🍺') def test_render_styles_styled(self): self.style_state.apply(Command('start_inline_style', 0, 'ITALIC')) - self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text')), '<em>Test text</em>') + self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text', {}, [])), '<em>Test text</em>') self.style_state.apply(Command('stop_inline_style', 9, 'ITALIC')) def test_render_styles_styled_multiple(self): self.style_state.apply(Command('start_inline_style', 0, 'BOLD')) self.style_state.apply(Command('start_inline_style', 0, 'ITALIC')) - self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text')), '<strong><em>Test text</em></strong>') + self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text', {}, [])), '<strong><em>Test text</em></strong>') def test_render_styles_attributes(self): self.style_state.apply(Command('start_inline_style', 0, 'KBD')) - self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text')), '<kbd class="o-keyboard-shortcut">Test text</kbd>') + self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text', {}, [])), '<kbd class="o-keyboard-shortcut">Test text</kbd>') self.style_state.apply(Command('stop_inline_style', 9, 'KBD')) def test_render_styles_component(self): self.style_state.apply(Command('start_inline_style', 0, 'IMPORTANT')) - self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text')), '<strong style="color: red;">Test text</strong>') + self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text', {}, [])), '<strong style="color: red;">Test text</strong>') self.style_state.apply(Command('stop_inline_style', 9, 'IMPORTANT')) def test_render_styles_component_multiple(self): self.style_state.apply(Command('start_inline_style', 0, 'IMPORTANT')) self.style_state.apply(Command('start_inline_style', 0, 'SHOUT')) - self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text')), '<strong style="color: red;"><span style="text-transform: uppercase;">Test text</span></strong>') + self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text', {}, [])), '<strong style="color: red;"><span style="text-transform: uppercase;">Test text</span></strong>') self.style_state.apply(Command('stop_inline_style', 9, 'IMPORTANT')) self.style_state.apply(Command('stop_inline_style', 9, 'SHOUT')) def test_render_styles_component_multiple_invert(self): self.style_state.apply(Command('start_inline_style', 0, 'SHOUT')) self.style_state.apply(Command('start_inline_style', 0, 'IMPORTANT')) - self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text')), '<strong style="color: red;"><span style="text-transform: uppercase;">Test text</span></strong>') + self.assertEqual(DOM.render_debug(self.style_state.render_styles('Test text', {}, [])), '<strong style="color: red;"><span style="text-transform: uppercase;">Test text</span></strong>') self.style_state.apply(Command('stop_inline_style', 9, 'SHOUT')) self.style_state.apply(Command('stop_inline_style', 9, 'IMPORTANT')) + + def test_render_styles_data(self): + blocks = [ + { + 'key': '5s7g9', + 'text': 'test', + 'type': 'unstyled', + 'depth': 0, + 'inlineStyleRanges': [], + 'entityRanges': [], + }, + ] + + def component(props): + self.assertEqual(props['blocks'], blocks) + self.assertEqual(props['block'], 
blocks[0]) + self.assertEqual(props['inline_style_range']['style'], 'ITALIC') + return None + + style_state = StyleState({ + 'ITALIC': component, + }) + + style_state.apply(Command('start_inline_style', 0, 'ITALIC')) + style_state.render_styles('Test text', blocks[0], blocks) + style_state.apply(Command('stop_inline_style', 9, 'ITALIC')) diff --git a/tests/test_wrapper_state.py b/tests/test_wrapper_state.py index 8d2cebe..80b2daf 100644 --- a/tests/test_wrapper_state.py +++ b/tests/test_wrapper_state.py @@ -21,11 +21,29 @@ class TestWrapperState(unittest.TestCase): 'element': list_item, 'wrapper': ordered_list }, - }) + }, []) def test_init(self): self.assertIsInstance(self.wrapper_state, WrapperState) + def test_element_for_data(self): + blocks = [ + { + 'key': '5s7g9', + 'text': 'test', + 'type': 'unstyled', + 'depth': 0, + 'inlineStyleRanges': [], + 'entityRanges': [], + }, + ] + + def unstyled(props): + self.assertEqual(props['blocks'], blocks) + self.assertEqual(props['block'], blocks[0]) + + WrapperState({'unstyled': unstyled}, blocks).element_for(blocks[0], 'test') + def test_element_for_simple_content(self): self.assertEqual(DOM.render_debug(self.wrapper_state.element_for({ 'key': '5s7g9',
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 7 }
2.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[testing,docs]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libxml2-dev libxslt1-dev" ], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 beautifulsoup4==4.12.3 certifi==2021.5.30 coverage==6.2 distlib==0.3.9 -e git+https://github.com/springload/draftjs_exporter.git@764d377659f6c6b48be826c7a7d98cfdbc152015#egg=draftjs_exporter execnet==1.9.0 filelock==3.4.1 flake8==5.0.4 html5lib==1.0b10 importlib-metadata==4.2.0 importlib-resources==5.4.0 iniconfig==1.1.1 isort==4.2.5 lxml==5.3.1 markov-draftjs==0.1.1 mccabe==0.7.0 memory-profiler==0.47 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 psutil==5.4.1 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 six==1.17.0 soupsieve==2.3.2.post1 toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 virtualenv==20.16.2 webencodings==0.5.1 zipp==3.6.0
name: draftjs_exporter channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - beautifulsoup4==4.12.3 - coverage==6.2 - distlib==0.3.9 - execnet==1.9.0 - filelock==3.4.1 - flake8==5.0.4 - html5lib==1.0b10 - importlib-metadata==4.2.0 - importlib-resources==5.4.0 - iniconfig==1.1.1 - isort==4.2.5 - lxml==5.3.1 - markov-draftjs==0.1.1 - mccabe==0.7.0 - memory-profiler==0.47 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - psutil==5.4.1 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - six==1.17.0 - soupsieve==2.3.2.post1 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - virtualenv==20.16.2 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/draftjs_exporter
[ "tests/test_composite_decorators.py::TestCompositeDecorators::test_render_decorators_conflicting_order_one", "tests/test_composite_decorators.py::TestCompositeDecorators::test_render_decorators_conflicting_order_two", "tests/test_composite_decorators.py::TestCompositeDecorators::test_render_decorators_data", "tests/test_composite_decorators.py::TestCompositeDecorators::test_render_decorators_empty", "tests/test_composite_decorators.py::TestCompositeDecorators::test_render_decorators_single", "tests/test_style_state.py::TestStyleState::test_render_styles_attributes", "tests/test_style_state.py::TestStyleState::test_render_styles_component", "tests/test_style_state.py::TestStyleState::test_render_styles_component_multiple", "tests/test_style_state.py::TestStyleState::test_render_styles_component_multiple_invert", "tests/test_style_state.py::TestStyleState::test_render_styles_data", "tests/test_style_state.py::TestStyleState::test_render_styles_styled", "tests/test_style_state.py::TestStyleState::test_render_styles_styled_multiple", "tests/test_style_state.py::TestStyleState::test_render_styles_unicode", "tests/test_style_state.py::TestStyleState::test_render_styles_unstyled", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_component", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_component_wrapper", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_data", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_dismiss_content", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_element_content", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_no_block", "tests/test_wrapper_state.py::TestWrapperState::test_element_for_simple_content", "tests/test_wrapper_state.py::TestWrapperState::test_init", "tests/test_wrapper_state.py::TestWrapperState::test_str", "tests/test_wrapper_state.py::TestWrapperState::test_str_elts" ]
[]
[ "tests/test_composite_decorators.py::TestLinkify::test_render", "tests/test_composite_decorators.py::TestLinkify::test_render_code_block", "tests/test_composite_decorators.py::TestLinkify::test_render_www", "tests/test_composite_decorators.py::TestHashtag::test_render", "tests/test_composite_decorators.py::TestHashtag::test_render_code_block", "tests/test_composite_decorators.py::TestBR::test_render", "tests/test_composite_decorators.py::TestBR::test_render_code_block", "tests/test_style_state.py::TestStyleState::test_apply_start_inline_style", "tests/test_style_state.py::TestStyleState::test_apply_stop_inline_style", "tests/test_style_state.py::TestStyleState::test_init", "tests/test_style_state.py::TestStyleState::test_is_empty_default", "tests/test_style_state.py::TestStyleState::test_is_empty_styled", "tests/test_wrapper_state.py::TestBlockquote::test_render_debug", "tests/test_wrapper_state.py::TestListItem::test_render_debug" ]
[]
MIT License
2,233
[ "draftjs_exporter/dom.py", "draftjs_exporter/wrapper_state.py", "CHANGELOG.md", "example.py", "draftjs_exporter/html.py", "draftjs_exporter/style_state.py", "draftjs_exporter/composite_decorators.py" ]
[ "draftjs_exporter/dom.py", "draftjs_exporter/wrapper_state.py", "CHANGELOG.md", "example.py", "draftjs_exporter/html.py", "draftjs_exporter/style_state.py", "draftjs_exporter/composite_decorators.py" ]
NeuralEnsemble__python-neo-488
643f889779ded1333bd433e004fe1f05b5c33026
2018-03-01 16:08:53
f0285a7ab15ff6535d3e6736e0163c4fa6aea091
diff --git a/neo/core/spiketrain.py b/neo/core/spiketrain.py index 5d285ae0..7256511b 100644 --- a/neo/core/spiketrain.py +++ b/neo/core/spiketrain.py @@ -54,6 +54,9 @@ def _check_time_in_range(value, t_start, t_stop, view=False): certain that the dtype and units are the same ''' + if t_start > t_stop: + raise ValueError("t_stop (%s) is before t_start (%s)" % (t_stop, t_start)) + if not value.size: return @@ -217,8 +220,8 @@ class SpikeTrain(BaseNeo, pq.Quantity): constructor, but not when slicing. ''' if len(times) != 0 and waveforms is not None and len(times) != \ - waveforms.shape[ - 0]: # len(times)!=0 has been used to workaround a bug occuring during neo import) + waveforms.shape[0]: + # len(times)!=0 has been used to workaround a bug occuring during neo import raise ValueError( "the number of waveforms should be equal to the number of spikes")
t_start > t_stop issue in SpikeTrain

I am transferring the issue from https://github.com/NeuralEnsemble/elephant/issues/81 here. Is the following intended?

```python
import neo
import quantities as pq
print(neo.SpikeTrain([]*pq.s, t_start=5*pq.s, t_stop=4*pq.s))
>>> [] s
```

I am on version `0.5.2`. Shouldn't the start and stop times in `neo.SpikeTrain` be checked and an error thrown?
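For reference, a minimal sketch of the behaviour the patch above introduces: the inverted time range is rejected with a `ValueError` instead of silently producing an empty spike train. The error message wording follows the patch; assuming the check is reached through the constructor is consistent with the adjusted nestio tests, but is an inference rather than something stated in the record.

```python
import quantities as pq

import neo

try:
    # Same call as in the report: empty spike times, t_start after t_stop.
    neo.SpikeTrain([] * pq.s, t_start=5 * pq.s, t_stop=4 * pq.s)
except ValueError as err:
    # With the added check, an inverted range fails fast,
    # e.g. "t_stop (4.0 s) is before t_start (5.0 s)".
    print(err)
```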
NeuralEnsemble/python-neo
diff --git a/neo/test/coretest/test_spiketrain.py b/neo/test/coretest/test_spiketrain.py index 22dd0bc8..ed5cec3b 100644 --- a/neo/test/coretest/test_spiketrain.py +++ b/neo/test/coretest/test_spiketrain.py @@ -142,6 +142,12 @@ class Testcheck_time_in_range(unittest.TestCase): _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=False) _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=True) + def test__check_time_in_range_empty_array_invalid_t_stop(self): + value = np.array([]) + t_start = 6 * pq.s + t_stop = 4 * pq.s + self.assertRaises(ValueError, _check_time_in_range, value, t_start=t_start, t_stop=t_stop) + def test__check_time_in_range_exact(self): value = np.array([0., 5., 10.]) * pq.s t_start = 0. * pq.s @@ -1558,7 +1564,7 @@ class TestPropertiesMethods(unittest.TestCase): def test__repr(self): result = repr(self.train1) if np.__version__.split(".")[:2] > ['1', '13']: - # see https://github.com/numpy/numpy/blob/master/doc/release/1.14.0-notes.rst#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode + # see https://github.com/numpy/numpy/blob/master/doc/release/1.14.0-notes.rst#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # nopep8 targ = '<SpikeTrain(array([3., 4., 5.]) * ms, [0.5 ms, 10.0 ms])>' else: targ = '<SpikeTrain(array([ 3., 4., 5.]) * ms, [0.5 ms, 10.0 ms])>' diff --git a/neo/test/iotest/test_nestio.py b/neo/test/iotest/test_nestio.py index 155e3974..1686644d 100644 --- a/neo/test/iotest/test_nestio.py +++ b/neo/test/iotest/test_nestio.py @@ -673,8 +673,8 @@ class TestNestIO_Spiketrains(BaseTestIO, unittest.TestCase): filename='0gid-1time-1256-0.gdf', directory=self.local_test_dir, clean=False) r = NestIO(filenames=filename) - seg = r.read_segment(gid_list=[], t_start=400. * pq.ms, - t_stop=1. * pq.ms) + seg = r.read_segment(gid_list=[], t_start=400.4 * pq.ms, + t_stop=400.5 * pq.ms) for st in seg.spiketrains: self.assertEqual(st.size, 0) @@ -689,7 +689,7 @@ class TestNestIO_Spiketrains(BaseTestIO, unittest.TestCase): directory=self.local_test_dir, clean=False) r = NestIO(filenames=filename) st = r.read_spiketrain(gdf_id=0, t_start=400. * pq.ms, - t_stop=1. * pq.ms) + t_stop=410. * pq.ms) self.assertEqual(st.size, 0)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/NeuralEnsemble/python-neo.git@643f889779ded1333bd433e004fe1f05b5c33026#egg=neo nose==1.3.7 numpy==1.19.5 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 quantities==0.13.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: python-neo channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - nose==1.3.7 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - quantities==0.13.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/python-neo
[ "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_empty_array_invalid_t_stop" ]
[]
[ "neo/test/coretest/test_spiketrain.py::Test__generate_datasets::test__fake_neo__cascade", "neo/test/coretest/test_spiketrain.py::Test__generate_datasets::test__fake_neo__nocascade", "neo/test/coretest/test_spiketrain.py::Test__generate_datasets::test__get_fake_values", "neo/test/coretest/test_spiketrain.py::Testcheck_has_dimensions_time::test__check_has_dimensions_time", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_above", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_above_below", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_above_below_scale", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_above_scale", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_below", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_below_scale", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_empty_array", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_exact", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_inside", "neo/test/coretest/test_spiketrain.py::Testcheck_time_in_range::test__check_time_in_range_scale", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_empty", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_empty_no_t_start", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_no_start_stop_units", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_no_start_stop_units_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_no_start_stop_units_with_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_with_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_with_incompatible_units_ValueError", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_array_without_units_should_raise_ValueError", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_list", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_list_no_start_stop_units", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_list_no_start_stop_units_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_list_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_list_without_units_should_raise_ValueError", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_no_start_stop_units", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_no_start_stop_units_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_no_start_stop_units_with_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_units", 
"neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_units_no_start_stop_units", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_units_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_units_with_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_array_with_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_from_quantity_units_no_start_stop_units_set_dtype", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_minimal", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_with_len_times_different_size_than_waveform_shape1_ValueError", "neo/test/coretest/test_spiketrain.py::TestConstructor::test__create_with_times_outside_tstart_tstop_ValueError", "neo/test/coretest/test_spiketrain.py::TestConstructor::test_default_tstart", "neo/test/coretest/test_spiketrain.py::TestConstructor::test_defaults", "neo/test/coretest/test_spiketrain.py::TestConstructor::test_tstop_units_conversion", "neo/test/coretest/test_spiketrain.py::TestSorting::test_sort", "neo/test/coretest/test_spiketrain.py::TestSlice::test_compliant", "neo/test/coretest/test_spiketrain.py::TestSlice::test_slice", "neo/test/coretest/test_spiketrain.py::TestSlice::test_slice_from_beginning", "neo/test/coretest/test_spiketrain.py::TestSlice::test_slice_negative_idxs", "neo/test/coretest/test_spiketrain.py::TestSlice::test_slice_to_end", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_compliant", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_differnt_units", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_empty", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_matching_ends", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_none_both", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_none_start", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_none_stop", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_out_of_boundries", "neo/test/coretest/test_spiketrain.py::TestTimeSlice::test_time_slice_typical", "neo/test/coretest/test_spiketrain.py::TestMerge::test_compliant", "neo/test/coretest/test_spiketrain.py::TestMerge::test_correct_shape", "neo/test/coretest/test_spiketrain.py::TestMerge::test_correct_times", "neo/test/coretest/test_spiketrain.py::TestMerge::test_incompatible_t_start", "neo/test/coretest/test_spiketrain.py::TestMerge::test_merge_typical", "neo/test/coretest/test_spiketrain.py::TestMerge::test_merge_with_waveforms", "neo/test/coretest/test_spiketrain.py::TestMerge::test_missing_waveforms_error", "neo/test/coretest/test_spiketrain.py::TestMerge::test_neo_relations", "neo/test/coretest/test_spiketrain.py::TestMerge::test_rescaling_units", "neo/test/coretest/test_spiketrain.py::TestMerge::test_sampling_rate", "neo/test/coretest/test_spiketrain.py::TestDuplicateWithNewData::test_deep_copy_attributes", "neo/test/coretest/test_spiketrain.py::TestDuplicateWithNewData::test_duplicate_with_new_data", "neo/test/coretest/test_spiketrain.py::TestAttributesAnnotations::test_annotations", "neo/test/coretest/test_spiketrain.py::TestAttributesAnnotations::test_autoset_universally_recommended_attributes", "neo/test/coretest/test_spiketrain.py::TestAttributesAnnotations::test_set_universally_recommended_attributes", 
"neo/test/coretest/test_spiketrain.py::TestChanging::test__adding_time", "neo/test/coretest/test_spiketrain.py::TestChanging::test__changing_multiple_spiketimes", "neo/test/coretest/test_spiketrain.py::TestChanging::test__changing_multiple_spiketimes_should_check_time_in_range", "neo/test/coretest/test_spiketrain.py::TestChanging::test__changing_spiketime_should_check_time_in_range", "neo/test/coretest/test_spiketrain.py::TestChanging::test__rescale", "neo/test/coretest/test_spiketrain.py::TestChanging::test__rescale_incompatible_units_ValueError", "neo/test/coretest/test_spiketrain.py::TestChanging::test__rescale_same_units", "neo/test/coretest/test_spiketrain.py::TestChanging::test__subtracting_time", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_default", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_default_and_data_not_quantity", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_false", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_false_and_data_not_quantity", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_false_and_dtype_change", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_false_and_fake_rescale", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_false_and_rescale_true", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_true", "neo/test/coretest/test_spiketrain.py::TestChanging::test_change_with_copy_true_and_data_not_quantity", "neo/test/coretest/test_spiketrain.py::TestChanging::test_changing_slice_changes_original_spiketrain", "neo/test/coretest/test_spiketrain.py::TestChanging::test_changing_slice_changes_original_spiketrain_with_copy_false", "neo/test/coretest/test_spiketrain.py::TestChanging::test_init_with_rescale", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__children", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__compliant", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__duration", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__repr", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__right_sweep", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__sampling_period", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__spike_duration", "neo/test/coretest/test_spiketrain.py::TestPropertiesMethods::test__times", "neo/test/coretest/test_spiketrain.py::TestMiscellaneous::test__different_dtype_for_t_start_and_array", "neo/test/coretest/test_spiketrain.py::TestMiscellaneous::test_as_array", "neo/test/coretest/test_spiketrain.py::TestMiscellaneous::test_as_quantity", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_assert_readed_neo_object_is_compliant", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_id_column_none_multiple_neurons", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_load_lazy_objects", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_multiple_value_columns", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_no_gid", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_no_gid_no_time", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_notimeid", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_read_analogsignal", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_read_block", 
"neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_read_segment", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_read_then_write", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_readed_with_lazy_is_compliant", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_single_gid", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_t_start_t_stop", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_values", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_write_then_read", "neo/test/iotest/test_nestio.py::TestNestIO_Analogsignals::test_wrong_input", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_adding_custom_annotation", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_assert_readed_neo_object_is_compliant", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_gdf_id_illdefined_raises_error", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_load_lazy_objects", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_float", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_integer", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_segment", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_segment_accepts_range", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_segment_annotates", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_segment_can_return_empty_spiketrains", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_segment_range_is_reasonable", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_spiketrain", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_spiketrain_annotates", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_spiketrain_can_return_empty_spiketrain", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_read_then_write", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_readed_with_lazy_is_compliant", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_t_start_t_stop", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_t_start_undefined_raises_error", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_t_stop_undefined_raises_error", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_values", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_write_then_read", "neo/test/iotest/test_nestio.py::TestNestIO_Spiketrains::test_wrong_input", "neo/test/iotest/test_nestio.py::TestNestIO_multiple_signal_types::test_assert_readed_neo_object_is_compliant", "neo/test/iotest/test_nestio.py::TestNestIO_multiple_signal_types::test_load_lazy_objects", "neo/test/iotest/test_nestio.py::TestNestIO_multiple_signal_types::test_read_analogsignal_and_spiketrain", "neo/test/iotest/test_nestio.py::TestNestIO_multiple_signal_types::test_read_then_write", "neo/test/iotest/test_nestio.py::TestNestIO_multiple_signal_types::test_readed_with_lazy_is_compliant", "neo/test/iotest/test_nestio.py::TestNestIO_multiple_signal_types::test_write_then_read", "neo/test/iotest/test_nestio.py::TestColumnIO::test_assert_readed_neo_object_is_compliant", "neo/test/iotest/test_nestio.py::TestColumnIO::test_correct_condition_selection", "neo/test/iotest/test_nestio.py::TestColumnIO::test_load_lazy_objects", "neo/test/iotest/test_nestio.py::TestColumnIO::test_multiple_column_ids", "neo/test/iotest/test_nestio.py::TestColumnIO::test_no_arguments", 
"neo/test/iotest/test_nestio.py::TestColumnIO::test_no_condition", "neo/test/iotest/test_nestio.py::TestColumnIO::test_no_condition_column", "neo/test/iotest/test_nestio.py::TestColumnIO::test_read_then_write", "neo/test/iotest/test_nestio.py::TestColumnIO::test_readed_with_lazy_is_compliant", "neo/test/iotest/test_nestio.py::TestColumnIO::test_single_column_id", "neo/test/iotest/test_nestio.py::TestColumnIO::test_sorting", "neo/test/iotest/test_nestio.py::TestColumnIO::test_write_then_read" ]
[]
BSD 3-Clause "New" or "Revised" License
2,234
[ "neo/core/spiketrain.py" ]
[ "neo/core/spiketrain.py" ]
oasis-open__cti-python-stix2-132
9402cff110ad6c8da0e2c3229520499dadbc7feb
2018-03-01 16:31:33
4a9c38e0b50415f4733072fc76eb8ebd0749c84b
diff --git a/docs/api/datastore/stix2.datastore.filesystem.rst b/docs/api/datastore/stix2.datastore.filesystem.rst new file mode 100644 index 0000000..665df66 --- /dev/null +++ b/docs/api/datastore/stix2.datastore.filesystem.rst @@ -0,0 +1,5 @@ +filesystem +========================== + +.. automodule:: stix2.datastore.filesystem + :members: \ No newline at end of file diff --git a/docs/api/datastore/stix2.datastore.filters.rst b/docs/api/datastore/stix2.datastore.filters.rst new file mode 100644 index 0000000..b556754 --- /dev/null +++ b/docs/api/datastore/stix2.datastore.filters.rst @@ -0,0 +1,5 @@ +filters +======================= + +.. automodule:: stix2.datastore.filters + :members: \ No newline at end of file diff --git a/docs/api/datastore/stix2.datastore.memory.rst b/docs/api/datastore/stix2.datastore.memory.rst new file mode 100644 index 0000000..b0521c7 --- /dev/null +++ b/docs/api/datastore/stix2.datastore.memory.rst @@ -0,0 +1,5 @@ +memory +====================== + +.. automodule:: stix2.datastore.memory + :members: \ No newline at end of file diff --git a/docs/api/datastore/stix2.datastore.taxii.rst b/docs/api/datastore/stix2.datastore.taxii.rst new file mode 100644 index 0000000..68389a0 --- /dev/null +++ b/docs/api/datastore/stix2.datastore.taxii.rst @@ -0,0 +1,5 @@ +taxii +===================== + +.. automodule:: stix2.datastore.taxii + :members: \ No newline at end of file diff --git a/docs/api/sources/stix2.sources.filesystem.rst b/docs/api/sources/stix2.sources.filesystem.rst deleted file mode 100644 index ca0cfce..0000000 --- a/docs/api/sources/stix2.sources.filesystem.rst +++ /dev/null @@ -1,5 +0,0 @@ -filesystem -======================== - -.. automodule:: stix2.sources.filesystem - :members: \ No newline at end of file diff --git a/docs/api/sources/stix2.sources.filters.rst b/docs/api/sources/stix2.sources.filters.rst deleted file mode 100644 index 43dea51..0000000 --- a/docs/api/sources/stix2.sources.filters.rst +++ /dev/null @@ -1,5 +0,0 @@ -filters -===================== - -.. automodule:: stix2.sources.filters - :members: \ No newline at end of file diff --git a/docs/api/sources/stix2.sources.memory.rst b/docs/api/sources/stix2.sources.memory.rst deleted file mode 100644 index 1e70f2e..0000000 --- a/docs/api/sources/stix2.sources.memory.rst +++ /dev/null @@ -1,5 +0,0 @@ -memory -==================== - -.. automodule:: stix2.sources.memory - :members: \ No newline at end of file diff --git a/docs/api/sources/stix2.sources.taxii.rst b/docs/api/sources/stix2.sources.taxii.rst deleted file mode 100644 index 42303fe..0000000 --- a/docs/api/sources/stix2.sources.taxii.rst +++ /dev/null @@ -1,5 +0,0 @@ -taxii -=================== - -.. automodule:: stix2.sources.taxii - :members: \ No newline at end of file diff --git a/docs/api/stix2.datastore.rst b/docs/api/stix2.datastore.rst new file mode 100644 index 0000000..4af05a9 --- /dev/null +++ b/docs/api/stix2.datastore.rst @@ -0,0 +1,5 @@ +datastore +=============== + +.. automodule:: stix2.datastore + :members: \ No newline at end of file diff --git a/docs/api/stix2.sources.rst b/docs/api/stix2.sources.rst deleted file mode 100644 index aaf9f89..0000000 --- a/docs/api/stix2.sources.rst +++ /dev/null @@ -1,5 +0,0 @@ -sources -============= - -.. 
automodule:: stix2.sources - :members: \ No newline at end of file diff --git a/docs/guide/datastore.ipynb b/docs/guide/datastore.ipynb index 31635ba..ac27a82 100644 --- a/docs/guide/datastore.ipynb +++ b/docs/guide/datastore.ipynb @@ -71,9 +71,9 @@ "source": [ "# DataStore API\n", "\n", - "CTI Python STIX2 features a new interface for pulling and pushing STIX2 content. The new interface consists of [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore), [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource) and [DataSink](../api/stix2.sources.rst#stix2.sources.DataSink) constructs: a [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource) for pulling STIX2 content, a [DataSink](../api/stix2.sources.rst#stix2.sources.DataSink) for pushing STIX2 content, and a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore) for both pulling and pushing.\n", + "CTI Python STIX2 features a new interface for pulling and pushing STIX2 content. The new interface consists of [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin), [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource) and [DataSink](../api/stix2.datastore.rst#stix2.datastore.DataSink) constructs: a [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource) for pulling STIX2 content, a [DataSink](../api/stix2.datastore.rst#stix2.datastore.DataSink) for pushing STIX2 content, and a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) for both pulling and pushing.\n", "\n", - "The DataStore, [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource), [DataSink](../api/stix2.sources.rst#stix2.sources.DataSink) (collectively referred to as the \"DataStore suite\") APIs are not referenced directly by a user but are used as base classes, which are then subclassed by real DataStore suites. CTI Python STIX2 provides the DataStore suites of [FileSystem](../api/sources/stix2.sources.filesystem.rst), [Memory](../api/sources/stix2.sources.memory.rst), and [TAXII](../api/sources/stix2.sources.taxii.rst). Users are also encouraged to subclass the base classes and create their own custom DataStore suites." + "The DataStore, [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource), [DataSink](../api/stix2.datastore.rst#stix2.datastore.DataSink) (collectively referred to as the \"DataStore suite\") APIs are not referenced directly by a user but are used as base classes, which are then subclassed by real DataStore suites. CTI Python STIX2 provides the DataStore suites of [FileSystem](../api/datastore/stix2.datastore.filesystem.rst), [Memory](../api/datastore/stix2.datastore.memory.rst), and [TAXII](../api/datastore/stix2.datastore.taxii.rst). Users are also encouraged to subclass the base classes and create their own custom DataStore suites." ] }, { @@ -82,13 +82,13 @@ "source": [ "## CompositeDataSource\n", "\n", - "[CompositeDataSource](../api/stix2.sources.rst#stix2.sources.CompositeDataSource) is an available controller that can be used as a single interface to a set of defined [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource). The purpose of this controller is allow for the grouping of [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource) and making `get()`/`query()` calls to a set of DataSources in one API call. 
[CompositeDataSources](../api/stix2.sources.rst#stix2.sources.CompositeDataSource) can be used to organize/group [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource), federate `get()`/`all_versions()`/`query()` calls, and reduce user code.\n", + "[CompositeDataSource](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource) is an available controller that can be used as a single interface to a set of defined [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource). The purpose of this controller is allow for the grouping of [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource) and making `get()`/`query()` calls to a set of DataSources in one API call. [CompositeDataSources](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource) can be used to organize/group [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource), federate `get()`/`all_versions()`/`query()` calls, and reduce user code.\n", "\n", - "[CompositeDataSource](../api/stix2.sources.rst#stix2.sources.CompositeDataSource) is just a wrapper around a set of defined [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource) (e.g. [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource)) that federates `get()`/`all_versions()`/`query()` calls individually to each of the attached [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource) , collects the results from each [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource) and returns them.\n", + "[CompositeDataSource](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource) is just a wrapper around a set of defined [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource) (e.g. [FileSystemSource](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource)) that federates `get()`/`all_versions()`/`query()` calls individually to each of the attached [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource) , collects the results from each [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource) and returns them.\n", "\n", - "Filters can be attached to [CompositeDataSources](../api/stix2.sources.rst#stix2.sources.CompositeDataSource) just as they can be done to [DataStores](../api/stix2.sources.rst#stix2.sources.DataStore) and [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource). When `get()`/`all_versions()`/`query()` calls are made to the [CompositeDataSource](../api/stix2.sources.rst#stix2.sources.CompositeDataSource), it will pass along any query filters from the call and any of its own filters to the attached [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource). In addition, those [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource) may have their own attached filters as well. The effect is that all the filters are eventually combined when the `get()`/`all_versions()`/`query()` call is actually executed within a [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource). \n", + "Filters can be attached to [CompositeDataSources](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource) just as they can be done to [DataStores](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) and [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource). 
When `get()`/`all_versions()`/`query()` calls are made to the [CompositeDataSource](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource), it will pass along any query filters from the call and any of its own filters to the attached [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource). In addition, those [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource) may have their own attached filters as well. The effect is that all the filters are eventually combined when the `get()`/`all_versions()`/`query()` call is actually executed within a [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource). \n", "\n", - "A [CompositeDataSource](../api/stix2.sources.rst#stix2.sources.CompositeDataSource) can also be attached to a [CompositeDataSource](../api/stix2.sources.rst#stix2.sources.CompositeDataSource) for multiple layers of grouped [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource).\n", + "A [CompositeDataSource](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource) can also be attached to a [CompositeDataSource](../api/stix2.datastore.rst#stix2.datastore.CompositeDataSource) for multiple layers of grouped [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource).\n", "\n", "\n", "### CompositeDataSource API\n", @@ -353,7 +353,7 @@ "source": [ "## Filters\n", "\n", - "The CTI Python STIX2 DataStore suites - [FileSystem](../api/sources/stix2.sources.filesystem.rst), [Memory](../api/sources/stix2.sources.memory.rst), and [TAXII](../api/sources/stix2.sources.taxii.rst) - all use the [Filters](../api/sources/stix2.sources.filters.rst) module to allow for the querying of STIX content. The basic functionality is that filters can be created and supplied everytime to calls to `query()`, and/or attached to a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore) so that every future query placed to that [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore) is evaluated against the attached filters, supplemented with any further filters supplied with the query call. Attached filters can also be removed from [DataStores](../api/stix2.sources.rst#stix2.sources.DataStore).\n", + "The CTI Python STIX2 DataStore suites - [FileSystem](../api/datastore/stix2.datastore.filesystem.rst), [Memory](../api/datastore/stix2.datastore.memory.rst), and [TAXII](../api/datastore/stix2.datastore.taxii.rst) - all use the [Filters](../api/datastore/stix2.datastore.filters.rst) module to allow for the querying of STIX content. The basic functionality is that filters can be created and supplied everytime to calls to `query()`, and/or attached to a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) so that every future query placed to that [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) is evaluated against the attached filters, supplemented with any further filters supplied with the query call. Attached filters can also be removed from [DataStores](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin).\n", "\n", "Filters are very simple, as they consist of a field name, comparison operator and an object property value (i.e. value to compare to). All properties of STIX2 objects can be filtered on. 
In addition, TAXII2 Filtering parameters for fields can also be used in filters.\n", "\n", @@ -418,7 +418,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "For Filters to be applied to a query, they must be either supplied with the query call or attached a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore), more specifically to a [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource) whether that [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource) is a part of a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore) or stands by itself. " + "For Filters to be applied to a query, they must be either supplied with the query call or attached to a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin), more specifically to a [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource) whether that [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource) is a part of a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) or stands by itself. " ] }, { @@ -461,7 +461,7 @@ "source": [ "## De-Referencing Relationships\n", "\n", - "Given a STIX object, there are several ways to find other STIX objects related to it. To illustrate this, let's first create a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore) and add some objects and relationships." + "Given a STIX object, there are several ways to find other STIX objects related to it. To illustrate this, let's first create a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) and add some objects and relationships." ] }, { @@ -487,7 +487,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "If a STIX object has a `created_by_ref` property, you can use the [creator_of()](../api/stix2.sources.rst#stix2.sources.DataSource.creator_of) method to retrieve the [Identity](../api/stix2.v20.sdo.rst#stix2.v20.sdo.Identity) object that created it." + "If a STIX object has a `created_by_ref` property, you can use the [creator_of()](../api/stix2.datastore.rst#stix2.datastore.DataSource.creator_of) method to retrieve the [Identity](../api/stix2.v20.sdo.rst#stix2.v20.sdo.Identity) object that created it." ] }, { @@ -593,7 +593,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Use the [relationships()](../api/stix2.sources.rst#stix2.sources.DataSource.relationships) method to retrieve all the relationship objects that reference a STIX object." + "Use the [relationships()](../api/stix2.datastore.rst#stix2.datastore.DataSource.relationships) method to retrieve all the relationship objects that reference a STIX object." ] }, { @@ -703,7 +703,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Finally, you can retrieve all STIX objects related to a given STIX object using [related_to()](../api/stix2.sources.rst#stix2.sources.DataSource.related_to). This calls [relationships()](../api/stix2.sources.rst#stix2.sources.DataSource.relationships) but then performs the extra step of getting the objects that these Relationships point to. [related_to()](../api/stix2.sources.rst#stix2.sources.DataSource.related_to) takes all the same arguments that [relationships()](../api/stix2.sources.rst#stix2.sources.DataSource.relationships) does." + "Finally, you can retrieve all STIX objects related to a given STIX object using [related_to()](../api/stix2.datastore.rst#stix2.datastore.DataSource.related_to). 
This calls [relationships()](../api/stix2.datastore.rst#stix2.datastore.DataSource.relationships) but then performs the extra step of getting the objects that these Relationships point to. [related_to()](../api/stix2.datastore.rst#stix2.datastore.DataSource.related_to) takes all the same arguments that [relationships()](../api/stix2.datastore.rst#stix2.datastore.DataSource.relationships) does." ] }, { diff --git a/docs/guide/environment.ipynb b/docs/guide/environment.ipynb index 0cb5796..3ece7c4 100644 --- a/docs/guide/environment.ipynb +++ b/docs/guide/environment.ipynb @@ -62,7 +62,7 @@ "\n", "### Storing and Retrieving STIX Content\n", "\n", - "An [Environment](../api/stix2.environment.rst#stix2.environment.Environment) can be set up with a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore) if you want to store and retrieve STIX content from the same place. " + "An [Environment](../api/stix2.environment.rst#stix2.environment.Environment) can be set up with a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin) if you want to store and retrieve STIX content from the same place. " ] }, { @@ -82,7 +82,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "If desired, you can instead set up an [Environment](../api/stix2.environment.rst#stix2.environment.Environment) with different data sources and sinks. In the following example we set up an environment that retrieves objects from [memory](../api/sources/stix2.sources.memory.rst) and a directory on the [filesystem](../api/sources/stix2.sources.filesystem.rst), and stores objects in a different directory on the filesystem." + "If desired, you can instead set up an [Environment](../api/stix2.environment.rst#stix2.environment.Environment) with different data sources and sinks. In the following example we set up an environment that retrieves objects from [memory](../api/datastore/stix2.datastore.memory.rst) and a directory on the [filesystem](../api/datastore/stix2.datastore.filesystem.rst), and stores objects in a different directory on the filesystem." ] }, { @@ -105,7 +105,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Once you have an [Environment](../api/stix2.environment.rst#stix2.environment.Environment) you can store some STIX content in its [DataSinks](../api/stix2.sources.rst#stix2.sources.DataSink) with [add()](../api/stix2.environment.rst#stix2.environment.Environment.add):" + "Once you have an [Environment](../api/stix2.environment.rst#stix2.environment.Environment) you can store some STIX content in its [DataSinks](../api/stix2.datastore.rst#stix2.datastore.DataSink) with [add()](../api/stix2.environment.rst#stix2.environment.Environment.add):" ] }, { @@ -128,7 +128,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "You can retrieve STIX objects from the [DataSources](../api/stix2.sources.rst#stix2.sources.DataSource) in the [Environment](../api/stix2.environment.rst#stix2.environment.Environment) with [get()](../api/stix2.environment.rst#stix2.environment.Environment.get), [query()](../api/stix2.environment.rst#stix2.environment.Environment.query), [all_versions()](../api/stix2.environment.rst#stix2.environment.Environment.all_versions), [creator_of()](../api/stix2.sources.rst#stix2.sources.DataSource.creator_of), [related_to()](../api/stix2.sources.rst#stix2.sources.DataSource.related_to), and [relationships()](../api/stix2.sources.rst#stix2.sources.DataSource.relationships) just as you would for a [DataSource](../api/stix2.sources.rst#stix2.sources.DataSource)." 
+ "You can retrieve STIX objects from the [DataSources](../api/stix2.datastore.rst#stix2.datastore.DataSource) in the [Environment](../api/stix2.environment.rst#stix2.environment.Environment) with [get()](../api/stix2.environment.rst#stix2.environment.Environment.get), [query()](../api/stix2.environment.rst#stix2.environment.Environment.query), [all_versions()](../api/stix2.environment.rst#stix2.environment.Environment.all_versions), [creator_of()](../api/stix2.datastore.rst#stix2.datastore.DataSource.creator_of), [related_to()](../api/stix2.datastore.rst#stix2.datastore.DataSource.related_to), and [relationships()](../api/stix2.datastore.rst#stix2.datastore.DataSource.relationships) just as you would for a [DataSource](../api/stix2.datastore.rst#stix2.datastore.DataSource)." ] }, { @@ -609,7 +609,7 @@ "collapsed": true }, "source": [ - "For the full power of the Environment layer, create an [Environment](../api/stix2.environment.rst#stix2.environment.Environment) with both a [DataStore](../api/stix2.sources.rst#stix2.sources.DataStore)/[Source](../api/stix2.sources.rst#stix2.sources.DataSource)/[Sink](../api/stix2.sources.rst#stix2.sources.DataSink) and an [ObjectFactory](../api/stix2.environment.rst#stix2.environment.ObjectFactory):" + "For the full power of the Environment layer, create an [Environment](../api/stix2.environment.rst#stix2.environment.Environment) with both a [DataStore](../api/stix2.datastore.rst#stix2.datastore.DataStoreMixin)/[Source](../api/stix2.datastore.rst#stix2.datastore.DataSource)/[Sink](../api/stix2.datastore.rst#stix2.datastore.DataSink) and an [ObjectFactory](../api/stix2.environment.rst#stix2.environment.ObjectFactory):" ] }, { diff --git a/docs/guide/filesystem.ipynb b/docs/guide/filesystem.ipynb index f494e6e..cff73d6 100644 --- a/docs/guide/filesystem.ipynb +++ b/docs/guide/filesystem.ipynb @@ -58,7 +58,7 @@ "source": [ "## FileSystem \n", "\n", - "The FileSystem suite contains [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore), [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource) and [FileSystemSink](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink). Under the hood, all FileSystem objects point to a file directory (on disk) that contains STIX2 content. \n", + "The FileSystem suite contains [FileSystemStore](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemStore), [FileSystemSource](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource) and [FileSystemSink](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSink). Under the hood, all FileSystem objects point to a file directory (on disk) that contains STIX2 content. \n", "\n", "The directory and file structure of the intended STIX2 content should be:\n", "\n", @@ -107,15 +107,15 @@ " /vulnerability\n", "```\n", "\n", - "[FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) is intended for use cases where STIX2 content is retrieved and pushed to the same file directory. 
As [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) is just a wrapper around a paired [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource) and [FileSystemSink](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink) that point the same file directory.\n", + "[FileSystemStore](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemStore) is intended for use cases where STIX2 content is retrieved and pushed to the same file directory. As [FileSystemStore](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemStore) is just a wrapper around a paired [FileSystemSource](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource) and [FileSystemSink](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSink) that point the same file directory.\n", "\n", - "For use cases where STIX2 content will only be retrieved or pushed, then a [FileSystemSource](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource) and [FileSystemSink](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink) can be used individually. They can also be used individually when STIX2 content will be retrieved from one distinct file directory and pushed to another.\n", + "For use cases where STIX2 content will only be retrieved or pushed, then a [FileSystemSource](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource) and [FileSystemSink](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSink) can be used individually. They can also be used individually when STIX2 content will be retrieved from one distinct file directory and pushed to another.\n", "\n", "### FileSystem API\n", "\n", - "A note on [get()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.get), [all_versions()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.all_versions), and [query()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSource.query): The format of the STIX2 content targeted by the FileSystem suite is JSON files. When the [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore) retrieves STIX2 content (in JSON) from disk, it will attempt to parse the content into full-featured python-stix2 objects and returned as such. \n", + "A note on [get()](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource.get), [all_versions()](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource.all_versions), and [query()](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSource.query): The format of the STIX2 content targeted by the FileSystem suite is JSON files. When the [FileSystemStore](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemStore) retrieves STIX2 content (in JSON) from disk, it will attempt to parse the content into full-featured python-stix2 objects and returned as such. 
\n", "\n", - "A note on [add()](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemSink.add): When STIX content is added (pushed) to the file system, the STIX content can be supplied in the following forms: Python STIX objects, Python dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. Any of the previous STIX content forms will be converted to a STIX JSON object (in a STIX Bundle) and written to disk. \n", + "A note on [add()](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemSink.add): When STIX content is added (pushed) to the file system, the STIX content can be supplied in the following forms: Python STIX objects, Python dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. Any of the previous STIX content forms will be converted to a STIX JSON object (in a STIX Bundle) and written to disk. \n", "\n", "### FileSystem Examples\n", "\n", diff --git a/docs/guide/memory.ipynb b/docs/guide/memory.ipynb index d651525..79a8f33 100644 --- a/docs/guide/memory.ipynb +++ b/docs/guide/memory.ipynb @@ -58,15 +58,15 @@ "source": [ "## Memory\n", "\n", - "The Memory suite consists of [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore), [MemorySource](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemorySource), and [MemorySink](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemorySink). Under the hood, the Memory suite points to an in-memory dictionary. Similarly, the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) is a just a wrapper around a paired [MemorySource](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemorySource) and [MemorySink](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemorySink); as there is quite limited uses for just a [MemorySource](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemorySource) or a [MemorySink](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemorySink), it is recommended to always use [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore). The [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) is intended for retrieving/searching and pushing STIX content to memory. It is important to note that all STIX content in memory is not backed up on the file system (disk), as that functionality is encompassed within the [FileSystemStore](../api/sources/stix2.sources.filesystem.rst#stix2.sources.filesystem.FileSystemStore). However, the Memory suite does provide some utility methods for saving and loading STIX content to disk. [MemoryStore.save_to_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.save_to_file) allows for saving all the STIX content that is in memory to a json file. [MemoryStore.load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file) allows for loading STIX content from a JSON-formatted file. 
\n", + "The Memory suite consists of [MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore), [MemorySource](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemorySource), and [MemorySink](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemorySink). Under the hood, the Memory suite points to an in-memory dictionary. Similarly, the [MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore) is a just a wrapper around a paired [MemorySource](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemorySource) and [MemorySink](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemorySink); as there is quite limited uses for just a [MemorySource](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemorySource) or a [MemorySink](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemorySink), it is recommended to always use [MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore). The [MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore) is intended for retrieving/searching and pushing STIX content to memory. It is important to note that all STIX content in memory is not backed up on the file system (disk), as that functionality is encompassed within the [FileSystemStore](../api/datastore/stix2.datastore.filesystem.rst#stix2.datastore.filesystem.FileSystemStore). However, the Memory suite does provide some utility methods for saving and loading STIX content to disk. [MemoryStore.save_to_file()](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore.save_to_file) allows for saving all the STIX content that is in memory to a json file. [MemoryStore.load_from_file()](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore.load_from_file) allows for loading STIX content from a JSON-formatted file. \n", "\n", "\n", "### Memory API\n", - "A note on adding and retreiving STIX content to the Memory suite: As mentioned, under the hood the Memory suite is an internal, in-memory dictionary. STIX content that is to be added can be in the following forms: python-stix2 objects, (Python) dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) actually stores STIX content either as python-stix2 objects or as (Python) dictionaries, reducing and converting any of the aforementioned types to one of those. Additionally, whatever form the STIX object is stored as, is how it will be returned when retrieved. python-stix2 objects, and json-encoded strings (of STIX content) are stored as python-stix2 objects, while (Python) dictionaries (of STIX objects) are stored as (Python) dictionaries.\n", + "A note on adding and retreiving STIX content to the Memory suite: As mentioned, under the hood the Memory suite is an internal, in-memory dictionary. STIX content that is to be added can be in the following forms: python-stix2 objects, (Python) dictionaries (of valid STIX objects or Bundles), JSON-encoded strings (of valid STIX objects or Bundles), or a (Python) list of any of the previously listed types. 
[MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore) actually stores STIX content either as python-stix2 objects or as (Python) dictionaries, reducing and converting any of the aforementioned types to one of those. Additionally, whatever form the STIX object is stored as, is how it will be returned when retrieved. python-stix2 objects, and json-encoded strings (of STIX content) are stored as python-stix2 objects, while (Python) dictionaries (of STIX objects) are stored as (Python) dictionaries.\n", "\n", - "A note on [load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file): For [load_from_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.load_from_file), STIX content is assumed to be in JSON form within the file, as an individual STIX object or in a Bundle. When the JSON is loaded, the STIX objects are parsed into python-stix2 objects before being stored in the in-memory dictionary.\n", + "A note on [load_from_file()](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore.load_from_file): For [load_from_file()](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore.load_from_file), STIX content is assumed to be in JSON form within the file, as an individual STIX object or in a Bundle. When the JSON is loaded, the STIX objects are parsed into python-stix2 objects before being stored in the in-memory dictionary.\n", "\n", - "A note on [save_to_file()](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore.save_to_file): This method dumps all STIX content that is in the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore) to the specified file. The file format will be JSON, and the STIX content will be within a STIX Bundle. Note also that the output form will be a JSON STIX Bundle regardless of the form that the individual STIX objects are stored in (i.e. supplied to) the [MemoryStore](../api/sources/stix2.sources.memory.rst#stix2.sources.memory.MemoryStore). \n", + "A note on [save_to_file()](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore.save_to_file): This method dumps all STIX content that is in the [MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore) to the specified file. The file format will be JSON, and the STIX content will be within a STIX Bundle. Note also that the output form will be a JSON STIX Bundle regardless of the form that the individual STIX objects are stored in (i.e. supplied to) the [MemoryStore](../api/datastore/stix2.datastore.memory.rst#stix2.datastore.memory.MemoryStore). \n", "\n", "### Memory Examples\n", "\n", diff --git a/docs/guide/taxii.ipynb b/docs/guide/taxii.ipynb index b0f0cea..ad06093 100644 --- a/docs/guide/taxii.ipynb +++ b/docs/guide/taxii.ipynb @@ -58,9 +58,9 @@ "source": [ "## TAXIICollection\n", "\n", - "The TAXIICollection suite contains [TAXIICollectionStore](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionStore), [TAXIICollectionSource](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSource), and [TAXIICollectionSink](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSink). [TAXIICollectionStore](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionStore) pushes and retrieves STIX content to local/remote TAXII Collection(s). 
[TAXIICollectionSource](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSource) retrieves STIX content from local/remote TAXII Collection(s). [TAXIICollectionSink](../api/sources/stix2.sources.taxii.rst#stix2.sources.taxii.TAXIICollectionSink) pushes STIX content to local/remote TAXII Collection(s). Each of the interfaces is designed to be bound to a Collection from the [taxii2client](https://github.com/oasis-open/cti-taxii-client) library (taxii2client.Collection), where all [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API calls will be executed through that Collection instance.\n", + "The TAXIICollection suite contains [TAXIICollectionStore](../api/datastore/stix2.datastore.taxii.rst#stix2.datastore.taxii.TAXIICollectionStore), [TAXIICollectionSource](../api/datastore/stix2.datastore.taxii.rst#stix2.datastore.taxii.TAXIICollectionSource), and [TAXIICollectionSink](../api/datastore/stix2.datastore.taxii.rst#stix2.datastore.taxii.TAXIICollectionSink). [TAXIICollectionStore](../api/datastore/stix2.datastore.taxii.rst#stix2.datastore.taxii.TAXIICollectionStore) pushes and retrieves STIX content to local/remote TAXII Collection(s). [TAXIICollectionSource](../api/datastore/stix2.datastore.taxii.rst#stix2.datastore.taxii.TAXIICollectionSource) retrieves STIX content from local/remote TAXII Collection(s). [TAXIICollectionSink](../api/datastore/stix2.datastore.taxii.rst#stix2.datastore.taxii.TAXIICollectionSink) pushes STIX content to local/remote TAXII Collection(s). Each of the interfaces is designed to be bound to a Collection from the [taxii2client](https://github.com/oasis-open/cti-taxii-client) library (taxii2client.Collection), where all [TAXIICollection](../api/datastore/stix2.datastore.taxii.rst) API calls will be executed through that Collection instance.\n", "\n", - "A note on TAXII2 searching/filtering of STIX content: TAXII2 server implementations natively support searching on the STIX2 object properties: id, type and version; API requests made to TAXII2 can contain filter arguments for those 3 properties. However, the [TAXIICollection](../api/sources/stix2.sources.taxii.rst) suite supports searching on all STIX2 common object properties (see [Filters](../api/sources/stix2.sources.filters.rst) documentation for full listing). This works simply by augmenting the filtering that is done remotely at the TAXII2 server instance. [TAXIICollection](../api/sources/stix2.sources.taxii.rst) will seperate any supplied queries into TAXII supported filters and non-supported filters. During a [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API call, TAXII2 supported filters get inserted into the TAXII2 server request (to be evaluated at the server). The rest of the filters are kept locally and then applied to the STIX2 content that is returned from the TAXII2 server, before being returned from the [TAXIICollection](../api/sources/stix2.sources.taxii.rst) API call. \n", + "A note on TAXII2 searching/filtering of STIX content: TAXII2 server implementations natively support searching on the STIX2 object properties: id, type and version; API requests made to TAXII2 can contain filter arguments for those 3 properties. However, the [TAXIICollection](../api/datastore/stix2.datastore.taxii.rst) suite supports searching on all STIX2 common object properties (see [Filters](../api/datastore/stix2.datastore.filters.rst) documentation for full listing). This works simply by augmenting the filtering that is done remotely at the TAXII2 server instance. 
[TAXIICollection](../api/datastore/stix2.datastore.taxii.rst) will seperate any supplied queries into TAXII supported filters and non-supported filters. During a [TAXIICollection](../api/datastore/stix2.datastore.taxii.rst) API call, TAXII2 supported filters get inserted into the TAXII2 server request (to be evaluated at the server). The rest of the filters are kept locally and then applied to the STIX2 content that is returned from the TAXII2 server, before being returned from the [TAXIICollection](../api/datastore/stix2.datastore.taxii.rst) API call. \n", "\n", "### TAXIICollection API\n", "\n", diff --git a/examples/taxii_example.py b/examples/taxii_example.py index 166d028..e102b17 100644 --- a/examples/taxii_example.py +++ b/examples/taxii_example.py @@ -1,6 +1,6 @@ import json -from stix2.sources.taxii import TAXIIDataSource +from stix2.datastore.taxii import TAXIIDataSource # Flask TAXII server - developmental ROOT = 'http://localhost:5000' diff --git a/stix2/__init__.py b/stix2/__init__.py index 9ce9378..401d44b 100644 --- a/stix2/__init__.py +++ b/stix2/__init__.py @@ -3,23 +3,30 @@ .. autosummary:: :toctree: api - v20.common core + datastore environment exceptions markings - v20.observables patterns properties + utils + v20.common + v20.observables v20.sdo - sources v20.sro - utils """ # flake8: noqa from .core import Bundle, _collect_stix2_obj_maps, _register_type, parse +from .datastore import CompositeDataSource +from .datastore.filesystem import (FileSystemSink, FileSystemSource, + FileSystemStore) +from .datastore.filters import Filter +from .datastore.memory import MemorySink, MemorySource, MemoryStore +from .datastore.taxii import (TAXIICollectionSink, TAXIICollectionSource, + TAXIICollectionStore) from .environment import Environment, ObjectFactory from .markings import (add_markings, clear_markings, get_markings, is_marked, remove_markings, set_markings) @@ -41,13 +48,6 @@ from .patterns import (AndBooleanExpression, AndObservationExpression, ReferenceObjectPathComponent, RepeatQualifier, StartStopQualifier, StringConstant, TimestampConstant, WithinQualifier) -from .sources import CompositeDataSource -from .sources.filesystem import (FileSystemSink, FileSystemSource, - FileSystemStore) -from .sources.filters import Filter -from .sources.memory import MemorySink, MemorySource, MemoryStore -from .sources.taxii import (TAXIICollectionSink, TAXIICollectionSource, - TAXIICollectionStore) from .utils import get_dict, new_version, revoke from .v20 import * # This import will always be the latest STIX 2.X version from .version import __version__ diff --git a/stix2/sources/__init__.py b/stix2/datastore/__init__.py similarity index 98% rename from stix2/sources/__init__.py rename to stix2/datastore/__init__.py index adc6def..78f7555 100644 --- a/stix2/sources/__init__.py +++ b/stix2/datastore/__init__.py @@ -1,7 +1,7 @@ -"""Python STIX 2.0 Sources +"""Python STIX 2.0 DataStore API .. autosummary:: - :toctree: sources + :toctree: datastore filesystem filters @@ -16,7 +16,7 @@ import uuid from six import with_metaclass -from stix2.sources.filters import Filter +from stix2.datastore.filters import Filter from stix2.utils import deduplicate @@ -24,9 +24,9 @@ def make_id(): return str(uuid.uuid4()) -class DataStore(object): - """An implementer can subclass to create custom behavior from - this class for the specific DataStores. +class DataStoreMixin(object): + """Provides mechanisms for storing and retrieving STIX data. The specific + behavior can be customized by subclasses. 
Args: source (DataSource): An existing DataSource to use @@ -41,7 +41,7 @@ class DataStore(object): """ def __init__(self, source=None, sink=None): - super(DataStore, self).__init__() + super(DataStoreMixin, self).__init__() self.id = make_id() self.source = source self.sink = sink diff --git a/stix2/sources/filesystem.py b/stix2/datastore/filesystem.py similarity index 98% rename from stix2/sources/filesystem.py rename to stix2/datastore/filesystem.py index f4311be..26d0c58 100644 --- a/stix2/sources/filesystem.py +++ b/stix2/datastore/filesystem.py @@ -7,12 +7,12 @@ import json import os from stix2.core import Bundle, parse -from stix2.sources import DataSink, DataSource, DataStore -from stix2.sources.filters import Filter, apply_common_filters +from stix2.datastore import DataSink, DataSource, DataStoreMixin +from stix2.datastore.filters import Filter, apply_common_filters from stix2.utils import deduplicate, get_class_hierarchy_names -class FileSystemStore(DataStore): +class FileSystemStore(DataStoreMixin): """Interface to a file directory of STIX objects. FileSystemStore is a wrapper around a paired FileSystemSink diff --git a/stix2/sources/filters.py b/stix2/datastore/filters.py similarity index 100% rename from stix2/sources/filters.py rename to stix2/datastore/filters.py diff --git a/stix2/sources/memory.py b/stix2/datastore/memory.py similarity index 97% rename from stix2/sources/memory.py rename to stix2/datastore/memory.py index 5d08d7c..e057271 100644 --- a/stix2/sources/memory.py +++ b/stix2/datastore/memory.py @@ -17,8 +17,8 @@ import os from stix2.base import _STIXBase from stix2.core import Bundle, parse -from stix2.sources import DataSink, DataSource, DataStore -from stix2.sources.filters import Filter, apply_common_filters +from stix2.datastore import DataSink, DataSource, DataStoreMixin +from stix2.datastore.filters import Filter, apply_common_filters def _add(store, stix_data=None, version=None): @@ -56,7 +56,7 @@ def _add(store, stix_data=None, version=None): " or a JSON formatted STIX bundle. stix_data was of type: " + str(type(stix_data))) -class MemoryStore(DataStore): +class MemoryStore(DataStoreMixin): """Interface to an in-memory dictionary of STIX objects. MemoryStore is a wrapper around a paired MemorySink and MemorySource. @@ -124,7 +124,7 @@ class MemorySink(DataSink): Args: stix_data (dict OR list): valid STIX 2.0 content in bundle or a list. - _store (bool): whether the MemorySink is a part of a DataStore, + _store (bool): whether the MemorySink is a part of a MemoryStore, in which case "stix_data" is a direct reference to shared memory with DataSource. Not user supplied allow_custom (bool): whether to allow custom objects/properties @@ -170,7 +170,7 @@ class MemorySource(DataSource): Args: stix_data (dict OR list OR STIX object): valid STIX 2.0 content in bundle or list. - _store (bool): if the MemorySource is a part of a DataStore, + _store (bool): if the MemorySource is a part of a MemoryStore, in which case "stix_data" is a direct reference to shared memory with DataSink. 
Not user supplied allow_custom (bool): whether to allow custom objects/properties diff --git a/stix2/sources/taxii.py b/stix2/datastore/taxii.py similarity index 98% rename from stix2/sources/taxii.py rename to stix2/datastore/taxii.py index 2d54725..5af4354 100644 --- a/stix2/sources/taxii.py +++ b/stix2/datastore/taxii.py @@ -5,14 +5,14 @@ from requests.exceptions import HTTPError from stix2.base import _STIXBase from stix2.core import Bundle, parse -from stix2.sources import DataSink, DataSource, DataStore -from stix2.sources.filters import Filter, apply_common_filters +from stix2.datastore import DataSink, DataSource, DataStoreMixin +from stix2.datastore.filters import Filter, apply_common_filters from stix2.utils import deduplicate TAXII_FILTERS = ['added_after', 'id', 'type', 'version'] -class TAXIICollectionStore(DataStore): +class TAXIICollectionStore(DataStoreMixin): """Provides an interface to a local/remote TAXII Collection of STIX data. TAXIICollectionStore is a wrapper around a paired TAXIICollectionSink and TAXIICollectionSource. diff --git a/stix2/environment.py b/stix2/environment.py index ab16b09..eb5583e 100644 --- a/stix2/environment.py +++ b/stix2/environment.py @@ -1,7 +1,7 @@ import copy from .core import parse as _parse -from .sources import CompositeDataSource, DataStore +from .datastore import CompositeDataSource, DataStoreMixin class ObjectFactory(object): @@ -75,7 +75,7 @@ class ObjectFactory(object): return cls(**properties) -class Environment(object): +class Environment(DataStoreMixin): """Abstract away some of the nasty details of working with STIX content. Args: @@ -86,6 +86,14 @@ class Environment(object): source (DataSource, optional): Source for retrieving STIX objects. sink (DataSink, optional): Destination for saving STIX objects. Invalid if `store` is also provided. + + .. automethod:: get + .. automethod:: all_versions + .. automethod:: query + .. automethod:: creator_of + .. automethod:: relationships + .. automethod:: related_to + .. automethod:: add """ def __init__(self, factory=ObjectFactory(), store=None, source=None, sink=None): @@ -105,14 +113,6 @@ class Environment(object): return self.factory.create(*args, **kwargs) create.__doc__ = ObjectFactory.create.__doc__ - get = DataStore.__dict__['get'] - all_versions = DataStore.__dict__['all_versions'] - query = DataStore.__dict__['query'] - creator_of = DataStore.__dict__['creator_of'] - relationships = DataStore.__dict__['relationships'] - related_to = DataStore.__dict__['related_to'] - add = DataStore.__dict__['add'] - def add_filters(self, *args, **kwargs): try: return self.source.filters.update(*args, **kwargs)
Rename "sources" subpackage It contains sources, sinks, and datastores, so this name doesn't fit. We should rename it to something like "datastore".
oasis-open/cti-python-stix2
diff --git a/stix2/test/test_data_sources.py b/stix2/test/test_datastore.py similarity index 98% rename from stix2/test/test_data_sources.py rename to stix2/test/test_datastore.py index d7f238a..3490d6a 100644 --- a/stix2/test/test_data_sources.py +++ b/stix2/test/test_datastore.py @@ -2,9 +2,9 @@ import pytest from taxii2client import Collection from stix2 import Filter, MemorySink, MemorySource -from stix2.sources import (CompositeDataSource, DataSink, DataSource, make_id, - taxii) -from stix2.sources.filters import apply_common_filters +from stix2.datastore import (CompositeDataSource, DataSink, DataSource, + make_id, taxii) +from stix2.datastore.filters import apply_common_filters from stix2.utils import deduplicate COLLECTION_URL = 'https://example.com/api1/collections/91a7b528-80eb-42ed-a74d-c6fbd5a26116/' @@ -476,7 +476,7 @@ def test_add_remove_composite_datasource(): with pytest.raises(TypeError) as excinfo: cds.add_data_sources([ds1, ds2, ds1, ds3]) assert str(excinfo.value) == ("DataSource (to be added) is not of type " - "stix2.DataSource. DataSource type is '<class 'stix2.sources.memory.MemorySink'>'") + "stix2.DataSource. DataSource type is '<class 'stix2.datastore.memory.MemorySink'>'") cds.add_data_sources([ds1, ds2, ds1]) diff --git a/stix2/test/test_memory.py b/stix2/test/test_memory.py index ad78611..2384848 100644 --- a/stix2/test/test_memory.py +++ b/stix2/test/test_memory.py @@ -6,7 +6,7 @@ import pytest from stix2 import (Bundle, Campaign, CustomObject, Filter, Identity, Indicator, Malware, MemorySource, MemoryStore, Relationship, properties) -from stix2.sources import make_id +from stix2.datastore import make_id from .constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID, IDENTITY_KWARGS, INDICATOR_ID, INDICATOR_KWARGS,
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files", "has_removed_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 12 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "coverage" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 antlr4-python3-runtime==4.9.3 async-generator==1.10 attrs==22.2.0 Babel==2.11.0 backcall==0.2.0 bleach==4.1.0 bump2version==1.0.1 bumpversion==0.6.0 certifi==2021.5.30 cfgv==3.3.1 charset-normalizer==2.0.12 coverage==6.2 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.9 docutils==0.18.1 entrypoints==0.4 filelock==3.4.1 identify==2.4.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.2.3 iniconfig==1.1.1 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nbsphinx==0.8.8 nest-asyncio==1.6.0 nodeenv==1.6.0 packaging==21.3 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 platformdirs==2.4.0 pluggy==1.0.0 pre-commit==2.17.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 pyzmq==25.1.2 requests==2.27.1 simplejson==3.20.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-prompt==1.5.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -e git+https://github.com/oasis-open/cti-python-stix2.git@9402cff110ad6c8da0e2c3229520499dadbc7feb#egg=stix2 stix2-patterns==2.0.0 taxii2-client==2.3.0 testpath==0.6.0 toml==0.10.2 tomli==1.2.3 tornado==6.1 tox==3.28.0 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 wcwidth==0.2.13 webencodings==0.5.1 zipp==3.6.0
name: cti-python-stix2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - antlr4-python3-runtime==4.9.3 - async-generator==1.10 - attrs==22.2.0 - babel==2.11.0 - backcall==0.2.0 - bleach==4.1.0 - bump2version==1.0.1 - bumpversion==0.6.0 - cfgv==3.3.1 - charset-normalizer==2.0.12 - coverage==6.2 - decorator==5.1.1 - defusedxml==0.7.1 - distlib==0.3.9 - docutils==0.18.1 - entrypoints==0.4 - filelock==3.4.1 - identify==2.4.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.2.3 - iniconfig==1.1.1 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nbsphinx==0.8.8 - nest-asyncio==1.6.0 - nodeenv==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - platformdirs==2.4.0 - pluggy==1.0.0 - pre-commit==2.17.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - pyzmq==25.1.2 - requests==2.27.1 - simplejson==3.20.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-prompt==1.5.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - stix2-patterns==2.0.0 - taxii2-client==2.3.0 - testpath==0.6.0 - toml==0.10.2 - tomli==1.2.3 - tornado==6.1 - tox==3.28.0 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - wcwidth==0.2.13 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/cti-python-stix2
[ "stix2/test/test_datastore.py::test_ds_abstract_class_smoke", "stix2/test/test_datastore.py::test_ds_taxii", "stix2/test/test_datastore.py::test_ds_taxii_name", "stix2/test/test_datastore.py::test_parse_taxii_filters", "stix2/test/test_datastore.py::test_add_get_remove_filter", "stix2/test/test_datastore.py::test_apply_common_filters", "stix2/test/test_datastore.py::test_filters0", "stix2/test/test_datastore.py::test_filters1", "stix2/test/test_datastore.py::test_filters2", "stix2/test/test_datastore.py::test_filters3", "stix2/test/test_datastore.py::test_filters4", "stix2/test/test_datastore.py::test_filters5", "stix2/test/test_datastore.py::test_filters6", "stix2/test/test_datastore.py::test_filters7", "stix2/test/test_datastore.py::test_deduplicate", "stix2/test/test_datastore.py::test_add_remove_composite_datasource", "stix2/test/test_datastore.py::test_composite_datasource_operations", "stix2/test/test_datastore.py::test_composite_datastore_no_datasource", "stix2/test/test_memory.py::test_memory_source_get", "stix2/test/test_memory.py::test_memory_source_get_nonexistant_object", "stix2/test/test_memory.py::test_memory_store_all_versions", "stix2/test/test_memory.py::test_memory_store_query", "stix2/test/test_memory.py::test_memory_store_query_single_filter", "stix2/test/test_memory.py::test_memory_store_query_empty_query", "stix2/test/test_memory.py::test_memory_store_query_multiple_filters", "stix2/test/test_memory.py::test_memory_store_save_load_file", "stix2/test/test_memory.py::test_memory_store_add_invalid_object", "stix2/test/test_memory.py::test_memory_store_object_with_custom_property", "stix2/test/test_memory.py::test_memory_store_object_with_custom_property_in_bundle", "stix2/test/test_memory.py::test_memory_store_custom_object", "stix2/test/test_memory.py::test_relationships", "stix2/test/test_memory.py::test_relationships_by_type", "stix2/test/test_memory.py::test_relationships_by_source", "stix2/test/test_memory.py::test_relationships_by_target", "stix2/test/test_memory.py::test_relationships_by_target_and_type", "stix2/test/test_memory.py::test_relationships_by_target_and_source", "stix2/test/test_memory.py::test_related_to", "stix2/test/test_memory.py::test_related_to_by_source", "stix2/test/test_memory.py::test_related_to_by_target" ]
[]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,235
[ "docs/guide/taxii.ipynb", "docs/api/sources/stix2.sources.filesystem.rst", "docs/guide/datastore.ipynb", "stix2/__init__.py", "docs/api/stix2.datastore.rst", "stix2/environment.py", "stix2/sources/memory.py", "docs/api/sources/stix2.sources.filters.rst", "docs/api/datastore/stix2.datastore.filesystem.rst", "docs/api/sources/stix2.sources.taxii.rst", "docs/api/sources/stix2.sources.memory.rst", "docs/api/datastore/stix2.datastore.memory.rst", "docs/guide/memory.ipynb", "stix2/sources/__init__.py", "docs/api/stix2.sources.rst", "docs/guide/filesystem.ipynb", "docs/api/datastore/stix2.datastore.taxii.rst", "docs/guide/environment.ipynb", "stix2/sources/filters.py", "stix2/sources/taxii.py", "examples/taxii_example.py", "stix2/sources/filesystem.py", "docs/api/datastore/stix2.datastore.filters.rst" ]
[ "docs/guide/taxii.ipynb", "docs/api/sources/stix2.sources.filesystem.rst", "docs/guide/datastore.ipynb", "stix2/__init__.py", "docs/api/stix2.datastore.rst", "stix2/datastore/taxii.py", "stix2/environment.py", "stix2/datastore/memory.py", "stix2/datastore/filesystem.py", "docs/api/sources/stix2.sources.filters.rst", "docs/api/datastore/stix2.datastore.filesystem.rst", "docs/api/sources/stix2.sources.taxii.rst", "docs/api/sources/stix2.sources.memory.rst", "stix2/datastore/filters.py", "stix2/datastore/__init__.py", "docs/api/datastore/stix2.datastore.memory.rst", "docs/guide/memory.ipynb", "docs/api/stix2.sources.rst", "docs/guide/filesystem.ipynb", "docs/api/datastore/stix2.datastore.taxii.rst", "docs/guide/environment.ipynb", "examples/taxii_example.py", "docs/api/datastore/stix2.datastore.filters.rst" ]
nipy__nipype-2479
e6792158568a51f0e6cdef77c6ca12ab6266a7dd
2018-03-01 22:23:30
704b97dee7848283692bac38f04541c5af2a87b5
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 2b6bb6ed3..301a35844 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -1050,7 +1050,17 @@ def generate_expanded_graph(graph_in): expansions = defaultdict(list) for node in graph_in.nodes(): for src_id in list(old_edge_dict.keys()): - if node.itername.startswith(src_id): + # Drop the original JoinNodes; only concerned with + # generated Nodes + if hasattr(node, 'joinfield'): + continue + # Patterns: + # - src_id : Non-iterable node + # - src_id.[a-z]\d+ : IdentityInterface w/ iterables + # - src_id.[a-z]I.[a-z]\d+ : Non-IdentityInterface w/ iterables + # - src_idJ\d+ : JoinNode(IdentityInterface) + if re.match(src_id + r'((\.[a-z](I\.[a-z])?|J)\d+)?$', + node.itername): expansions[src_id].append(node) for in_id, in_nodes in list(expansions.items()): logger.debug("The join node %s input %s was expanded"
Issue with node name that starts with another node's name

I think the [line ~801 in util.py](https://github.com/nipy/nipype/edit/master/nipype/pipeline/engine/utils.py#L801) should be something like this:

    for node in graph_in.nodes():
        for src_id, edge_data in list(old_edge_dict.items()):
            if node.itername.startswith(src_id + '.'):  # <-- add '.' to src_id
                expansions[src_id].append(node)

For example, if the node "input" feeds into "input_files", the "input_files" can be included if you just test for node.itername.startswith(src_id). This change would prevent "input_files" from being included.

Edit: removed last part of my comment.
nipy/nipype
diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index 436d29d9e..54ff15048 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -7,11 +7,9 @@ from __future__ import (print_function, division, unicode_literals, absolute_import) from builtins import open -import os - from ... import engine as pe from ....interfaces import base as nib -from ....interfaces.utility import IdentityInterface +from ....interfaces.utility import IdentityInterface, Function, Merge from ....interfaces.base import traits, File @@ -612,3 +610,20 @@ def test_nested_workflow_join(tmpdir): # there should be six nodes in total assert len(result.nodes()) == 6, \ "The number of expanded nodes is incorrect." + + +def test_name_prefix_join(tmpdir): + tmpdir.chdir() + + def sq(x): + return x ** 2 + + wf = pe.Workflow('wf', base_dir=tmpdir.strpath) + square = pe.Node(Function(function=sq), name='square') + square.iterables = [('x', [1, 2])] + square_join = pe.JoinNode(Merge(1, ravel_inputs=True), + name='square_join', + joinsource='square', + joinfield=['in1']) + wf.connect(square, 'out', square_join, "in1") + wf.run()
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 click==8.0.4 configparser==5.2.0 decorator==4.4.2 funcsigs==1.0.2 future==1.0.0 importlib-metadata==4.8.3 iniconfig==1.1.1 isodate==0.6.1 lxml==5.3.1 mock==5.2.0 networkx==2.5.1 nibabel==3.2.2 -e git+https://github.com/nipy/nipype.git@e6792158568a51f0e6cdef77c6ca12ab6266a7dd#egg=nipype numpy==1.19.5 packaging==21.3 pluggy==1.0.0 prov==1.5.0 py==1.11.0 pydot==1.4.2 pydotplus==2.0.2 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 rdflib==5.0.0 scipy==1.5.4 simplejson==3.20.1 six==1.17.0 tomli==1.2.3 traits==6.4.1 typing_extensions==4.1.1 zipp==3.6.0
name: nipype channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==8.0.4 - configparser==5.2.0 - decorator==4.4.2 - funcsigs==1.0.2 - future==1.0.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isodate==0.6.1 - lxml==5.3.1 - mock==5.2.0 - networkx==2.5.1 - nibabel==3.2.2 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - prov==1.5.0 - py==1.11.0 - pydot==1.4.2 - pydotplus==2.0.2 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - rdflib==5.0.0 - scipy==1.5.4 - simplejson==3.20.1 - six==1.17.0 - tomli==1.2.3 - traits==6.4.1 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/nipype
[ "nipype/pipeline/engine/tests/test_join.py::test_name_prefix_join" ]
[]
[ "nipype/pipeline/engine/tests/test_join.py::test_join_expansion", "nipype/pipeline/engine/tests/test_join.py::test_node_joinsource", "nipype/pipeline/engine/tests/test_join.py::test_set_join_node", "nipype/pipeline/engine/tests/test_join.py::test_unique_join_node", "nipype/pipeline/engine/tests/test_join.py::test_multiple_join_nodes", "nipype/pipeline/engine/tests/test_join.py::test_identity_join_node", "nipype/pipeline/engine/tests/test_join.py::test_multifield_join_node", "nipype/pipeline/engine/tests/test_join.py::test_synchronize_join_node", "nipype/pipeline/engine/tests/test_join.py::test_itersource_join_source_node", "nipype/pipeline/engine/tests/test_join.py::test_itersource_two_join_nodes", "nipype/pipeline/engine/tests/test_join.py::test_set_join_node_file_input", "nipype/pipeline/engine/tests/test_join.py::test_nested_workflow_join" ]
[]
Apache License 2.0
2,236
[ "nipype/pipeline/engine/utils.py" ]
[ "nipype/pipeline/engine/utils.py" ]
eliben__pycparser-236
4992410bf8c2d6d7eb94703d0f6f94b5a9acaa0a
2018-03-01 22:45:54
168f54c3ae324c3827d22fb90e456653e6fe584a
dbluhm: I failed to run tests before submitting :stuck_out_tongue_closed_eyes:. I'm looking into the errors reported now.

dbluhm: One issue was that there was no default case for pragma's without statements. Now, however, tests fail due to the structure being slightly different. @eliben, what do you think of these changes?

dbluhm: Narrowed the affected pragma's with b1ff55d
diff --git a/pycparser/c_parser.py b/pycparser/c_parser.py index f01f67f..47d958f 100644 --- a/pycparser/c_parser.py +++ b/pycparser/c_parser.py @@ -616,6 +616,59 @@ class CParser(PLYParser): """ p[0] = p[1] + # A pragma is generally considered a decorator rather than an actual statement. + # Still, for the purposes of analyzing an abstract syntax tree of C code, + # pragma's should not be ignored and were previously treated as a statement. + # This presents a problem for constructs that take a statement such as labeled_statements, + # selection_statements, and iteration_statements, causing a misleading structure + # in the AST. For example, consider the following C code. + # + # for (int i = 0; i < 3; i++) + # #pragma omp critical + # sum += 1; + # + # This code will compile and execute "sum += 1;" as the body of the for loop. + # Previous implementations of PyCParser would render the AST for this + # block of code as follows: + # + # For: + # DeclList: + # Decl: i, [], [], [] + # TypeDecl: i, [] + # IdentifierType: ['int'] + # Constant: int, 0 + # BinaryOp: < + # ID: i + # Constant: int, 3 + # UnaryOp: p++ + # ID: i + # Pragma: omp critical + # Assignment: += + # ID: sum + # Constant: int, 1 + # + # This AST misleadingly takes the Pragma as the body of the loop and the + # assignment then becomes a sibling of the loop. + # + # To solve edge cases like these, the pragmacomp_or_statement rule groups + # a pragma and its following statement (which would otherwise be orphaned) + # using a compound block, effectively turning the above code into: + # + # for (int i = 0; i < 3; i++) { + # #pragma omp critical + # sum += 1; + # } + def p_pragmacomp_or_statement(self, p): + """ pragmacomp_or_statement : pppragma_directive statement + | statement + """ + if isinstance(p[1], c_ast.Pragma) and len(p) == 3: + p[0] = c_ast.Compound( + block_items=[p[1], p[2]], + coord=self._token_coord(p, 1)) + else: + p[0] = p[1] + # In C, declarations can come several in a line: # int x, *px, romulo = 5; # @@ -1410,44 +1463,44 @@ class CParser(PLYParser): coord=self._token_coord(p, 1)) def p_labeled_statement_1(self, p): - """ labeled_statement : ID COLON statement """ + """ labeled_statement : ID COLON pragmacomp_or_statement """ p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1)) def p_labeled_statement_2(self, p): - """ labeled_statement : CASE constant_expression COLON statement """ + """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """ p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1)) def p_labeled_statement_3(self, p): - """ labeled_statement : DEFAULT COLON statement """ + """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """ p[0] = c_ast.Default([p[3]], self._token_coord(p, 1)) def p_selection_statement_1(self, p): - """ selection_statement : IF LPAREN expression RPAREN statement """ + """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """ p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1)) def p_selection_statement_2(self, p): - """ selection_statement : IF LPAREN expression RPAREN statement ELSE statement """ + """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """ p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1)) def p_selection_statement_3(self, p): - """ selection_statement : SWITCH LPAREN expression RPAREN statement """ + """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """ p[0] = fix_switch_cases( c_ast.Switch(p[3], p[5], 
self._token_coord(p, 1))) def p_iteration_statement_1(self, p): - """ iteration_statement : WHILE LPAREN expression RPAREN statement """ + """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """ p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1)) def p_iteration_statement_2(self, p): - """ iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """ + """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """ p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1)) def p_iteration_statement_3(self, p): - """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement """ + """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1)) def p_iteration_statement_4(self, p): - """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """ + """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)), p[4], p[6], p[8], self._token_coord(p, 1))
Incorrect AST Structure when #pragma follows For loop.

Consider the following code snippet:

```c
for(int i = 0; i < 3; i++)
    #pragma omp critical
    sum += 1;
```

When compiled without OpenMP, the #pragma should be ignored completely, so that the statement ``sum += 1`` should be a descendant of the for loop. However, in the current implementation of pycparser, it is parsed as a _sibling_ of the for loop, instead of as a descendant.

```
For:
  DeclList:
    Decl: i, [], [], []
      TypeDecl: i, []
        IdentifierType: ['int']
      Constant: int, 0
  BinaryOp: <
    ID: i
    Constant: int, 3
  UnaryOp: p++
    ID: i
Pragma: omp critical
Assignment: +=
  ID: sum
  Constant: int, 1
```

The same problem applies to other loops, Labels, and If statements, as in the following:

```c
for(int i = 0; i < 3; i++)
myLabel:
    #pragma omp critical
    sum += 1;
```

```c
while(sum < 100)
    #pragma omp critical
    sum += 1;
```

```c
if (sum < 100)
    #pragma omp critical
    sum += 1;
```

The following will not even parse, but it should:

```c
do
    #pragma omp critical
    sum += 1;
while(sum < 100)
```
eliben/pycparser
diff --git a/tests/test_c_parser.py b/tests/test_c_parser.py index ab6143f..3b336bf 100755 --- a/tests/test_c_parser.py +++ b/tests/test_c_parser.py @@ -1369,6 +1369,54 @@ class TestCParser_fundamentals(TestCParser_base): self.assertEqual(s1_ast.ext[2].type.type.decls[0].string, 'baz') self.assertEqual(s1_ast.ext[2].type.type.decls[0].coord.line, 9) + def test_pragmacomp_or_statement(self): + s1 = r''' + void main() { + int sum = 0; + for (int i; i < 3; i++) + #pragma omp critical + sum += 1; + + while(sum < 10) + #pragma omp critical + sum += 1; + + mylabel: + #pragma foo + sum += 10; + + if (sum > 10) + #pragma bar + sum = 10; + + switch (sum) + case 10: + #pragma foo + sum = 20; + } + ''' + s1_ast = self.parse(s1) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[1], For)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[1].stmt, Compound)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[1].stmt.block_items[0], Pragma)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[1].stmt.block_items[1], Assignment)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[2], While)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[2].stmt, Compound)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[2].stmt.block_items[0], Pragma)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[2].stmt.block_items[1], Assignment)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[3], Label)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[3].stmt, Compound)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[3].stmt.block_items[0], Pragma)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[3].stmt.block_items[1], Assignment)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[4], If)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[4].iftrue, Compound)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[4].iftrue.block_items[0], Pragma)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[4].iftrue.block_items[1], Assignment)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[5], Switch)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[5].stmt.stmts[0], Compound)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[5].stmt.stmts[0].block_items[0], Pragma)) + self.assertTrue(isinstance(s1_ast.ext[0].body.block_items[5].stmt.stmts[0].block_items[1], Assignment)) + class TestCParser_whole_code(TestCParser_base): """ Testing of parsing whole chunks of code.
{ "commit_name": "merge_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
2.18
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/eliben/pycparser.git@4992410bf8c2d6d7eb94703d0f6f94b5a9acaa0a#egg=pycparser pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pycparser channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/pycparser
[ "tests/test_c_parser.py::TestCParser_fundamentals::test_pragmacomp_or_statement" ]
[]
[ "tests/test_c_parser.py::TestCParser_fundamentals::test_FileAST", "tests/test_c_parser.py::TestCParser_fundamentals::test_anonymous_struct_union", "tests/test_c_parser.py::TestCParser_fundamentals::test_compound_literals", "tests/test_c_parser.py::TestCParser_fundamentals::test_compound_statement", "tests/test_c_parser.py::TestCParser_fundamentals::test_coords", "tests/test_c_parser.py::TestCParser_fundamentals::test_decl_inits", "tests/test_c_parser.py::TestCParser_fundamentals::test_decl_named_inits", "tests/test_c_parser.py::TestCParser_fundamentals::test_duplicate_typedef", "tests/test_c_parser.py::TestCParser_fundamentals::test_empty_toplevel_decl", "tests/test_c_parser.py::TestCParser_fundamentals::test_enums", "tests/test_c_parser.py::TestCParser_fundamentals::test_forloop_coord", "tests/test_c_parser.py::TestCParser_fundamentals::test_func_decls_with_array_dim_qualifiers", "tests/test_c_parser.py::TestCParser_fundamentals::test_function_definitions", "tests/test_c_parser.py::TestCParser_fundamentals::test_inline_specifier", "tests/test_c_parser.py::TestCParser_fundamentals::test_int128", "tests/test_c_parser.py::TestCParser_fundamentals::test_invalid_multiple_types_error", "tests/test_c_parser.py::TestCParser_fundamentals::test_multi_decls", "tests/test_c_parser.py::TestCParser_fundamentals::test_nested_decls", "tests/test_c_parser.py::TestCParser_fundamentals::test_offsetof", "tests/test_c_parser.py::TestCParser_fundamentals::test_pragma", "tests/test_c_parser.py::TestCParser_fundamentals::test_qualifiers_storage_specifiers", "tests/test_c_parser.py::TestCParser_fundamentals::test_simple_decls", "tests/test_c_parser.py::TestCParser_fundamentals::test_sizeof", "tests/test_c_parser.py::TestCParser_fundamentals::test_struct_bitfields", "tests/test_c_parser.py::TestCParser_fundamentals::test_struct_members_namespace", "tests/test_c_parser.py::TestCParser_fundamentals::test_struct_union", "tests/test_c_parser.py::TestCParser_fundamentals::test_struct_with_extra_semis_inside", "tests/test_c_parser.py::TestCParser_fundamentals::test_tags_namespace", "tests/test_c_parser.py::TestCParser_fundamentals::test_typedef", "tests/test_c_parser.py::TestCParser_fundamentals::test_unified_string_literals", "tests/test_c_parser.py::TestCParser_fundamentals::test_unified_wstring_literals", "tests/test_c_parser.py::TestCParser_fundamentals::test_vla", "tests/test_c_parser.py::TestCParser_whole_code::test_empty_statements", "tests/test_c_parser.py::TestCParser_whole_code::test_expressions", "tests/test_c_parser.py::TestCParser_whole_code::test_for_statement", "tests/test_c_parser.py::TestCParser_whole_code::test_statements", "tests/test_c_parser.py::TestCParser_whole_code::test_switch_statement", "tests/test_c_parser.py::TestCParser_whole_code::test_whole_file", "tests/test_c_parser.py::TestCParser_whole_code::test_whole_file_with_stdio", "tests/test_c_parser.py::TestCParser_typenames::test_ambiguous_parameters", "tests/test_c_parser.py::TestCParser_typenames::test_innerscope_reuse_typedef_name", "tests/test_c_parser.py::TestCParser_typenames::test_innerscope_typedef", "tests/test_c_parser.py::TestCParser_typenames::test_nested_function_decls", "tests/test_c_parser.py::TestCParser_typenames::test_parameter_reuse_typedef_name", "tests/test_c_parser.py::TestCParser_typenames::test_samescope_reuse_name" ]
[]
BSD License
2,237
[ "pycparser/c_parser.py" ]
[ "pycparser/c_parser.py" ]
pydata__sparse-116
217ca234309682fe1cb0c73aa6aa68cf444c1b6a
2018-03-02 05:00:44
b03b6b9a480a10a3cf59d7994292b9c5d3015cd5
codecov-io: # [Codecov](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=h1) Report
> Merging [#116](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=desc) into [master](https://codecov.io/gh/pydata/sparse/commit/97d5690116f499f43e79c3debfe7d7881c8b10ba?src=pr&el=desc) will **decrease** coverage by `0.05%`.
> The diff coverage is `96.45%`.

[![Impacted file tree graph](https://codecov.io/gh/pydata/sparse/pull/116/graphs/tree.svg?src=pr&token=H212u0Uxxw&width=650&height=150)](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=tree)

```diff
@@            Coverage Diff             @@
##           master     #116      +/-   ##
==========================================
- Coverage   96.04%   95.98%   -0.06%
==========================================
  Files          11       13       +2
  Lines        1896     1920      +24
==========================================
+ Hits         1821     1843      +22
- Misses         75       77       +2
```

| Flag | Coverage Δ | |
|---|---|---|
| #python27 | `95.41% <95.83%> (+0.26%)` | :arrow_up: |
| #python36 | `95.78% <95.83%> (-0.27%)` | :arrow_down: |

| [Impacted Files](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [sparse/tests/test\_coo.py](https://codecov.io/gh/pydata/sparse/pull/116/diff?src=pr&el=tree#diff-c3BhcnNlL3Rlc3RzL3Rlc3RfY29vLnB5) | `99.83% <100%> (ø)` | :arrow_up: |
| [sparse/coo/\_core.py](https://codecov.io/gh/pydata/sparse/pull/116/diff?src=pr&el=tree#diff-c3BhcnNlL2Nvby9fY29yZS5weQ==) | `93.69% <100%> (ø)` | |
| [sparse/coo/\_\_init\_\_.py](https://codecov.io/gh/pydata/sparse/pull/116/diff?src=pr&el=tree#diff-c3BhcnNlL2Nvby9fX2luaXRfXy5weQ==) | `100% <100%> (ø)` | |
| [sparse/compatibility.py](https://codecov.io/gh/pydata/sparse/pull/116/diff?src=pr&el=tree#diff-c3BhcnNlL2NvbXBhdGliaWxpdHkucHk=) | `84.61% <83.33%> (-15.39%)` | :arrow_down: |
| [sparse/coo/\_common.py](https://codecov.io/gh/pydata/sparse/pull/116/diff?src=pr&el=tree#diff-c3BhcnNlL2Nvby9fY29tbW9uLnB5) | `96.26% <96.26%> (ø)` | |
| [sparse/coo/\_elemwise.py](https://codecov.io/gh/pydata/sparse/pull/116/diff?src=pr&el=tree#diff-c3BhcnNlL2Nvby9fZWxlbXdpc2UucHk=) | `97.03% <97.03%> (ø)` | |

------

[Continue to review full report at Codecov](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=footer). Last update [97d5690...74dae7e](https://codecov.io/gh/pydata/sparse/pull/116?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).

hameerabbasi: cc @mrocklin Feedback welcome. This is blocking some other work, so quick is good.

mrocklin: In general this seems fine to me. I left a couple of small comments.

hameerabbasi: Hmm. Seems like there are some issues between Travis and GitHub today... On the other PR, Travis builds didn't report after finishing, and on this one, it isn't starting.
diff --git a/.travis.yml b/.travis.yml index ab81770..84462d0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,11 +25,9 @@ install: # Install sparse - pip install -e .[tests-all] -before_script: - - flake8 - script: - py.test --cov-config .coveragerc --cov=./ + - flake8 after_success: - codecov -F python${TRAVIS_PYTHON_VERSION//./} diff --git a/docs/generated/sparse.COO.nanreduce.rst b/docs/generated/sparse.COO.nanreduce.rst deleted file mode 100644 index 4f713b1..0000000 --- a/docs/generated/sparse.COO.nanreduce.rst +++ /dev/null @@ -1,6 +0,0 @@ -COO.nanreduce -============= - -.. currentmodule:: sparse - -.. automethod:: COO.nanreduce \ No newline at end of file diff --git a/docs/generated/sparse.COO.rst b/docs/generated/sparse.COO.rst index f66f9d3..1ca472e 100644 --- a/docs/generated/sparse.COO.rst +++ b/docs/generated/sparse.COO.rst @@ -44,8 +44,6 @@ COO COO.min COO.prod - COO.nanreduce - .. rubric:: :ref:`Converting to other formats <converting>` .. autosummary:: :toctree: diff --git a/docs/generated/sparse.nanreduce.rst b/docs/generated/sparse.nanreduce.rst new file mode 100644 index 0000000..454be0b --- /dev/null +++ b/docs/generated/sparse.nanreduce.rst @@ -0,0 +1,6 @@ +nanreduce +========= + +.. currentmodule:: sparse + +.. autofunction:: nanreduce \ No newline at end of file diff --git a/docs/generated/sparse.rst b/docs/generated/sparse.rst index 0c4b825..451f65b 100644 --- a/docs/generated/sparse.rst +++ b/docs/generated/sparse.rst @@ -35,6 +35,8 @@ API nanprod + nanreduce + nansum random diff --git a/sparse/__init__.py b/sparse/__init__.py index 488fdfa..fc11c47 100644 --- a/sparse/__init__.py +++ b/sparse/__init__.py @@ -1,10 +1,5 @@ -from .coo import COO, elemwise, tensordot, concatenate, stack, dot, triu, tril, where, \ - nansum, nanprod, nanmin, nanmax +from .coo import * from .dok import DOK from .sparse_array import SparseArray from .utils import random from ._version import __version__ - -__all__ = ["SparseArray", "COO", "DOK", - "tensordot", "concatenate", "stack", "dot", "triu", "tril", "random", "where", - "nansum", "nanprod", "nanmin", "nanmax"] diff --git a/sparse/compatibility.py b/sparse/compatibility.py index f680bfe..77fdda6 100644 --- a/sparse/compatibility.py +++ b/sparse/compatibility.py @@ -1,21 +1,18 @@ # flake8: noqa F401 +import sys -try: - from itertools import izip_longest as zip_longest -except ImportError: +if sys.version_info < (2, 7): + raise ImportError('Need at least Python 2.7.') + +if sys.version_info[0] == 3 and sys.version_info[1] < 5: + raise ImportError('Need at least Python 3.5 if using Python 3.') + +if sys.version_info[0] >= 3: from itertools import zip_longest + from builtins import int, range, zip +else: + from itertools import izip_longest as zip_longest + from itertools import izip as zip -try: int = long -except NameError: - from builtins import int - -try: range = xrange -except NameError: - from builtins import range - -try: - from itertools import izip as zip -except ImportError: - from builtins import zip diff --git a/sparse/coo/__init__.py b/sparse/coo/__init__.py new file mode 100644 index 0000000..c47dbe5 --- /dev/null +++ b/sparse/coo/__init__.py @@ -0,0 +1,4 @@ +from .core import COO +from .umath import elemwise +from .common import tensordot, dot, concatenate, stack, triu, tril, where, \ + nansum, nanprod, nanmin, nanmax, nanreduce diff --git a/sparse/coo/common.py b/sparse/coo/common.py new file mode 100644 index 0000000..48baa90 --- /dev/null +++ b/sparse/coo/common.py @@ -0,0 +1,627 @@ +from functools import reduce 
+import operator +import warnings + +import numpy as np +import scipy.sparse + +from ..sparse_array import SparseArray +from ..compatibility import range +from ..utils import isscalar + + +def asCOO(x, name='asCOO', check=True): + """ + Convert the input to :obj:`COO`. Passes through :obj:`COO` objects as-is. + + Parameters + ---------- + x : Union[SparseArray, scipy.sparse.spmatrix, numpy.ndarray] + The input array to convert. + name : str, optional + The name of the operation to use in the exception. + check : bool, optional + Whether to check for a dense input. + + Returns + ------- + COO + The converted :obj:`COO` array. + + Raises + ------ + ValueError + If ``check`` is true and a dense input is supplied. + """ + from .core import COO + + if check and not isinstance(x, (SparseArray, scipy.sparse.spmatrix)): + raise ValueError('Performing this operation would produce a dense result: %s' % name) + + if not isinstance(x, COO): + x = COO(x) + + return x + + +def linear_loc(coords, shape, signed=False): + n = reduce(operator.mul, shape, 1) + if signed: + n = -n + dtype = np.min_scalar_type(n) + out = np.zeros(coords.shape[1], dtype=dtype) + tmp = np.zeros(coords.shape[1], dtype=dtype) + strides = 1 + for i, d in enumerate(shape[::-1]): + np.multiply(coords[-(i + 1), :], strides, out=tmp, dtype=dtype) + np.add(tmp, out, out=out) + strides *= d + return out + + +def tensordot(a, b, axes=2): + """ + Perform the equivalent of :obj:`numpy.tensordot`. + + Parameters + ---------- + a, b : Union[COO, np.ndarray, scipy.sparse.spmatrix] + The arrays to perform the :code:`tensordot` operation on. + axes : tuple[Union[int, tuple[int], Union[int, tuple[int]], optional + The axes to match when performing the sum. + + Returns + ------- + Union[COO, numpy.ndarray] + The result of the operation. 
+ + See Also + -------- + numpy.tensordot : NumPy equivalent function + """ + # Much of this is stolen from numpy/core/numeric.py::tensordot + # Please see license at https://github.com/numpy/numpy/blob/master/LICENSE.txt + from .core import COO + + try: + iter(axes) + except TypeError: + axes_a = list(range(-axes, 0)) + axes_b = list(range(0, axes)) + else: + axes_a, axes_b = axes + try: + na = len(axes_a) + axes_a = list(axes_a) + except TypeError: + axes_a = [axes_a] + na = 1 + try: + nb = len(axes_b) + axes_b = list(axes_b) + except TypeError: + axes_b = [axes_b] + nb = 1 + + # a, b = asarray(a), asarray(b) # <--- modified + as_ = a.shape + nda = a.ndim + bs = b.shape + ndb = b.ndim + equal = True + if na != nb: + equal = False + else: + for k in range(na): + if as_[axes_a[k]] != bs[axes_b[k]]: + equal = False + break + if axes_a[k] < 0: + axes_a[k] += nda + if axes_b[k] < 0: + axes_b[k] += ndb + if not equal: + raise ValueError("shape-mismatch for sum") + + # Move the axes to sum over to the end of "a" + # and to the front of "b" + notin = [k for k in range(nda) if k not in axes_a] + newaxes_a = notin + axes_a + N2 = 1 + for axis in axes_a: + N2 *= as_[axis] + newshape_a = (-1, N2) + olda = [as_[axis] for axis in notin] + + notin = [k for k in range(ndb) if k not in axes_b] + newaxes_b = axes_b + notin + N2 = 1 + for axis in axes_b: + N2 *= bs[axis] + newshape_b = (N2, -1) + oldb = [bs[axis] for axis in notin] + + at = a.transpose(newaxes_a).reshape(newshape_a) + bt = b.transpose(newaxes_b).reshape(newshape_b) + res = _dot(at, bt) + if isinstance(res, scipy.sparse.spmatrix): + if res.nnz > reduce(operator.mul, res.shape) / 2: + res = res.todense() + else: + res = COO.from_scipy_sparse(res) # <--- modified + res.has_duplicates = False + if isinstance(res, np.matrix): + res = np.asarray(res) + return res.reshape(olda + oldb) + + +def dot(a, b): + """ + Perform the equivalent of :obj:`numpy.dot` on two arrays. + + Parameters + ---------- + a, b : Union[COO, np.ndarray, scipy.sparse.spmatrix] + The arrays to perform the :code:`dot` operation on. + + Returns + ------- + Union[COO, numpy.ndarray] + The result of the operation. + + See Also + -------- + numpy.dot : NumPy equivalent function. + COO.dot : Equivalent function for COO objects. + """ + if not hasattr(a, 'ndim') or not hasattr(b, 'ndim'): + raise NotImplementedError( + "Cannot perform dot product on types %s, %s" % + (type(a), type(b))) + + if a.ndim == 1 and b.ndim == 1: + return (a * b).sum() + + a_axis = -1 + b_axis = -2 + + if b.ndim == 1: + b_axis = -1 + + return tensordot(a, b, axes=(a_axis, b_axis)) + + +def _dot(a, b): + from .core import COO + + if isinstance(a, COO): + a.sum_duplicates() + if isinstance(b, COO): + b.sum_duplicates() + if isinstance(b, COO) and not isinstance(a, COO): + return _dot(b.T, a.T).T + aa = a.tocsr() + + if isinstance(b, (COO, scipy.sparse.spmatrix)): + b = b.tocsc() + return aa.dot(b) + + +def concatenate(arrays, axis=0): + """ + Concatenate the input arrays along the given dimension. + + Parameters + ---------- + arrays : Iterable[Union[COO, numpy.ndarray, scipy.sparse.spmatrix]] + The input arrays to concatenate. + axis : int, optional + The axis along which to concatenate the input arrays. The default is zero. + + Returns + ------- + COO + The output concatenated array. 
+ + See Also + -------- + numpy.concatenate : NumPy equivalent function + """ + from .core import COO + + arrays = [x if isinstance(x, COO) else COO(x) for x in arrays] + if axis < 0: + axis = axis + arrays[0].ndim + assert all(x.shape[ax] == arrays[0].shape[ax] + for x in arrays + for ax in set(range(arrays[0].ndim)) - {axis}) + nnz = 0 + dim = sum(x.shape[axis] for x in arrays) + shape = list(arrays[0].shape) + shape[axis] = dim + + coords_dtype = np.min_scalar_type(max(shape) - 1) if len(shape) != 0 else np.uint8 + data = np.concatenate([x.data for x in arrays]) + coords = np.concatenate([x.coords for x in arrays], axis=1).astype(coords_dtype) + + dim = 0 + for x in arrays: + if dim: + coords[axis, nnz:x.nnz + nnz] += dim + dim += x.shape[axis] + nnz += x.nnz + + has_duplicates = any(x.has_duplicates for x in arrays) + + return COO(coords, data, shape=shape, has_duplicates=has_duplicates, + sorted=(axis == 0) and all(a.sorted for a in arrays)) + + +def stack(arrays, axis=0): + """ + Stack the input arrays along the given dimension. + + Parameters + ---------- + arrays : Iterable[Union[COO, numpy.ndarray, scipy.sparse.spmatrix]] + The input arrays to stack. + axis : int, optional + The axis along which to stack the input arrays. + + Returns + ------- + COO + The output stacked array. + + See Also + -------- + numpy.stack : NumPy equivalent function + """ + from .core import COO + + assert len(set(x.shape for x in arrays)) == 1 + arrays = [x if isinstance(x, COO) else COO(x) for x in arrays] + if axis < 0: + axis = axis + arrays[0].ndim + 1 + data = np.concatenate([x.data for x in arrays]) + coords = np.concatenate([x.coords for x in arrays], axis=1) + shape = list(arrays[0].shape) + shape.insert(axis, len(arrays)) + + coords_dtype = np.min_scalar_type(max(shape) - 1) if len(shape) != 0 else np.uint8 + + nnz = 0 + dim = 0 + new = np.empty(shape=(coords.shape[1],), dtype=coords_dtype) + for x in arrays: + new[nnz:x.nnz + nnz] = dim + dim += 1 + nnz += x.nnz + + has_duplicates = any(x.has_duplicates for x in arrays) + coords = [coords[i].astype(coords_dtype) for i in range(coords.shape[0])] + coords.insert(axis, new) + coords = np.stack(coords, axis=0) + + return COO(coords, data, shape=shape, has_duplicates=has_duplicates, + sorted=(axis == 0) and all(a.sorted for a in arrays)) + + +def triu(x, k=0): + """ + Returns an array with all elements below the k-th diagonal set to zero. + + Parameters + ---------- + x : COO + The input array. + k : int, optional + The diagonal below which elements are set to zero. The default is + zero, which corresponds to the main diagonal. + + Returns + ------- + COO + The output upper-triangular matrix. + + See Also + -------- + numpy.triu : NumPy equivalent function + """ + from .core import COO + + if not x.ndim >= 2: + raise NotImplementedError('sparse.triu is not implemented for scalars or 1-D arrays.') + + mask = x.coords[-2] + k <= x.coords[-1] + + coords = x.coords[:, mask] + data = x.data[mask] + + return COO(coords, data, x.shape, x.has_duplicates, x.sorted) + + +def tril(x, k=0): + """ + Returns an array with all elements above the k-th diagonal set to zero. + + Parameters + ---------- + x : COO + The input array. + k : int, optional + The diagonal above which elements are set to zero. The default is + zero, which corresponds to the main diagonal. + + Returns + ------- + COO + The output lower-triangular matrix. 
+ + See Also + -------- + numpy.tril : NumPy equivalent function + """ + from .core import COO + + if not x.ndim >= 2: + raise NotImplementedError('sparse.tril is not implemented for scalars or 1-D arrays.') + + mask = x.coords[-2] + k >= x.coords[-1] + + coords = x.coords[:, mask] + data = x.data[mask] + + return COO(coords, data, x.shape, x.has_duplicates, x.sorted) + + +def nansum(x, axis=None, keepdims=False, dtype=None, out=None): + """ + Performs a ``NaN`` skipping sum operation along the given axes. Uses all axes by default. + + Parameters + ---------- + x : SparseArray + The array to perform the reduction on. + axis : Union[int, Iterable[int]], optional + The axes along which to sum. Uses all axes by default. + keepdims : bool, optional + Whether or not to keep the dimensions of the original array. + dtype: numpy.dtype + The data type of the output array. + + Returns + ------- + COO + The reduced output sparse array. + + See Also + -------- + :obj:`COO.sum` : Function without ``NaN`` skipping. + numpy.nansum : Equivalent Numpy function. + """ + assert out is None + x = asCOO(x, name='nansum') + return nanreduce(x, np.add, axis=axis, keepdims=keepdims, dtype=dtype) + + +def nanmax(x, axis=None, keepdims=False, dtype=None, out=None): + """ + Maximize along the given axes, skipping ``NaN`` values. Uses all axes by default. + + Parameters + ---------- + x : SparseArray + The array to perform the reduction on. + axis : Union[int, Iterable[int]], optional + The axes along which to maximize. Uses all axes by default. + keepdims : bool, optional + Whether or not to keep the dimensions of the original array. + dtype: numpy.dtype + The data type of the output array. + + Returns + ------- + COO + The reduced output sparse array. + + See Also + -------- + :obj:`COO.max` : Function without ``NaN`` skipping. + numpy.nanmax : Equivalent Numpy function. + """ + assert out is None + x = asCOO(x, name='nanmax') + + ar = x.reduce(np.fmax, axis=axis, keepdims=keepdims, + dtype=dtype) + + if (isscalar(ar) and np.isnan(ar)) or np.isnan(ar.data).any(): + warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2) + + return ar + + +def nanmin(x, axis=None, keepdims=False, dtype=None, out=None): + """ + Minimize along the given axes, skipping ``NaN`` values. Uses all axes by default. + + Parameters + ---------- + x : SparseArray + The array to perform the reduction on. + axis : Union[int, Iterable[int]], optional + The axes along which to minimize. Uses all axes by default. + keepdims : bool, optional + Whether or not to keep the dimensions of the original array. + dtype: numpy.dtype + The data type of the output array. + + Returns + ------- + COO + The reduced output sparse array. + + See Also + -------- + :obj:`COO.min` : Function without ``NaN`` skipping. + numpy.nanmin : Equivalent Numpy function. + """ + assert out is None + x = asCOO(x, name='nanmin') + + ar = x.reduce(np.fmin, axis=axis, keepdims=keepdims, + dtype=dtype) + + if (isscalar(ar) and np.isnan(ar)) or np.isnan(ar.data).any(): + warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2) + + return ar + + +def nanprod(x, axis=None, keepdims=False, dtype=None, out=None): + """ + Performs a product operation along the given axes, skipping ``NaN`` values. + Uses all axes by default. + + Parameters + ---------- + x : SparseArray + The array to perform the reduction on. + axis : Union[int, Iterable[int]], optional + The axes along which to multiply. Uses all axes by default. 
+ keepdims : bool, optional + Whether or not to keep the dimensions of the original array. + dtype: numpy.dtype + The data type of the output array. + + Returns + ------- + COO + The reduced output sparse array. + + See Also + -------- + :obj:`COO.prod` : Function without ``NaN`` skipping. + numpy.nanprod : Equivalent Numpy function. + """ + assert out is None + x = asCOO(x) + return nanreduce(x, np.multiply, axis=axis, keepdims=keepdims, dtype=dtype) + + +def where(condition, x=None, y=None): + """ + Select values from either ``x`` or ``y`` depending on ``condition``. + If ``x`` and ``y`` are not given, returns indices where ``condition`` + is nonzero. + + Performs the equivalent of :obj:`numpy.where`. + + Parameters + ---------- + condition : SparseArray + The condition based on which to select values from + either ``x`` or ``y``. + x : SparseArray, optional + The array to select values from if ``condition`` is nonzero. + y : SparseArray, optional + The array to select values from if ``condition`` is zero. + + Returns + ------- + COO + The output array with selected values if ``x`` and ``y`` are given; + else where the array is nonzero. + + Raises + ------ + ValueError + If the operation would produce a dense result; or exactly one of + ``x`` and ``y`` are given. + + See Also + -------- + numpy.where : Equivalent Numpy function. + """ + from .umath import elemwise + + x_given = x is not None + y_given = y is not None + + if not (x_given or y_given): + condition = asCOO(condition, name=str(np.where)) + return tuple(condition.coords) + + if x_given != y_given: + raise ValueError('either both or neither of x and y should be given') + + return elemwise(np.where, condition, x, y) + + +def _replace_nan(array, value): + """ + Replaces ``NaN``s in ``array`` with ``value``. + + Parameters + ---------- + array : COO + The input array. + value : numpy.number + The values to replace ``NaN`` with. + + Returns + ------- + COO + A copy of ``array`` with the ``NaN``s replaced. + """ + from .core import COO + + if not np.issubdtype(array.dtype, np.floating): + return array + + data = np.copy(array.data) + data[np.isnan(data)] = value + + return COO(array.coords, data, shape=array.shape, + has_duplicates=array.has_duplicates, + sorted=array.sorted) + + +def nanreduce(x, method, identity=None, axis=None, keepdims=False, **kwargs): + """ + Performs an ``NaN`` skipping reduction on this array. See the documentation + on :obj:`COO.reduce` for examples. + + Parameters + ---------- + x : COO + The array to reduce. + method : numpy.ufunc + The method to use for performing the reduction. + identity : numpy.number + The identity value for this reduction. Inferred from ``method`` if not given. + Note that some ``ufunc`` objects don't have this, so it may be necessary to give it. + axis : Union[int, Iterable[int]], optional + The axes along which to perform the reduction. Uses all axes by default. + keepdims : bool, optional + Whether or not to keep the dimensions of the original array. + kwargs : dict + Any extra arguments to pass to the reduction operation. + + Returns + ------- + COO + The result of the reduction operation. + + Raises + ------ + ValueError + If reducing an all-zero axis would produce a nonzero result. + + See Also + -------- + COO.reduce : Similar method without ``NaN`` skipping functionality. 
+ """ + arr = _replace_nan(x, method.identity if identity is None else identity) + return arr.reduce(method, axis, keepdims, **kwargs) diff --git a/sparse/coo.py b/sparse/coo/core.py similarity index 59% rename from sparse/coo.py rename to sparse/coo/core.py index 5100167..6fbfdd9 100644 --- a/sparse/coo.py +++ b/sparse/coo/core.py @@ -1,20 +1,16 @@ -from __future__ import absolute_import, division, print_function - -import warnings -from collections import Iterable, defaultdict, deque -from functools import reduce -from itertools import product import numbers -import operator +from collections import Iterable, defaultdict, deque import numpy as np import scipy.sparse from numpy.lib.mixins import NDArrayOperatorsMixin -from .slicing import normalize_index -from .utils import _zero_of_dtype, isscalar, PositinalArgumentPartial -from .sparse_array import SparseArray -from .compatibility import int, zip_longest, range, zip +from .common import dot +from .umath import elemwise, broadcast_to +from ..compatibility import int, range +from ..slicing import normalize_index +from ..sparse_array import SparseArray +from ..utils import _zero_of_dtype class COO(SparseArray, NDArrayOperatorsMixin): @@ -185,7 +181,7 @@ class COO(SparseArray, NDArrayOperatorsMixin): if cache: self.enable_caching() if data is None: - from .dok import DOK + from ..dok import DOK if isinstance(coords, COO): self._make_shallow_copy_of(coords) @@ -596,42 +592,6 @@ class COO(SparseArray, NDArrayOperatorsMixin): return self.reduce(method, **kwargs) - def nanreduce(self, method, identity=None, axis=None, keepdims=False, **kwargs): - """ - Performs an ``NaN`` skipping reduction on this array. See the documentation - on :obj:`COO.reduce` for examples. - - Parameters - ---------- - method : numpy.ufunc - The method to use for performing the reduction. - identity : numpy.number - The identity value for this reduction. Inferred from ``method`` if not given. - Note that some ``ufunc`` objects don't have this, so it may be necessary to give it. - axis : Union[int, Iterable[int]], optional - The axes along which to perform the reduction. Uses all axes by default. - keepdims : bool, optional - Whether or not to keep the dimensions of the original array. - kwargs : dict - Any extra arguments to pass to the reduction operation. - - Returns - ------- - COO - The result of the reduction operation. - - Raises - ------ - ValueError - If reducing an all-zero axis would produce a nonzero result. - - See Also - -------- - COO.reduce : Similar method without ``NaN`` skipping functionality. - """ - arr = _replace_nan(self, method.identity if identity is None else identity) - return arr.reduce(method, axis, keepdims, **kwargs) - def reduce(self, method, axis=(0,), keepdims=False, **kwargs): """ Performs a reduction operation on this array. 
@@ -1231,7 +1191,9 @@ class COO(SparseArray, NDArrayOperatorsMixin): >>> np.array_equal(np.flatnonzero(x), s.linear_loc()) True """ - return _linear_loc(self.coords, self.shape, signed) + from .common import linear_loc + + return linear_loc(self.coords, self.shape, signed) def reshape(self, shape): """ @@ -1514,15 +1476,7 @@ class COO(SparseArray, NDArrayOperatorsMixin): -------- :obj:`numpy.broadcast_to` : NumPy equivalent function """ - if shape == self.shape: - return self - - result_shape = _get_broadcast_shape(self.shape, shape, is_result=True) - params = _get_broadcast_parameters(self.shape, result_shape) - coords, data = _get_expanded_coords_data(self.coords, self.data, params, result_shape) - - return COO(coords, data, shape=result_shape, has_duplicates=self.has_duplicates, - sorted=self.sorted) + return broadcast_to(self, shape) def round(self, decimals=0, out=None): """ @@ -1611,150 +1565,6 @@ class COO(SparseArray, NDArrayOperatorsMixin): "large sparse array to dense") -def tensordot(a, b, axes=2): - """ - Perform the equivalent of :obj:`numpy.tensordot`. - - Parameters - ---------- - a, b : Union[COO, np.ndarray, scipy.sparse.spmatrix] - The arrays to perform the :code:`tensordot` operation on. - axes : tuple[Union[int, tuple[int], Union[int, tuple[int]], optional - The axes to match when performing the sum. - - Returns - ------- - Union[COO, numpy.ndarray] - The result of the operation. - - See Also - -------- - numpy.tensordot : NumPy equivalent function - """ - # Much of this is stolen from numpy/core/numeric.py::tensordot - # Please see license at https://github.com/numpy/numpy/blob/master/LICENSE.txt - try: - iter(axes) - except TypeError: - axes_a = list(range(-axes, 0)) - axes_b = list(range(0, axes)) - else: - axes_a, axes_b = axes - try: - na = len(axes_a) - axes_a = list(axes_a) - except TypeError: - axes_a = [axes_a] - na = 1 - try: - nb = len(axes_b) - axes_b = list(axes_b) - except TypeError: - axes_b = [axes_b] - nb = 1 - - # a, b = asarray(a), asarray(b) # <--- modified - as_ = a.shape - nda = a.ndim - bs = b.shape - ndb = b.ndim - equal = True - if na != nb: - equal = False - else: - for k in range(na): - if as_[axes_a[k]] != bs[axes_b[k]]: - equal = False - break - if axes_a[k] < 0: - axes_a[k] += nda - if axes_b[k] < 0: - axes_b[k] += ndb - if not equal: - raise ValueError("shape-mismatch for sum") - - # Move the axes to sum over to the end of "a" - # and to the front of "b" - notin = [k for k in range(nda) if k not in axes_a] - newaxes_a = notin + axes_a - N2 = 1 - for axis in axes_a: - N2 *= as_[axis] - newshape_a = (-1, N2) - olda = [as_[axis] for axis in notin] - - notin = [k for k in range(ndb) if k not in axes_b] - newaxes_b = axes_b + notin - N2 = 1 - for axis in axes_b: - N2 *= bs[axis] - newshape_b = (N2, -1) - oldb = [bs[axis] for axis in notin] - - at = a.transpose(newaxes_a).reshape(newshape_a) - bt = b.transpose(newaxes_b).reshape(newshape_b) - res = _dot(at, bt) - if isinstance(res, scipy.sparse.spmatrix): - if res.nnz > reduce(operator.mul, res.shape) / 2: - res = res.todense() - else: - res = COO.from_scipy_sparse(res) # <--- modified - res.has_duplicates = False - if isinstance(res, np.matrix): - res = np.asarray(res) - return res.reshape(olda + oldb) - - -def dot(a, b): - """ - Perform the equivalent of :obj:`numpy.dot` on two arrays. - - Parameters - ---------- - a, b : Union[COO, np.ndarray, scipy.sparse.spmatrix] - The arrays to perform the :code:`dot` operation on. 
- - Returns - ------- - Union[COO, numpy.ndarray] - The result of the operation. - - See Also - -------- - numpy.dot : NumPy equivalent function. - COO.dot : Equivalent function for COO objects. - """ - if not hasattr(a, 'ndim') or not hasattr(b, 'ndim'): - raise NotImplementedError( - "Cannot perform dot product on types %s, %s" % - (type(a), type(b))) - - if a.ndim == 1 and b.ndim == 1: - return (a * b).sum() - - a_axis = -1 - b_axis = -2 - - if b.ndim == 1: - b_axis = -1 - - return tensordot(a, b, axes=(a_axis, b_axis)) - - -def _dot(a, b): - if isinstance(a, COO): - a.sum_duplicates() - if isinstance(b, COO): - b.sum_duplicates() - if isinstance(b, COO) and not isinstance(a, COO): - return _dot(b.T, a.T).T - aa = a.tocsr() - - if isinstance(b, (COO, scipy.sparse.spmatrix)): - b = b.tocsc() - return aa.dot(b) - - def _keepdims(original, new, axis): shape = list(original.shape) for ax in axis: @@ -1784,238 +1594,6 @@ def _mask(coords, idx, shape): return mask -def _replace_nan(array, value): - """ - Replaces ``NaN``s in ``array`` with ``value``. - - Parameters - ---------- - array : COO - The input array. - value : numpy.number - The values to replace ``NaN`` with. - - Returns - ------- - COO - A copy of ``array`` with the ``NaN``s replaced. - """ - if not np.issubdtype(array.dtype, np.floating): - return array - - data = np.copy(array.data) - data[np.isnan(data)] = value - - return COO(array.coords, data, shape=array.shape, - has_duplicates=array.has_duplicates, - sorted=array.sorted) - - -def concatenate(arrays, axis=0): - """ - Concatenate the input arrays along the given dimension. - - Parameters - ---------- - arrays : Iterable[Union[COO, numpy.ndarray, scipy.sparse.spmatrix]] - The input arrays to concatenate. - axis : int, optional - The axis along which to concatenate the input arrays. The default is zero. - - Returns - ------- - COO - The output concatenated array. - - See Also - -------- - numpy.concatenate : NumPy equivalent function - """ - arrays = [x if isinstance(x, COO) else COO(x) for x in arrays] - if axis < 0: - axis = axis + arrays[0].ndim - assert all(x.shape[ax] == arrays[0].shape[ax] - for x in arrays - for ax in set(range(arrays[0].ndim)) - {axis}) - nnz = 0 - dim = sum(x.shape[axis] for x in arrays) - shape = list(arrays[0].shape) - shape[axis] = dim - - coords_dtype = np.min_scalar_type(max(shape) - 1) if len(shape) != 0 else np.uint8 - data = np.concatenate([x.data for x in arrays]) - coords = np.concatenate([x.coords for x in arrays], axis=1).astype(coords_dtype) - - dim = 0 - for x in arrays: - if dim: - coords[axis, nnz:x.nnz + nnz] += dim - dim += x.shape[axis] - nnz += x.nnz - - has_duplicates = any(x.has_duplicates for x in arrays) - - return COO(coords, data, shape=shape, has_duplicates=has_duplicates, - sorted=(axis == 0) and all(a.sorted for a in arrays)) - - -def stack(arrays, axis=0): - """ - Stack the input arrays along the given dimension. - - Parameters - ---------- - arrays : Iterable[Union[COO, numpy.ndarray, scipy.sparse.spmatrix]] - The input arrays to stack. - axis : int, optional - The axis along which to stack the input arrays. - - Returns - ------- - COO - The output stacked array. 
- - See Also - -------- - numpy.stack : NumPy equivalent function - """ - assert len(set(x.shape for x in arrays)) == 1 - arrays = [x if isinstance(x, COO) else COO(x) for x in arrays] - if axis < 0: - axis = axis + arrays[0].ndim + 1 - data = np.concatenate([x.data for x in arrays]) - coords = np.concatenate([x.coords for x in arrays], axis=1) - shape = list(arrays[0].shape) - shape.insert(axis, len(arrays)) - - coords_dtype = np.min_scalar_type(max(shape) - 1) if len(shape) != 0 else np.uint8 - - nnz = 0 - dim = 0 - new = np.empty(shape=(coords.shape[1],), dtype=coords_dtype) - for x in arrays: - new[nnz:x.nnz + nnz] = dim - dim += 1 - nnz += x.nnz - - has_duplicates = any(x.has_duplicates for x in arrays) - coords = [coords[i].astype(coords_dtype) for i in range(coords.shape[0])] - coords.insert(axis, new) - coords = np.stack(coords, axis=0) - - return COO(coords, data, shape=shape, has_duplicates=has_duplicates, - sorted=(axis == 0) and all(a.sorted for a in arrays)) - - -def triu(x, k=0): - """ - Returns an array with all elements below the k-th diagonal set to zero. - - Parameters - ---------- - x : COO - The input array. - k : int, optional - The diagonal below which elements are set to zero. The default is - zero, which corresponds to the main diagonal. - - Returns - ------- - COO - The output upper-triangular matrix. - - See Also - -------- - numpy.triu : NumPy equivalent function - """ - if not x.ndim >= 2: - raise NotImplementedError('sparse.triu is not implemented for scalars or 1-D arrays.') - - mask = x.coords[-2] + k <= x.coords[-1] - - coords = x.coords[:, mask] - data = x.data[mask] - - return COO(coords, data, x.shape, x.has_duplicates, x.sorted) - - -def tril(x, k=0): - """ - Returns an array with all elements above the k-th diagonal set to zero. - - Parameters - ---------- - x : COO - The input array. - k : int, optional - The diagonal above which elements are set to zero. The default is - zero, which corresponds to the main diagonal. - - Returns - ------- - COO - The output lower-triangular matrix. - - See Also - -------- - numpy.tril : NumPy equivalent function - """ - if not x.ndim >= 2: - raise NotImplementedError('sparse.tril is not implemented for scalars or 1-D arrays.') - - mask = x.coords[-2] + k >= x.coords[-1] - - coords = x.coords[:, mask] - data = x.data[mask] - - return COO(coords, data, x.shape, x.has_duplicates, x.sorted) - - -# (c) Paul Panzer -# Taken from https://stackoverflow.com/a/47833496/774273 -# License: https://creativecommons.org/licenses/by-sa/3.0/ -def _match_arrays(a, b): - """ - Finds all indexes into a and b such that a[i] = b[j]. The outputs are sorted - in lexographical order. - - Parameters - ---------- - a, b : np.ndarray - The input 1-D arrays to match. If matching of multiple fields is - needed, use np.recarrays. These two arrays must be sorted. - - Returns - ------- - a_idx, b_idx : np.ndarray - The output indices of every possible pair of matching elements. 
- """ - if len(a) == 0 or len(b) == 0: - return np.array([], dtype=np.uint8), np.array([], dtype=np.uint8) - asw = np.r_[0, 1 + np.flatnonzero(a[:-1] != a[1:]), len(a)] - bsw = np.r_[0, 1 + np.flatnonzero(b[:-1] != b[1:]), len(b)] - al, bl = np.diff(asw), np.diff(bsw) - na = len(al) - asw, bsw = asw, bsw - abunq = np.r_[a[asw[:-1]], b[bsw[:-1]]] - m = np.argsort(abunq, kind='mergesort') - mv = abunq[m] - midx = np.flatnonzero(mv[:-1] == mv[1:]) - ai, bi = m[midx], m[midx + 1] - na - aic = np.r_[0, np.cumsum(al[ai])] - a_idx = np.ones((aic[-1],), dtype=np.int_) - a_idx[aic[:-1]] = asw[ai] - a_idx[aic[1:-1]] -= asw[ai[:-1]] + al[ai[:-1]] - 1 - a_idx = np.repeat(np.cumsum(a_idx), np.repeat(bl[bi], al[ai])) - bi = np.repeat(bi, al[ai]) - bic = np.r_[0, np.cumsum(bl[bi])] - b_idx = np.ones((bic[-1],), dtype=np.int_) - b_idx[bic[:-1]] = bsw[bi] - b_idx[bic[1:-1]] -= bsw[bi[:-1]] + bl[bi[:-1]] - 1 - b_idx = np.cumsum(b_idx) - return a_idx, b_idx - - def _grouped_reduce(x, groups, method, **kwargs): """ Performs a :code:`ufunc` grouped reduce. @@ -2049,769 +1627,3 @@ def _grouped_reduce(x, groups, method, **kwargs): result = method.reduceat(x, inv_idx, **kwargs) counts = np.diff(np.concatenate((inv_idx, [len(x)]))) return result, inv_idx, counts - - -def elemwise(func, *args, **kwargs): - """ - Apply a function to any number of arguments. - - Parameters - ---------- - func : Callable - The function to apply. Must support broadcasting. - args : tuple, optional - The arguments to the function. Can be :obj:`SparseArray` objects - or :obj:`scipy.sparse.spmatrix` objects. - kwargs : dict, optional - Any additional arguments to pass to the function. - - Returns - ------- - COO - The result of applying the function. - - Raises - ------ - ValueError - If the operation would result in a dense matrix, or if the operands - don't have broadcastable shapes. - - See Also - -------- - :obj:`numpy.ufunc` : A similar Numpy construct. Note that any :code:`ufunc` can be used - as the :code:`func` input to this function. - - Notes - ----- - Previously, operations with Numpy arrays were sometimes supported. Now, - it is necessary to convert Numpy arrays to :obj:`COO` objects. - """ - # Because we need to mutate args. - args = list(args) - posargs = [] - pos = [] - for i, arg in enumerate(args): - if isinstance(arg, scipy.sparse.spmatrix): - args[i] = COO.from_scipy_sparse(arg) - elif isscalar(arg) or (isinstance(arg, np.ndarray) - and not arg.shape): - # Faster and more reliable to pass ()-shaped ndarrays as scalars. - if isinstance(arg, np.ndarray): - args[i] = arg[()] - - pos.append(i) - posargs.append(args[i]) - elif isinstance(arg, SparseArray) and not isinstance(arg, COO): - args[i] = COO(arg) - elif not isinstance(arg, COO): - raise ValueError("Performing this operation would produce " - "a dense result: %s" % str(func)) - - # Filter out scalars as they are 'baked' into the function. - func = PositinalArgumentPartial(func, pos, posargs) - args = [arg for arg in args if not isscalar(arg)] - - if len(args) == 0: - return func(**kwargs) - - if len(args) == 1: - return _elemwise_unary(func, args[0], **kwargs) - - return _elemwise_n_ary(func, *args, **kwargs) - - -def _elemwise_n_ary(func, *args, **kwargs): - """ - Apply a function to any number of arguments with broadcasting. - - Parameters - ---------- - func : Callable - The function to apply to arguments. Must support broadcasting. - args : list - Input :obj:`COO` or :obj:`numpy.ndarray`s. - kwargs : dict - Additional arguments to pass to the function. 
- - Returns - ------- - COO - The output array. - - Raises - ------ - ValueError - If the input shapes aren't compatible or the result will be dense. - """ - args = list(args) - - for arg in args: - if isinstance(arg, COO): - arg.sum_duplicates() - - args_zeros = tuple(_zero_of_dtype(arg.dtype)[()] for arg in args) - - func_value = func(*args_zeros, **kwargs) - func_zero = _zero_of_dtype(func_value.dtype) - if func_value != func_zero: - raise ValueError("Performing this operation would produce " - "a dense result: %s" % str(func)) - data_list = [] - coords_list = [] - - cache = {} - for mask in product([True, False], repeat=len(args)): - if not any(mask): - continue - - ci, di = _unmatch_coo(func, args, mask, cache, **kwargs) - - coords_list.extend(ci) - data_list.extend(di) - - result_shape = _get_nary_broadcast_shape(*[arg.shape for arg in args]) - - # Concatenate matches and mismatches - data = np.concatenate(data_list) if len(data_list) else np.empty((0,), dtype=args[0].dtype) - coords = np.concatenate(coords_list, axis=1) if len(coords_list) else \ - np.empty((0, len(result_shape)), dtype=np.min_scalar_type(max(result_shape) - 1)) - - nonzero = data != func_zero - data = data[nonzero] - coords = coords[:, nonzero] - - return COO(coords, data, shape=result_shape, has_duplicates=False) - - -def _match_coo(*args, **kwargs): - """ - Matches the coordinates for any number of input :obj:`COO` arrays. - Equivalent to "sparse" broadcasting for all arrays. - - Parameters - ---------- - args : Tuple[COO] - The input :obj:`COO` arrays. - return_midx : bool - Whether to return matched indices or matched arrays. Matching - only supported for two arrays. ``False`` by default. - cache : dict - Cache of things already matched. No cache by default. - - Returns - ------- - matched_idx : List[ndarray] - The indices of matched elements in the original arrays. Only returned if - ``return_midx`` is ``True``. - matched_arrays : List[COO] - The expanded, matched :obj:`COO` objects. Only returned if - ``return_midx`` is ``False``. 
- """ - return_midx = kwargs.pop('return_midx', False) - cache = kwargs.pop('cache', None) - - if kwargs: - raise ValueError('Unknown kwargs %s' % kwargs.keys()) - - if return_midx and (len(args) != 2 or cache is not None): - raise NotImplementedError('Matching indices only supported for two args, and no cache.') - - matched_arrays = [args[0]] - cache_key = [id(args[0])] - for arg2 in args[1:]: - cache_key.append(id(arg2)) - key = tuple(cache_key) - if cache is not None and key in cache: - matched_arrays = cache[key] - continue - - cargs = [matched_arrays[0], arg2] - current_shape = _get_broadcast_shape(matched_arrays[0].shape, arg2.shape) - params = [_get_broadcast_parameters(arg.shape, current_shape) for arg in cargs] - reduced_params = [all(p) for p in zip(*params)] - reduced_shape = _get_reduced_shape(arg2.shape, - reduced_params[-arg2.ndim:]) - - reduced_coords = [_get_reduced_coords(arg.coords, reduced_params[-arg.ndim:]) - for arg in cargs] - - linear = [_linear_loc(rc, reduced_shape) for rc in reduced_coords] - sorted_idx = [np.argsort(idx) for idx in linear] - linear = [idx[s] for idx, s in zip(linear, sorted_idx)] - matched_idx = _match_arrays(*linear) - - if return_midx: - matched_idx = [sidx[midx] for sidx, midx in zip(sorted_idx, matched_idx)] - return matched_idx - - coords = [arg.coords[:, s] for arg, s in zip(cargs, sorted_idx)] - mcoords = [c[:, idx] for c, idx in zip(coords, matched_idx)] - mcoords = _get_matching_coords(mcoords, params, current_shape) - mdata = [arg.data[sorted_idx[0]][matched_idx[0]] for arg in matched_arrays] - mdata.append(arg2.data[sorted_idx[1]][matched_idx[1]]) - matched_arrays = [COO(mcoords, md, shape=current_shape) for md in mdata] - - if cache is not None: - cache[key] = matched_arrays - - return matched_arrays - - -def _unmatch_coo(func, args, mask, cache, **kwargs): - """ - Matches the coordinates for any number of input :obj:`COO` arrays. - - First computes the matches, then filters out the non-matches. - - Parameters - ---------- - func : Callable - The function to compute matches - args : tuple[COO] - The input :obj:`COO` arrays. - mask : tuple[bool] - Specifies the inputs that are zero and the ones that are - nonzero. - kwargs: dict - Extra keyword arguments to pass to func. - - Returns - ------- - matched_coords : list[ndarray] - The matched coordinates. - matched_data : list[ndarray] - The matched data. 
- """ - matched_args = [a for a, m in zip(args, mask) if m] - unmatched_args = [a for a, m in zip(args, mask) if not m] - - matched_arrays = _match_coo(*matched_args, cache=cache) - - pos = tuple(i for i, m in enumerate(mask) if not m) - posargs = [_zero_of_dtype(arg.dtype)[()] for arg, m in zip(args, mask) if not m] - result_shape = _get_nary_broadcast_shape(*[arg.shape for arg in args]) - - partial = PositinalArgumentPartial(func, pos, posargs) - matched_func = partial(*[a.data for a in matched_arrays], **kwargs) - - unmatched_mask = matched_func != _zero_of_dtype(matched_func.dtype) - - if not unmatched_mask.any(): - return [], [] - - func_data = matched_func[unmatched_mask] - func_coords = matched_arrays[0].coords[:, unmatched_mask] - - func_array = COO(func_coords, func_data, shape=matched_arrays[0].shape).broadcast_to(result_shape) - - if all(mask): - return [func_array.coords], [func_array.data] - - unmatched_mask = np.ones(func_array.nnz, dtype=np.bool) - - for arg in unmatched_args: - matched_idx = _match_coo(func_array, arg, return_midx=True)[0] - unmatched_mask[matched_idx] = False - - coords = np.asarray(func_array.coords[:, unmatched_mask], order='C') - data = np.asarray(func_array.data[unmatched_mask], order='C') - - return [coords], [data] - - -def _get_nary_broadcast_shape(*shapes): - """ - Broadcast any number of shapes to a result shape. - - Parameters - ---------- - shapes : tuple[tuple[int]] - The shapes to broadcast. - - Returns - ------- - tuple[int] - The output shape. - - Raises - ------ - ValueError - If the input shapes cannot be broadcast to a single shape. - """ - result_shape = () - - for shape in shapes: - try: - result_shape = _get_broadcast_shape(shape, result_shape) - except ValueError: - shapes_str = ', '.join(str(shape) for shape in shapes) - raise ValueError('operands could not be broadcast together with shapes %s' - % shapes_str) - - return result_shape - - -def _get_broadcast_shape(shape1, shape2, is_result=False): - """ - Get the overall broadcasted shape. - - Parameters - ---------- - shape1, shape2 : tuple[int] - The input shapes to broadcast together. - is_result : bool - Whether or not shape2 is also the result shape. - - Returns - ------- - result_shape : tuple[int] - The overall shape of the result. - - Raises - ------ - ValueError - If the two shapes cannot be broadcast together. - """ - # https://stackoverflow.com/a/47244284/774273 - if not all((l1 == l2) or (l1 == 1) or ((l2 == 1) and not is_result) for l1, l2 in - zip(shape1[::-1], shape2[::-1])): - raise ValueError('operands could not be broadcast together with shapes %s, %s' % - (shape1, shape2)) - - result_shape = tuple(max(l1, l2) for l1, l2 in - zip_longest(shape1[::-1], shape2[::-1], fillvalue=1))[::-1] - - return result_shape - - -def _get_broadcast_parameters(shape, broadcast_shape): - """ - Get the broadcast parameters. - - Parameters - ---------- - shape : tuple[int] - The input shape. - broadcast_shape - The shape to broadcast to. - - Returns - ------- - params : list - A list containing None if the dimension isn't in the original array, False if - it needs to be broadcast, and True if it doesn't. - """ - params = [None if l1 is None else l1 == l2 for l1, l2 - in zip_longest(shape[::-1], broadcast_shape[::-1], fillvalue=None)][::-1] - - return params - - -def _get_reduced_coords(coords, params): - """ - Gets only those dimensions of the coordinates that don't need to be broadcast. - - Parameters - ---------- - coords : np.ndarray - The coordinates to reduce. 
- params : list - The params from which to check which dimensions to get. - - Returns - ------- - reduced_coords : np.ndarray - The reduced coordinates. - """ - - reduced_params = [bool(param) for param in params] - - return coords[reduced_params] - - -def _get_reduced_shape(shape, params): - """ - Gets only those dimensions of the coordinates that don't need to be broadcast. - - Parameters - ---------- - coords : np.ndarray - The coordinates to reduce. - params : list - The params from which to check which dimensions to get. - - Returns - ------- - reduced_coords : np.ndarray - The reduced coordinates. - """ - reduced_shape = tuple(l for l, p in zip(shape, params) if p) - - return reduced_shape - - -def _get_expanded_coords_data(coords, data, params, broadcast_shape): - """ - Expand coordinates/data to broadcast_shape. Does most of the heavy lifting for broadcast_to. - Produces sorted output for sorted inputs. - - Parameters - ---------- - coords : np.ndarray - The coordinates to expand. - data : np.ndarray - The data corresponding to the coordinates. - params : list - The broadcast parameters. - broadcast_shape : tuple[int] - The shape to broadcast to. - - Returns - ------- - expanded_coords : np.ndarray - List of 1-D arrays. Each item in the list has one dimension of coordinates. - expanded_data : np.ndarray - The data corresponding to expanded_coords. - """ - first_dim = -1 - expand_shapes = [] - for d, p, l in zip(range(len(broadcast_shape)), params, broadcast_shape): - if p and first_dim == -1: - expand_shapes.append(coords.shape[1]) - first_dim = d - - if not p: - expand_shapes.append(l) - - all_idx = _cartesian_product(*(np.arange(d, dtype=np.min_scalar_type(d - 1)) for d in expand_shapes)) - dt = np.result_type(*(np.min_scalar_type(l - 1) for l in broadcast_shape)) - - false_dim = 0 - dim = 0 - - expanded_coords = np.empty((len(broadcast_shape), all_idx.shape[1]), dtype=dt) - expanded_data = data[all_idx[first_dim]] - - for d, p, l in zip(range(len(broadcast_shape)), params, broadcast_shape): - if p: - expanded_coords[d] = coords[dim, all_idx[first_dim]] - else: - expanded_coords[d] = all_idx[false_dim + (d > first_dim)] - false_dim += 1 - - if p is not None: - dim += 1 - - return np.asarray(expanded_coords), np.asarray(expanded_data) - - -# (c) senderle -# Taken from https://stackoverflow.com/a/11146645/774273 -# License: https://creativecommons.org/licenses/by-sa/3.0/ -def _cartesian_product(*arrays): - """ - Get the cartesian product of a number of arrays. - - Parameters - ---------- - arrays : Tuple[np.ndarray] - The arrays to get a cartesian product of. Always sorted with respect - to the original array. - - Returns - ------- - out : np.ndarray - The overall cartesian product of all the input arrays. 
- """ - broadcastable = np.ix_(*arrays) - broadcasted = np.broadcast_arrays(*broadcastable) - rows, cols = np.prod(broadcasted[0].shape), len(broadcasted) - dtype = np.result_type(*arrays) - out = np.empty(rows * cols, dtype=dtype) - start, end = 0, rows - for a in broadcasted: - out[start:end] = a.reshape(-1) - start, end = end, end + rows - return out.reshape(cols, rows) - - -def _elemwise_unary(func, self, *args, **kwargs): - check = kwargs.pop('check', True) - data_zero = _zero_of_dtype(self.dtype) - zero_func = func(data_zero, *args, **kwargs) - func_zero = _zero_of_dtype(zero_func.dtype) - if check and zero_func != func_zero: - raise ValueError("Performing this operation would produce " - "a dense result: %s" % str(func)) - - data_func = func(self.data, *args, **kwargs) - nonzero = data_func != func_zero - - return COO(self.coords[:, nonzero], data_func[nonzero], - shape=self.shape, - has_duplicates=self.has_duplicates, - sorted=self.sorted) - - -def _get_matching_coords(coords, params, shape): - """ - Get the matching coords across a number of broadcast operands. - - Parameters - ---------- - coords : list[numpy.ndarray] - The input coordinates. - params : list[Union[bool, none]] - The broadcast parameters. - Returns - ------- - numpy.ndarray - The broacasted coordinates - """ - matching_coords = [] - dims = np.zeros(len(coords), dtype=np.uint8) - - for p_all in zip(*params): - for i, p in enumerate(p_all): - if p: - matching_coords.append(coords[i][dims[i]]) - break - else: - matching_coords.append(coords[dims[0]]) - - for i, p in enumerate(p_all): - if p is not None: - dims[i] += 1 - - dtype = np.min_scalar_type(max(shape) - 1) - - return np.asarray(matching_coords, dtype=dtype) - - -def _linear_loc(coords, shape, signed=False): - n = reduce(operator.mul, shape, 1) - if signed: - n = -n - dtype = np.min_scalar_type(n) - out = np.zeros(coords.shape[1], dtype=dtype) - tmp = np.zeros(coords.shape[1], dtype=dtype) - strides = 1 - for i, d in enumerate(shape[::-1]): - np.multiply(coords[-(i + 1), :], strides, out=tmp, dtype=dtype) - np.add(tmp, out, out=out) - strides *= d - return out - - -def asCOO(x, name='asCOO', check=True): - """ - Convert the input to :obj:`COO`. Passes through :obj:`COO` objects as-is. - - Parameters - ---------- - x : Union[SparseArray, scipy.sparse.spmatrix, numpy.ndarray] - The input array to convert. - name : str, optional - The name of the operation to use in the exception. - check : bool, optional - Whether to check for a dense input. - - Returns - ------- - COO - The converted :obj:`COO` array. - - Raises - ------ - ValueError - If ``check`` is true and a dense input is supplied. - """ - if check and not isinstance(x, (SparseArray, scipy.sparse.spmatrix)): - raise ValueError('Performing this operation would produce a dense result: %s' % name) - - if not isinstance(x, COO): - x = COO(x) - - return x - - -def where(condition, x=None, y=None): - """ - Select values from either ``x`` or ``y`` depending on ``condition``. - If ``x`` and ``y`` are not given, returns indices where ``condition`` - is nonzero. - - Performs the equivalent of :obj:`numpy.where`. - - Parameters - ---------- - condition : SparseArray - The condition based on which to select values from - either ``x`` or ``y``. - x : SparseArray, optional - The array to select values from if ``condition`` is nonzero. - y : SparseArray, optional - The array to select values from if ``condition`` is zero. 
- - Returns - ------- - COO - The output array with selected values if ``x`` and ``y`` are given; - else where the array is nonzero. - - Raises - ------ - ValueError - If the operation would produce a dense result; or exactly one of - ``x`` and ``y`` are given. - - See Also - -------- - numpy.where : Equivalent Numpy function. - """ - x_given = x is not None - y_given = y is not None - - if not (x_given or y_given): - condition = asCOO(condition, name=str(np.where)) - return tuple(condition.coords) - - if x_given != y_given: - raise ValueError('either both or neither of x and y should be given') - - return elemwise(np.where, condition, x, y) - - -def nansum(x, axis=None, keepdims=False, dtype=None, out=None): - """ - Performs a ``NaN`` skipping sum operation along the given axes. Uses all axes by default. - - Parameters - ---------- - x : SparseArray - The array to perform the reduction on. - axis : Union[int, Iterable[int]], optional - The axes along which to sum. Uses all axes by default. - keepdims : bool, optional - Whether or not to keep the dimensions of the original array. - dtype: numpy.dtype - The data type of the output array. - - Returns - ------- - COO - The reduced output sparse array. - - See Also - -------- - :obj:`COO.sum` : Function without ``NaN`` skipping. - numpy.nansum : Equivalent Numpy function. - """ - assert out is None - x = asCOO(x, name='nansum') - return x.nanreduce(np.add, axis=axis, keepdims=keepdims, dtype=dtype) - - -def nanmax(x, axis=None, keepdims=False, dtype=None, out=None): - """ - Maximize along the given axes, skipping ``NaN`` values. Uses all axes by default. - - Parameters - ---------- - x : SparseArray - The array to perform the reduction on. - axis : Union[int, Iterable[int]], optional - The axes along which to maximize. Uses all axes by default. - keepdims : bool, optional - Whether or not to keep the dimensions of the original array. - dtype: numpy.dtype - The data type of the output array. - - Returns - ------- - COO - The reduced output sparse array. - - See Also - -------- - :obj:`COO.max` : Function without ``NaN`` skipping. - numpy.nanmax : Equivalent Numpy function. - """ - assert out is None - x = asCOO(x, name='nanmax') - - ar = x.reduce(np.fmax, axis=axis, keepdims=keepdims, - dtype=dtype) - - if (isscalar(ar) and np.isnan(ar)) or np.isnan(ar.data).any(): - warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2) - - return ar - - -def nanmin(x, axis=None, keepdims=False, dtype=None, out=None): - """ - Minimize along the given axes, skipping ``NaN`` values. Uses all axes by default. - - Parameters - ---------- - x : SparseArray - The array to perform the reduction on. - axis : Union[int, Iterable[int]], optional - The axes along which to minimize. Uses all axes by default. - keepdims : bool, optional - Whether or not to keep the dimensions of the original array. - dtype: numpy.dtype - The data type of the output array. - - Returns - ------- - COO - The reduced output sparse array. - - See Also - -------- - :obj:`COO.min` : Function without ``NaN`` skipping. - numpy.nanmin : Equivalent Numpy function. 
- """ - assert out is None - x = asCOO(x, name='nanmin') - - ar = x.reduce(np.fmin, axis=axis, keepdims=keepdims, - dtype=dtype) - - if (isscalar(ar) and np.isnan(ar)) or np.isnan(ar.data).any(): - warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2) - - return ar - - -def nanprod(x, axis=None, keepdims=False, dtype=None, out=None): - """ - Performs a product operation along the given axes, skipping ``NaN`` values. - Uses all axes by default. - - Parameters - ---------- - x : SparseArray - The array to perform the reduction on. - axis : Union[int, Iterable[int]], optional - The axes along which to multiply. Uses all axes by default. - keepdims : bool, optional - Whether or not to keep the dimensions of the original array. - dtype: numpy.dtype - The data type of the output array. - - Returns - ------- - COO - The reduced output sparse array. - - See Also - -------- - :obj:`COO.prod` : Function without ``NaN`` skipping. - numpy.nanprod : Equivalent Numpy function. - """ - assert out is None - x = asCOO(x) - return x.nanreduce(np.multiply, axis=axis, keepdims=keepdims, dtype=dtype) diff --git a/sparse/coo/umath.py b/sparse/coo/umath.py new file mode 100644 index 0000000..f130133 --- /dev/null +++ b/sparse/coo/umath.py @@ -0,0 +1,638 @@ +from itertools import product + +import numpy as np +import scipy.sparse + +from ..utils import isscalar, PositinalArgumentPartial, _zero_of_dtype +from ..compatibility import range, zip, zip_longest + + +def elemwise(func, *args, **kwargs): + """ + Apply a function to any number of arguments. + + Parameters + ---------- + func : Callable + The function to apply. Must support broadcasting. + args : tuple, optional + The arguments to the function. Can be :obj:`SparseArray` objects + or :obj:`scipy.sparse.spmatrix` objects. + kwargs : dict, optional + Any additional arguments to pass to the function. + + Returns + ------- + COO + The result of applying the function. + + Raises + ------ + ValueError + If the operation would result in a dense matrix, or if the operands + don't have broadcastable shapes. + + See Also + -------- + :obj:`numpy.ufunc` : A similar Numpy construct. Note that any :code:`ufunc` can be used + as the :code:`func` input to this function. + + Notes + ----- + Previously, operations with Numpy arrays were sometimes supported. Now, + it is necessary to convert Numpy arrays to :obj:`COO` objects. + """ + # Because we need to mutate args. + from .core import COO + from ..sparse_array import SparseArray + + args = list(args) + posargs = [] + pos = [] + for i, arg in enumerate(args): + if isinstance(arg, scipy.sparse.spmatrix): + args[i] = COO.from_scipy_sparse(arg) + elif isscalar(arg) or (isinstance(arg, np.ndarray) + and not arg.shape): + # Faster and more reliable to pass ()-shaped ndarrays as scalars. + if isinstance(arg, np.ndarray): + args[i] = arg[()] + + pos.append(i) + posargs.append(args[i]) + elif isinstance(arg, SparseArray) and not isinstance(arg, COO): + args[i] = COO(arg) + elif not isinstance(arg, COO): + raise ValueError("Performing this operation would produce " + "a dense result: %s" % str(func)) + + # Filter out scalars as they are 'baked' into the function. 
+ func = PositinalArgumentPartial(func, pos, posargs) + args = [arg for arg in args if not isscalar(arg)] + + if len(args) == 0: + return func(**kwargs) + + if len(args) == 1: + return _elemwise_unary(func, args[0], **kwargs) + + return _elemwise_n_ary(func, *args, **kwargs) + + +# (c) Paul Panzer +# Taken from https://stackoverflow.com/a/47833496/774273 +# License: https://creativecommons.org/licenses/by-sa/3.0/ +def _match_arrays(a, b): + """ + Finds all indexes into a and b such that a[i] = b[j]. The outputs are sorted + in lexographical order. + + Parameters + ---------- + a, b : np.ndarray + The input 1-D arrays to match. If matching of multiple fields is + needed, use np.recarrays. These two arrays must be sorted. + + Returns + ------- + a_idx, b_idx : np.ndarray + The output indices of every possible pair of matching elements. + """ + if len(a) == 0 or len(b) == 0: + return np.array([], dtype=np.uint8), np.array([], dtype=np.uint8) + asw = np.r_[0, 1 + np.flatnonzero(a[:-1] != a[1:]), len(a)] + bsw = np.r_[0, 1 + np.flatnonzero(b[:-1] != b[1:]), len(b)] + al, bl = np.diff(asw), np.diff(bsw) + na = len(al) + asw, bsw = asw, bsw + abunq = np.r_[a[asw[:-1]], b[bsw[:-1]]] + m = np.argsort(abunq, kind='mergesort') + mv = abunq[m] + midx = np.flatnonzero(mv[:-1] == mv[1:]) + ai, bi = m[midx], m[midx + 1] - na + aic = np.r_[0, np.cumsum(al[ai])] + a_idx = np.ones((aic[-1],), dtype=np.int_) + a_idx[aic[:-1]] = asw[ai] + a_idx[aic[1:-1]] -= asw[ai[:-1]] + al[ai[:-1]] - 1 + a_idx = np.repeat(np.cumsum(a_idx), np.repeat(bl[bi], al[ai])) + bi = np.repeat(bi, al[ai]) + bic = np.r_[0, np.cumsum(bl[bi])] + b_idx = np.ones((bic[-1],), dtype=np.int_) + b_idx[bic[:-1]] = bsw[bi] + b_idx[bic[1:-1]] -= bsw[bi[:-1]] + bl[bi[:-1]] - 1 + b_idx = np.cumsum(b_idx) + return a_idx, b_idx + + +def _elemwise_n_ary(func, *args, **kwargs): + """ + Apply a function to any number of arguments with broadcasting. + + Parameters + ---------- + func : Callable + The function to apply to arguments. Must support broadcasting. + args : list + Input :obj:`COO` or :obj:`numpy.ndarray`s. + kwargs : dict + Additional arguments to pass to the function. + + Returns + ------- + COO + The output array. + + Raises + ------ + ValueError + If the input shapes aren't compatible or the result will be dense. 
+ """ + from .core import COO + + args = list(args) + + for arg in args: + if isinstance(arg, COO): + arg.sum_duplicates() + + args_zeros = tuple(_zero_of_dtype(arg.dtype)[()] for arg in args) + + func_value = func(*args_zeros, **kwargs) + func_zero = _zero_of_dtype(func_value.dtype) + if func_value != func_zero: + raise ValueError("Performing this operation would produce " + "a dense result: %s" % str(func)) + data_list = [] + coords_list = [] + + cache = {} + for mask in product([True, False], repeat=len(args)): + if not any(mask): + continue + + ci, di = _unmatch_coo(func, args, mask, cache, **kwargs) + + coords_list.extend(ci) + data_list.extend(di) + + result_shape = _get_nary_broadcast_shape(*[arg.shape for arg in args]) + + # Concatenate matches and mismatches + data = np.concatenate(data_list) if len(data_list) else np.empty((0,), dtype=args[0].dtype) + coords = np.concatenate(coords_list, axis=1) if len(coords_list) else \ + np.empty((0, len(result_shape)), dtype=np.min_scalar_type(max(result_shape) - 1)) + + nonzero = data != func_zero + data = data[nonzero] + coords = coords[:, nonzero] + + return COO(coords, data, shape=result_shape, has_duplicates=False) + + +def _match_coo(*args, **kwargs): + """ + Matches the coordinates for any number of input :obj:`COO` arrays. + Equivalent to "sparse" broadcasting for all arrays. + + Parameters + ---------- + args : Tuple[COO] + The input :obj:`COO` arrays. + return_midx : bool + Whether to return matched indices or matched arrays. Matching + only supported for two arrays. ``False`` by default. + cache : dict + Cache of things already matched. No cache by default. + + Returns + ------- + matched_idx : List[ndarray] + The indices of matched elements in the original arrays. Only returned if + ``return_midx`` is ``True``. + matched_arrays : List[COO] + The expanded, matched :obj:`COO` objects. Only returned if + ``return_midx`` is ``False``. 
+ """ + from .core import COO + from .common import linear_loc + + return_midx = kwargs.pop('return_midx', False) + cache = kwargs.pop('cache', None) + + if kwargs: + raise ValueError('Unknown kwargs %s' % kwargs.keys()) + + if return_midx and (len(args) != 2 or cache is not None): + raise NotImplementedError('Matching indices only supported for two args, and no cache.') + + matched_arrays = [args[0]] + cache_key = [id(args[0])] + for arg2 in args[1:]: + cache_key.append(id(arg2)) + key = tuple(cache_key) + if cache is not None and key in cache: + matched_arrays = cache[key] + continue + + cargs = [matched_arrays[0], arg2] + current_shape = _get_broadcast_shape(matched_arrays[0].shape, arg2.shape) + params = [_get_broadcast_parameters(arg.shape, current_shape) for arg in cargs] + reduced_params = [all(p) for p in zip(*params)] + reduced_shape = _get_reduced_shape(arg2.shape, + reduced_params[-arg2.ndim:]) + + reduced_coords = [_get_reduced_coords(arg.coords, reduced_params[-arg.ndim:]) + for arg in cargs] + + linear = [linear_loc(rc, reduced_shape) for rc in reduced_coords] + sorted_idx = [np.argsort(idx) for idx in linear] + linear = [idx[s] for idx, s in zip(linear, sorted_idx)] + matched_idx = _match_arrays(*linear) + + if return_midx: + matched_idx = [sidx[midx] for sidx, midx in zip(sorted_idx, matched_idx)] + return matched_idx + + coords = [arg.coords[:, s] for arg, s in zip(cargs, sorted_idx)] + mcoords = [c[:, idx] for c, idx in zip(coords, matched_idx)] + mcoords = _get_matching_coords(mcoords, params, current_shape) + mdata = [arg.data[sorted_idx[0]][matched_idx[0]] for arg in matched_arrays] + mdata.append(arg2.data[sorted_idx[1]][matched_idx[1]]) + matched_arrays = [COO(mcoords, md, shape=current_shape) for md in mdata] + + if cache is not None: + cache[key] = matched_arrays + + return matched_arrays + + +def _unmatch_coo(func, args, mask, cache, **kwargs): + """ + Matches the coordinates for any number of input :obj:`COO` arrays. + + First computes the matches, then filters out the non-matches. + + Parameters + ---------- + func : Callable + The function to compute matches + args : tuple[COO] + The input :obj:`COO` arrays. + mask : tuple[bool] + Specifies the inputs that are zero and the ones that are + nonzero. + kwargs: dict + Extra keyword arguments to pass to func. + + Returns + ------- + matched_coords : list[ndarray] + The matched coordinates. + matched_data : list[ndarray] + The matched data. 
+ """ + from .core import COO + + matched_args = [a for a, m in zip(args, mask) if m] + unmatched_args = [a for a, m in zip(args, mask) if not m] + + matched_arrays = _match_coo(*matched_args, cache=cache) + + pos = tuple(i for i, m in enumerate(mask) if not m) + posargs = [_zero_of_dtype(arg.dtype)[()] for arg, m in zip(args, mask) if not m] + result_shape = _get_nary_broadcast_shape(*[arg.shape for arg in args]) + + partial = PositinalArgumentPartial(func, pos, posargs) + matched_func = partial(*[a.data for a in matched_arrays], **kwargs) + + unmatched_mask = matched_func != _zero_of_dtype(matched_func.dtype) + + if not unmatched_mask.any(): + return [], [] + + func_data = matched_func[unmatched_mask] + func_coords = matched_arrays[0].coords[:, unmatched_mask] + + func_array = COO(func_coords, func_data, shape=matched_arrays[0].shape).broadcast_to(result_shape) + + if all(mask): + return [func_array.coords], [func_array.data] + + unmatched_mask = np.ones(func_array.nnz, dtype=np.bool) + + for arg in unmatched_args: + matched_idx = _match_coo(func_array, arg, return_midx=True)[0] + unmatched_mask[matched_idx] = False + + coords = np.asarray(func_array.coords[:, unmatched_mask], order='C') + data = np.asarray(func_array.data[unmatched_mask], order='C') + + return [coords], [data] + + +def _get_nary_broadcast_shape(*shapes): + """ + Broadcast any number of shapes to a result shape. + + Parameters + ---------- + shapes : tuple[tuple[int]] + The shapes to broadcast. + + Returns + ------- + tuple[int] + The output shape. + + Raises + ------ + ValueError + If the input shapes cannot be broadcast to a single shape. + """ + result_shape = () + + for shape in shapes: + try: + result_shape = _get_broadcast_shape(shape, result_shape) + except ValueError: + shapes_str = ', '.join(str(shape) for shape in shapes) + raise ValueError('operands could not be broadcast together with shapes %s' + % shapes_str) + + return result_shape + + +def _get_broadcast_shape(shape1, shape2, is_result=False): + """ + Get the overall broadcasted shape. + + Parameters + ---------- + shape1, shape2 : tuple[int] + The input shapes to broadcast together. + is_result : bool + Whether or not shape2 is also the result shape. + + Returns + ------- + result_shape : tuple[int] + The overall shape of the result. + + Raises + ------ + ValueError + If the two shapes cannot be broadcast together. + """ + # https://stackoverflow.com/a/47244284/774273 + if not all((l1 == l2) or (l1 == 1) or ((l2 == 1) and not is_result) for l1, l2 in + zip(shape1[::-1], shape2[::-1])): + raise ValueError('operands could not be broadcast together with shapes %s, %s' % + (shape1, shape2)) + + result_shape = tuple(max(l1, l2) for l1, l2 in + zip_longest(shape1[::-1], shape2[::-1], fillvalue=1))[::-1] + + return result_shape + + +def _get_broadcast_parameters(shape, broadcast_shape): + """ + Get the broadcast parameters. + + Parameters + ---------- + shape : tuple[int] + The input shape. + broadcast_shape + The shape to broadcast to. + + Returns + ------- + params : list + A list containing None if the dimension isn't in the original array, False if + it needs to be broadcast, and True if it doesn't. + """ + params = [None if l1 is None else l1 == l2 for l1, l2 + in zip_longest(shape[::-1], broadcast_shape[::-1], fillvalue=None)][::-1] + + return params + + +def _get_reduced_coords(coords, params): + """ + Gets only those dimensions of the coordinates that don't need to be broadcast. 
+ + Parameters + ---------- + coords : np.ndarray + The coordinates to reduce. + params : list + The params from which to check which dimensions to get. + + Returns + ------- + reduced_coords : np.ndarray + The reduced coordinates. + """ + + reduced_params = [bool(param) for param in params] + + return coords[reduced_params] + + +def _get_reduced_shape(shape, params): + """ + Gets only those dimensions of the coordinates that don't need to be broadcast. + + Parameters + ---------- + coords : np.ndarray + The coordinates to reduce. + params : list + The params from which to check which dimensions to get. + + Returns + ------- + reduced_coords : np.ndarray + The reduced coordinates. + """ + reduced_shape = tuple(l for l, p in zip(shape, params) if p) + + return reduced_shape + + +def _get_expanded_coords_data(coords, data, params, broadcast_shape): + """ + Expand coordinates/data to broadcast_shape. Does most of the heavy lifting for broadcast_to. + Produces sorted output for sorted inputs. + + Parameters + ---------- + coords : np.ndarray + The coordinates to expand. + data : np.ndarray + The data corresponding to the coordinates. + params : list + The broadcast parameters. + broadcast_shape : tuple[int] + The shape to broadcast to. + + Returns + ------- + expanded_coords : np.ndarray + List of 1-D arrays. Each item in the list has one dimension of coordinates. + expanded_data : np.ndarray + The data corresponding to expanded_coords. + """ + first_dim = -1 + expand_shapes = [] + for d, p, l in zip(range(len(broadcast_shape)), params, broadcast_shape): + if p and first_dim == -1: + expand_shapes.append(coords.shape[1]) + first_dim = d + + if not p: + expand_shapes.append(l) + + all_idx = _cartesian_product(*(np.arange(d, dtype=np.min_scalar_type(d - 1)) for d in expand_shapes)) + dt = np.result_type(*(np.min_scalar_type(l - 1) for l in broadcast_shape)) + + false_dim = 0 + dim = 0 + + expanded_coords = np.empty((len(broadcast_shape), all_idx.shape[1]), dtype=dt) + expanded_data = data[all_idx[first_dim]] + + for d, p, l in zip(range(len(broadcast_shape)), params, broadcast_shape): + if p: + expanded_coords[d] = coords[dim, all_idx[first_dim]] + else: + expanded_coords[d] = all_idx[false_dim + (d > first_dim)] + false_dim += 1 + + if p is not None: + dim += 1 + + return np.asarray(expanded_coords), np.asarray(expanded_data) + + +# (c) senderle +# Taken from https://stackoverflow.com/a/11146645/774273 +# License: https://creativecommons.org/licenses/by-sa/3.0/ +def _cartesian_product(*arrays): + """ + Get the cartesian product of a number of arrays. + + Parameters + ---------- + arrays : Tuple[np.ndarray] + The arrays to get a cartesian product of. Always sorted with respect + to the original array. + + Returns + ------- + out : np.ndarray + The overall cartesian product of all the input arrays. 
+ """ + broadcastable = np.ix_(*arrays) + broadcasted = np.broadcast_arrays(*broadcastable) + rows, cols = np.prod(broadcasted[0].shape), len(broadcasted) + dtype = np.result_type(*arrays) + out = np.empty(rows * cols, dtype=dtype) + start, end = 0, rows + for a in broadcasted: + out[start:end] = a.reshape(-1) + start, end = end, end + rows + return out.reshape(cols, rows) + + +def _elemwise_unary(func, self, *args, **kwargs): + from .core import COO + + check = kwargs.pop('check', True) + data_zero = _zero_of_dtype(self.dtype) + zero_func = func(data_zero, *args, **kwargs) + func_zero = _zero_of_dtype(zero_func.dtype) + if check and zero_func != func_zero: + raise ValueError("Performing this operation would produce " + "a dense result: %s" % str(func)) + + data_func = func(self.data, *args, **kwargs) + nonzero = data_func != func_zero + + return COO(self.coords[:, nonzero], data_func[nonzero], + shape=self.shape, + has_duplicates=self.has_duplicates, + sorted=self.sorted) + + +def _get_matching_coords(coords, params, shape): + """ + Get the matching coords across a number of broadcast operands. + + Parameters + ---------- + coords : list[numpy.ndarray] + The input coordinates. + params : list[Union[bool, none]] + The broadcast parameters. + Returns + ------- + numpy.ndarray + The broacasted coordinates + """ + matching_coords = [] + dims = np.zeros(len(coords), dtype=np.uint8) + + for p_all in zip(*params): + for i, p in enumerate(p_all): + if p: + matching_coords.append(coords[i][dims[i]]) + break + else: + matching_coords.append(coords[dims[0]]) + + for i, p in enumerate(p_all): + if p is not None: + dims[i] += 1 + + dtype = np.min_scalar_type(max(shape) - 1) + + return np.asarray(matching_coords, dtype=dtype) + + +def broadcast_to(x, shape): + """ + Performs the equivalent of :obj:`numpy.broadcast_to` for :obj:`COO`. Note that + this function returns a new array instead of a view. + + Parameters + ---------- + shape : tuple[int] + The shape to broadcast the data to. + + Returns + ------- + COO + The broadcasted sparse array. + + Raises + ------ + ValueError + If the operand cannot be broadcast to the given shape. + + See also + -------- + :obj:`numpy.broadcast_to` : NumPy equivalent function + """ + from .core import COO + + if shape == x.shape: + return x + + result_shape = _get_broadcast_shape(x.shape, shape, is_result=True) + params = _get_broadcast_parameters(x.shape, result_shape) + coords, data = _get_expanded_coords_data(x.coords, x.data, params, result_shape) + + return COO(coords, data, shape=result_shape, has_duplicates=x.has_duplicates, + sorted=x.sorted)
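The new `coo/umath.py` added above centralises the element-wise and broadcasting machinery behind the `COO` operators. As a rough usage sketch (illustrative only, not part of the patch; it assumes the top-level `sparse.random` helper that the test references below also exercise), the behaviour it implements surfaces through the ordinary arithmetic operators and the `broadcast_to` method on `COO`:

```python
import numpy as np
import sparse

# Two sparse operands with the same shape.
x = sparse.random((3, 4), density=0.3)
y = sparse.random((3, 4), density=0.3)

# Binary operators on COO arrays dispatch through the elemwise machinery,
# which only materialises coordinates where the result can be nonzero.
z = x * y
assert np.allclose(z.todense(), x.todense() * y.todense())

# broadcast_to returns a new COO array expanded to the requested shape
# (a copy, not a view), using the coordinate-expansion helpers above.
row = sparse.random((1, 4), density=0.5)
b = row.broadcast_to((3, 4))
assert b.shape == (3, 4)
assert np.allclose(b.todense(), np.broadcast_to(row.todense(), (3, 4)))
```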
Reorganize coo.py?

`coo.py` is getting large and unwieldy. I sometimes spend a lot of time scrolling through it or finding things in it. I was thinking of breaking it up into smaller chunks for better maintainability. I propose the following:

- A `coo` directory.
- `coo/coo.py` (for the `COO` class).
- Move the large implementations out of the `COO` class into separate modules: one for `elemwise`, one for `reduction`, one for `__getitem__`, and one for the remaining methods.
- Similarly for `DOK`.

I'd welcome any proposed changes to this structure.
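For concreteness, a minimal sketch of how the proposed split could be wired up is shown below. The module names are an assumption drawn from the imports in the patch above (`coo/core.py`, `coo/common.py`, `coo/umath.py`), not a confirmed layout.

```python
# Hypothetical sparse/coo/__init__.py after the split.
# Module names are assumed from the patch above; the real layout may differ.
from .core import COO                      # the COO class and its core methods
from .umath import elemwise, broadcast_to  # element-wise ops and broadcasting
from .common import linear_loc             # helpers shared across the package

__all__ = ['COO', 'elemwise', 'broadcast_to', 'linear_loc']
```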
pydata/sparse
diff --git a/sparse/tests/test_coo.py b/sparse/tests/test_coo.py index 8c4a767..d903683 100644 --- a/sparse/tests/test_coo.py +++ b/sparse/tests/test_coo.py @@ -441,7 +441,7 @@ def test_trinary_broadcasting_pathological(shapes, func, value, fraction): def test_sparse_broadcasting(monkeypatch): - orig_unmatch_coo = sparse.coo._unmatch_coo + orig_unmatch_coo = sparse.coo.umath._unmatch_coo state = {'num_matches': 0} @@ -453,7 +453,7 @@ def test_sparse_broadcasting(monkeypatch): state['num_matches'] += len(result[0]) return result - monkeypatch.setattr(sparse.coo, '_unmatch_coo', mock_unmatch_coo) + monkeypatch.setattr(sparse.coo.umath, '_unmatch_coo', mock_unmatch_coo) xs * ys @@ -462,7 +462,7 @@ def test_sparse_broadcasting(monkeypatch): def test_dense_broadcasting(monkeypatch): - orig_unmatch_coo = sparse.coo._unmatch_coo + orig_unmatch_coo = sparse.coo.umath._unmatch_coo state = {'num_matches': 0} @@ -474,7 +474,7 @@ def test_dense_broadcasting(monkeypatch): state['num_matches'] += len(result[0]) return result - monkeypatch.setattr(sparse.coo, '_unmatch_coo', mock_unmatch_coo) + monkeypatch.setattr(sparse.coo.umath, '_unmatch_coo', mock_unmatch_coo) xs + ys
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_added_files", "has_removed_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 6 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[all]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-flake8", "pytest-cov" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 distlib==0.3.9 docutils==0.17.1 filelock==3.4.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 importlib-resources==5.4.0 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 numpy==1.19.5 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 pockets==0.9.1 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-flake8==1.1.1 pytz==2025.2 requests==2.27.1 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 -e git+https://github.com/pydata/sparse.git@217ca234309682fe1cb0c73aa6aa68cf444c1b6a#egg=sparse Sphinx==4.3.2 sphinx-rtd-theme==1.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-napoleon==0.7 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 zipp==3.6.0
name: sparse channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - distlib==0.3.9 - docutils==0.17.1 - filelock==3.4.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - importlib-resources==5.4.0 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - numpy==1.19.5 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - pockets==0.9.1 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-flake8==1.1.1 - pytz==2025.2 - requests==2.27.1 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-rtd-theme==1.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-napoleon==0.7 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - zipp==3.6.0 prefix: /opt/conda/envs/sparse
[ "sparse/coo/core.py::sparse.coo.core.COO", "sparse/coo/core.py::sparse.coo.core.COO.T", "sparse/coo/core.py::sparse.coo.core.COO.__len__", "sparse/coo/core.py::sparse.coo.core.COO.dot", "sparse/coo/core.py::sparse.coo.core.COO.dtype", "sparse/coo/core.py::sparse.coo.core.COO.from_numpy", "sparse/coo/core.py::sparse.coo.core.COO.from_scipy_sparse", "sparse/coo/core.py::sparse.coo.core.COO.linear_loc", "sparse/coo/core.py::sparse.coo.core.COO.max", "sparse/coo/core.py::sparse.coo.core.COO.maybe_densify", "sparse/coo/core.py::sparse.coo.core.COO.min", "sparse/coo/core.py::sparse.coo.core.COO.nbytes", "sparse/coo/core.py::sparse.coo.core.COO.nnz", "sparse/coo/core.py::sparse.coo.core.COO.prod", "sparse/coo/core.py::sparse.coo.core.COO.reduce", "sparse/coo/core.py::sparse.coo.core.COO.reshape", "sparse/coo/core.py::sparse.coo.core.COO.sort_indices", "sparse/coo/core.py::sparse.coo.core.COO.sum", "sparse/coo/core.py::sparse.coo.core.COO.sum_duplicates", "sparse/coo/core.py::sparse.coo.core.COO.todense", "sparse/coo/core.py::sparse.coo.core.COO.transpose", "sparse/tests/test_coo.py::test_sparse_broadcasting", "sparse/tests/test_coo.py::test_dense_broadcasting", "sparse/tests/test_dok.py::test_setitem[shape0-index0-0.1995501122045349]", "sparse/tests/test_dok.py::test_setitem[shape1-index1-0.529689488271047]", "sparse/tests/test_dok.py::test_setitem[shape3-1-0.31064007628728896]", "sparse/tests/test_dok.py::test_setitem[shape4-index4-0.3563093922627718]", "sparse/tests/test_dok.py::test_setitem[shape5-index5-0.8073559024979022]", "sparse/tests/test_dok.py::test_setitem[shape9-index9-0.4950109236013853]", "sparse/tests/test_dok.py::test_setitem[shape11-index11-0.1117528158771176]", "sparse/tests/test_dok.py::test_setitem[shape13-index13-0.9448236788746877]" ]
[ "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func2]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func3]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func4]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func5]" ]
[ "sparse/dok.py::sparse.dok.DOK", "sparse/dok.py::sparse.dok.DOK.from_coo", "sparse/dok.py::sparse.dok.DOK.from_numpy", "sparse/dok.py::sparse.dok.DOK.nnz", "sparse/dok.py::sparse.dok.DOK.to_coo", "sparse/dok.py::sparse.dok.DOK.todense", "sparse/slicing.py::sparse.slicing.check_index", "sparse/slicing.py::sparse.slicing.normalize_index", "sparse/slicing.py::sparse.slicing.normalize_slice", "sparse/slicing.py::sparse.slicing.posify_index", "sparse/slicing.py::sparse.slicing.replace_ellipsis", "sparse/slicing.py::sparse.slicing.sanitize_index", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.density", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.ndim", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.nnz", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.size", "sparse/utils.py::sparse.utils.random", "sparse/tests/test_coo.py::test_reductions[True-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True--3-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True--3-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True--3-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True--3-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True--3-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-axis6-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-axis6-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-axis6-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-axis6-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-axis6-min-kwargs4-eqkwargs4]", 
"sparse/tests/test_coo.py::test_reductions[False-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False--3-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False--3-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False--3-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False--3-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False--3-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-axis6-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-axis6-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-axis6-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-axis6-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-axis6-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-prod-kwargs3-eqkwargs3]", 
"sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[amax-kwargs0]", 
"sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[sum-kwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[prod-kwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[reduce-kwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[reduce-kwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[reduce-kwargs5]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nansum]", 
"sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nanmin]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[None-nanmax]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[None-nanmin]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[0-nanmax]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[0-nanmin]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[1-nanmax]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[1-nanmin]", "sparse/tests/test_coo.py::test_transpose[None]", "sparse/tests/test_coo.py::test_transpose[axis1]", "sparse/tests/test_coo.py::test_transpose[axis2]", "sparse/tests/test_coo.py::test_transpose[axis3]", "sparse/tests/test_coo.py::test_transpose[axis4]", "sparse/tests/test_coo.py::test_transpose[axis5]", "sparse/tests/test_coo.py::test_transpose[axis6]", "sparse/tests/test_coo.py::test_transpose_error[axis0]", "sparse/tests/test_coo.py::test_transpose_error[axis1]", "sparse/tests/test_coo.py::test_transpose_error[axis2]", "sparse/tests/test_coo.py::test_transpose_error[axis3]", "sparse/tests/test_coo.py::test_transpose_error[axis4]", "sparse/tests/test_coo.py::test_transpose_error[axis5]", "sparse/tests/test_coo.py::test_reshape[a0-b0]", "sparse/tests/test_coo.py::test_reshape[a1-b1]", "sparse/tests/test_coo.py::test_reshape[a2-b2]", "sparse/tests/test_coo.py::test_reshape[a3-b3]", "sparse/tests/test_coo.py::test_reshape[a4-b4]", "sparse/tests/test_coo.py::test_reshape[a5-b5]", "sparse/tests/test_coo.py::test_reshape[a6-b6]", "sparse/tests/test_coo.py::test_reshape[a7-b7]", "sparse/tests/test_coo.py::test_reshape[a8-b8]", "sparse/tests/test_coo.py::test_reshape[a9-b9]", "sparse/tests/test_coo.py::test_large_reshape", "sparse/tests/test_coo.py::test_reshape_same", "sparse/tests/test_coo.py::test_to_scipy_sparse", "sparse/tests/test_coo.py::test_tensordot[a_shape0-b_shape0-axes0]", "sparse/tests/test_coo.py::test_tensordot[a_shape1-b_shape1-axes1]", "sparse/tests/test_coo.py::test_tensordot[a_shape2-b_shape2-axes2]", "sparse/tests/test_coo.py::test_tensordot[a_shape3-b_shape3-axes3]", "sparse/tests/test_coo.py::test_tensordot[a_shape4-b_shape4-axes4]", "sparse/tests/test_coo.py::test_tensordot[a_shape5-b_shape5-axes5]", "sparse/tests/test_coo.py::test_tensordot[a_shape6-b_shape6-axes6]", "sparse/tests/test_coo.py::test_tensordot[a_shape7-b_shape7-axes7]", "sparse/tests/test_coo.py::test_tensordot[a_shape8-b_shape8-axes8]", "sparse/tests/test_coo.py::test_tensordot[a_shape9-b_shape9-0]", "sparse/tests/test_coo.py::test_dot[a_shape0-b_shape0]", "sparse/tests/test_coo.py::test_dot[a_shape1-b_shape1]", "sparse/tests/test_coo.py::test_dot[a_shape2-b_shape2]", "sparse/tests/test_coo.py::test_dot[a_shape3-b_shape3]", "sparse/tests/test_coo.py::test_dot[a_shape4-b_shape4]", "sparse/tests/test_coo.py::test_elemwise[expm1]", "sparse/tests/test_coo.py::test_elemwise[log1p]", "sparse/tests/test_coo.py::test_elemwise[sin]", "sparse/tests/test_coo.py::test_elemwise[tan]", "sparse/tests/test_coo.py::test_elemwise[sinh]", "sparse/tests/test_coo.py::test_elemwise[tanh]", "sparse/tests/test_coo.py::test_elemwise[floor]", "sparse/tests/test_coo.py::test_elemwise[ceil]", "sparse/tests/test_coo.py::test_elemwise[sqrt]", "sparse/tests/test_coo.py::test_elemwise[conjugate0]", "sparse/tests/test_coo.py::test_elemwise[round_]", "sparse/tests/test_coo.py::test_elemwise[rint]", 
"sparse/tests/test_coo.py::test_elemwise[<lambda>0]", "sparse/tests/test_coo.py::test_elemwise[conjugate1]", "sparse/tests/test_coo.py::test_elemwise[conjugate2]", "sparse/tests/test_coo.py::test_elemwise[<lambda>1]", "sparse/tests/test_coo.py::test_elemwise[abs]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-ne]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>3]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>3]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>3]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>3]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape10-shape20-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape10-shape20-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape11-shape21-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape11-shape21-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape12-shape22-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape12-shape22-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape13-shape23-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape13-shape23-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape14-shape24-add]", 
"sparse/tests/test_coo.py::test_binary_broadcasting[shape14-shape24-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape15-shape25-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape15-shape25-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape16-shape26-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape16-shape26-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape17-shape27-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape17-shape27-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape18-shape28-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape18-shape28-mul]", "sparse/tests/test_coo.py::test_broadcast_to[shape10-shape20]", "sparse/tests/test_coo.py::test_broadcast_to[shape11-shape21]", "sparse/tests/test_coo.py::test_broadcast_to[shape12-shape22]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes3]", 
"sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes3-<lambda>]", 
"sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_sparsearray_elemwise[coo]", "sparse/tests/test_coo.py::test_sparsearray_elemwise[dok]", "sparse/tests/test_coo.py::test_ndarray_densification_fails", "sparse/tests/test_coo.py::test_elemwise_noargs", "sparse/tests/test_coo.py::test_auto_densification_fails[pow]", "sparse/tests/test_coo.py::test_auto_densification_fails[truediv]", "sparse/tests/test_coo.py::test_auto_densification_fails[floordiv]", "sparse/tests/test_coo.py::test_auto_densification_fails[ge]", "sparse/tests/test_coo.py::test_auto_densification_fails[le]", "sparse/tests/test_coo.py::test_auto_densification_fails[eq]", "sparse/tests/test_coo.py::test_auto_densification_fails[mod]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mod-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-add-0]", 
"sparse/tests/test_coo.py::test_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mod-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[add-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[sub--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[pow--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[truediv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[floordiv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[gt--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[lt-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ne-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ge--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[le-3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[eq-0]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-xor]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-lshift]", 
"sparse/tests/test_coo.py::test_bitshift_binary[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape3-and_]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-rshift]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape0-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape1-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape2-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape3-invert]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-xor]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-and_]", 
"sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-xor]", "sparse/tests/test_coo.py::test_elemwise_binary_empty", "sparse/tests/test_coo.py::test_gt", "sparse/tests/test_coo.py::test_slicing[0]", "sparse/tests/test_coo.py::test_slicing[1]", "sparse/tests/test_coo.py::test_slicing[-1]", "sparse/tests/test_coo.py::test_slicing[index3]", "sparse/tests/test_coo.py::test_slicing[index4]", "sparse/tests/test_coo.py::test_slicing[index5]", "sparse/tests/test_coo.py::test_slicing[index6]", "sparse/tests/test_coo.py::test_slicing[index7]", "sparse/tests/test_coo.py::test_slicing[index8]", "sparse/tests/test_coo.py::test_slicing[index9]", "sparse/tests/test_coo.py::test_slicing[index10]", "sparse/tests/test_coo.py::test_slicing[index11]", "sparse/tests/test_coo.py::test_slicing[index12]", "sparse/tests/test_coo.py::test_slicing[index13]", "sparse/tests/test_coo.py::test_slicing[index14]", "sparse/tests/test_coo.py::test_slicing[index15]", "sparse/tests/test_coo.py::test_slicing[index16]", "sparse/tests/test_coo.py::test_slicing[index17]", "sparse/tests/test_coo.py::test_slicing[index18]", "sparse/tests/test_coo.py::test_slicing[index19]", "sparse/tests/test_coo.py::test_slicing[index20]", "sparse/tests/test_coo.py::test_slicing[index21]", "sparse/tests/test_coo.py::test_slicing[index22]", "sparse/tests/test_coo.py::test_slicing[index23]", "sparse/tests/test_coo.py::test_slicing[index24]", "sparse/tests/test_coo.py::test_slicing[index25]", "sparse/tests/test_coo.py::test_slicing[index26]", "sparse/tests/test_coo.py::test_slicing[index27]", "sparse/tests/test_coo.py::test_slicing[index28]", "sparse/tests/test_coo.py::test_slicing[index29]", "sparse/tests/test_coo.py::test_slicing[index30]", "sparse/tests/test_coo.py::test_slicing[index31]", "sparse/tests/test_coo.py::test_slicing[index32]", "sparse/tests/test_coo.py::test_slicing[index33]", "sparse/tests/test_coo.py::test_slicing[index34]", "sparse/tests/test_coo.py::test_slicing[index35]", "sparse/tests/test_coo.py::test_slicing[index36]", "sparse/tests/test_coo.py::test_slicing[index37]", "sparse/tests/test_coo.py::test_slicing[index38]", "sparse/tests/test_coo.py::test_slicing[index39]", "sparse/tests/test_coo.py::test_slicing[index40]", "sparse/tests/test_coo.py::test_slicing[index41]", "sparse/tests/test_coo.py::test_slicing[index42]", "sparse/tests/test_coo.py::test_slicing[index43]", "sparse/tests/test_coo.py::test_slicing[index44]", "sparse/tests/test_coo.py::test_custom_dtype_slicing", "sparse/tests/test_coo.py::test_slicing_errors[index0]", "sparse/tests/test_coo.py::test_slicing_errors[index1]", "sparse/tests/test_coo.py::test_slicing_errors[index2]", "sparse/tests/test_coo.py::test_slicing_errors[5]", "sparse/tests/test_coo.py::test_slicing_errors[-5]", "sparse/tests/test_coo.py::test_slicing_errors[foo]", "sparse/tests/test_coo.py::test_slicing_errors[index6]", "sparse/tests/test_coo.py::test_slicing_errors[0.5]", "sparse/tests/test_coo.py::test_slicing_errors[index8]", "sparse/tests/test_coo.py::test_slicing_errors[index9]", "sparse/tests/test_coo.py::test_canonical", "sparse/tests/test_coo.py::test_concatenate", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-1]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-1]", "sparse/tests/test_coo.py::test_stack[0-shape0]", "sparse/tests/test_coo.py::test_stack[0-shape1]", 
"sparse/tests/test_coo.py::test_stack[0-shape2]", "sparse/tests/test_coo.py::test_stack[1-shape0]", "sparse/tests/test_coo.py::test_stack[1-shape1]", "sparse/tests/test_coo.py::test_stack[1-shape2]", "sparse/tests/test_coo.py::test_stack[-1-shape0]", "sparse/tests/test_coo.py::test_stack[-1-shape1]", "sparse/tests/test_coo.py::test_stack[-1-shape2]", "sparse/tests/test_coo.py::test_large_concat_stack", "sparse/tests/test_coo.py::test_coord_dtype", "sparse/tests/test_coo.py::test_addition", "sparse/tests/test_coo.py::test_addition_not_ok_when_large_and_sparse", "sparse/tests/test_coo.py::test_scalar_multiplication[2]", "sparse/tests/test_coo.py::test_scalar_multiplication[2.5]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar2]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar3]", "sparse/tests/test_coo.py::test_scalar_exponentiation", "sparse/tests/test_coo.py::test_create_with_lists_of_tuples", "sparse/tests/test_coo.py::test_sizeof", "sparse/tests/test_coo.py::test_scipy_sparse_interface", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[coo]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csr]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[dok]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csc]", "sparse/tests/test_coo.py::test_op_scipy_sparse[mul]", "sparse/tests/test_coo.py::test_op_scipy_sparse[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse[sub]", "sparse/tests/test_coo.py::test_op_scipy_sparse[gt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[lt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[ne]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[sub]", "sparse/tests/test_coo.py::test_cache_csr", "sparse/tests/test_coo.py::test_empty_shape", "sparse/tests/test_coo.py::test_single_dimension", "sparse/tests/test_coo.py::test_raise_dense", "sparse/tests/test_coo.py::test_large_sum", "sparse/tests/test_coo.py::test_add_many_sparse_arrays", "sparse/tests/test_coo.py::test_caching", "sparse/tests/test_coo.py::test_scalar_slicing", "sparse/tests/test_coo.py::test_triul[shape0-0]", "sparse/tests/test_coo.py::test_triul[shape1-1]", "sparse/tests/test_coo.py::test_triul[shape2--1]", "sparse/tests/test_coo.py::test_triul[shape3--2]", "sparse/tests/test_coo.py::test_triul[shape4-1000]", "sparse/tests/test_coo.py::test_empty_reduction", "sparse/tests/test_coo.py::test_random_shape[0.1-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape2]", "sparse/tests/test_coo.py::test_two_random_unequal", "sparse/tests/test_coo.py::test_two_random_same_seed", "sparse/tests/test_coo.py::test_random_sorted", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-<lambda>-bool]", 
"sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_scalar_shape_construction", "sparse/tests/test_coo.py::test_len", "sparse/tests/test_coo.py::test_density", "sparse/tests/test_coo.py::test_size", "sparse/tests/test_coo.py::test_np_array", "sparse/tests/test_coo.py::test_three_arg_where[shapes0]", "sparse/tests/test_coo.py::test_three_arg_where[shapes1]", "sparse/tests/test_coo.py::test_three_arg_where[shapes2]", "sparse/tests/test_coo.py::test_three_arg_where[shapes3]", "sparse/tests/test_coo.py::test_three_arg_where[shapes4]", "sparse/tests/test_coo.py::test_three_arg_where[shapes5]", "sparse/tests/test_coo.py::test_three_arg_where[shapes6]", "sparse/tests/test_coo.py::test_three_arg_where[shapes7]", "sparse/tests/test_coo.py::test_one_arg_where", "sparse/tests/test_coo.py::test_one_arg_where_dense", "sparse/tests/test_coo.py::test_two_arg_where", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape2]", "sparse/tests/test_dok.py::test_convert_to_coo", "sparse/tests/test_dok.py::test_convert_from_coo", "sparse/tests/test_dok.py::test_convert_from_numpy", "sparse/tests/test_dok.py::test_convert_to_numpy", "sparse/tests/test_dok.py::test_construct[2-data0]", "sparse/tests/test_dok.py::test_construct[shape1-data1]", "sparse/tests/test_dok.py::test_construct[shape2-data2]", "sparse/tests/test_dok.py::test_getitem[0.1-shape0]", "sparse/tests/test_dok.py::test_getitem[0.1-shape1]", 
"sparse/tests/test_dok.py::test_getitem[0.1-shape2]", "sparse/tests/test_dok.py::test_getitem[0.3-shape0]", "sparse/tests/test_dok.py::test_getitem[0.3-shape1]", "sparse/tests/test_dok.py::test_getitem[0.3-shape2]", "sparse/tests/test_dok.py::test_getitem[0.5-shape0]", "sparse/tests/test_dok.py::test_getitem[0.5-shape1]", "sparse/tests/test_dok.py::test_getitem[0.5-shape2]", "sparse/tests/test_dok.py::test_getitem[0.7-shape0]", "sparse/tests/test_dok.py::test_getitem[0.7-shape1]", "sparse/tests/test_dok.py::test_getitem[0.7-shape2]", "sparse/tests/test_dok.py::test_setitem[shape2-index2-value2]", "sparse/tests/test_dok.py::test_setitem[shape6-index6-value6]", "sparse/tests/test_dok.py::test_setitem[shape7-index7-value7]", "sparse/tests/test_dok.py::test_setitem[shape8-index8-value8]", "sparse/tests/test_dok.py::test_setitem[shape10-index10-value10]", "sparse/tests/test_dok.py::test_setitem[shape12-index12-value12]", "sparse/tests/test_dok.py::test_default_dtype", "sparse/tests/test_dok.py::test_int_dtype", "sparse/tests/test_dok.py::test_float_dtype", "sparse/tests/test_dok.py::test_set_zero" ]
[]
BSD 3-Clause "New" or "Revised" License
2,238
[ "sparse/coo/common.py", "docs/generated/sparse.nanreduce.rst", "sparse/coo.py", "docs/generated/sparse.COO.rst", "docs/generated/sparse.COO.nanreduce.rst", "sparse/coo/__init__.py", "sparse/coo/umath.py", ".travis.yml", "docs/generated/sparse.rst", "sparse/compatibility.py", "sparse/__init__.py" ]
[ "sparse/coo/common.py", "docs/generated/sparse.nanreduce.rst", "docs/generated/sparse.COO.rst", "docs/generated/sparse.COO.nanreduce.rst", "sparse/coo/__init__.py", "sparse/coo/umath.py", ".travis.yml", "docs/generated/sparse.rst", "sparse/compatibility.py", "sparse/coo/core.py", "sparse/__init__.py" ]
pydata__sparse-118
97d5690116f499f43e79c3debfe7d7881c8b10ba
2018-03-02 10:39:36
b03b6b9a480a10a3cf59d7994292b9c5d3015cd5
codecov-io: # [Codecov](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=h1) Report
> Merging [#118](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=desc) into [master](https://codecov.io/gh/pydata/sparse/commit/97d5690116f499f43e79c3debfe7d7881c8b10ba?src=pr&el=desc) will **increase** coverage by `<.01%`.
> The diff coverage is `100%`.

[![Impacted file tree graph](https://codecov.io/gh/pydata/sparse/pull/118/graphs/tree.svg?height=150&width=650&token=H212u0Uxxw&src=pr)](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=tree)

```diff
@@           Coverage Diff            @@
##           master     #118    +/-   ##
==========================================
+ Coverage   96.04%   96.04%   +<.01%
==========================================
  Files          11       11
  Lines        1896     1897       +1
==========================================
+ Hits         1821     1822       +1
  Misses         75       75
```

| Flag | Coverage Δ | |
|---|---|---|
| #python27 | `95.15% <100%> (ø)` | :arrow_up: |
| #python36 | `96.04% <100%> (ø)` | :arrow_up: |

| [Impacted Files](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [sparse/coo.py](https://codecov.io/gh/pydata/sparse/pull/118/diff?src=pr&el=tree#diff-c3BhcnNlL2Nvby5weQ==) | `95.05% <100%> (ø)` | :arrow_up: |
| [sparse/tests/test\_coo.py](https://codecov.io/gh/pydata/sparse/pull/118/diff?src=pr&el=tree#diff-c3BhcnNlL3Rlc3RzL3Rlc3RfY29vLnB5) | `99.83% <100%> (ø)` | :arrow_up: |

------

[Continue to review full report at Codecov](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=footer). Last update [97d5690...2e470ae](https://codecov.io/gh/pydata/sparse/pull/118?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/sparse/coo.py b/sparse/coo.py
index c305802..5100167 100644
--- a/sparse/coo.py
+++ b/sparse/coo.py
@@ -712,13 +712,15 @@ class COO(SparseArray, NDArrayOperatorsMixin):
         if not isinstance(axis, tuple):
             axis = (axis,)
 
+        axis = tuple(a if a >= 0 else a + self.ndim for a in axis)
+
         if set(axis) == set(range(self.ndim)):
             result = method.reduce(self.data, **kwargs)
             if self.nnz != self.size:
                 result = method(result, _zero_of_dtype(self.dtype)[()], **kwargs)
         else:
             axis = tuple(axis)
-            neg_axis = tuple(ax for ax in range(self.ndim) if ax not in axis)
+            neg_axis = tuple(ax for ax in range(self.ndim) if ax not in set(axis))
 
             a = self.transpose(neg_axis + axis)
             a = a.reshape((np.prod([self.shape[d] for d in neg_axis]),
Reduce operations raise error for negative axis values

Reduce operations like `.sum()`, `.prod()` etc. raise exceptions when you pass a negative `axis` value:

```python
import sparse

sparse.random((40, 50)).sum(0)   # works
sparse.random((40, 50)).sum(1)   # works
sparse.random((40, 50)).sum(-1)  # fails, should be equivalent to .sum(1)
```

The traceback I get:

```
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "proj_dir/sparse/sparse/coo.py", line 1038, in prod
    return self.reduce(np.multiply, axis=axis, keepdims=keepdims, dtype=dtype)
  File "proj_dir/sparse/sparse/coo.py", line 771, in reduce
    a = self.transpose(neg_axis + axis)
  File "proj_dir/sparse/sparse/coo.py", line 1099, in transpose
    raise ValueError("repeated axis in transpose")
ValueError: repeated axis in transpose
```
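As a reading aid for this record, here is a minimal standalone sketch (not part of the original issue or patch) of why a negative axis trips the old `reduce` logic and how the normalization added in the patch above avoids it; the `ndim` and `axis` values are purely illustrative.

```python
# Illustrative values: a 2-D array reduced over axis -1.
ndim = 2
axis = (-1,)

# Old logic: -1 never matches anything in range(ndim), so every positive
# axis lands in neg_axis and the transpose order names axis 1 twice
# (once as 1, once as -1) -> "repeated axis in transpose".
neg_axis = tuple(ax for ax in range(ndim) if ax not in axis)
print(neg_axis + axis)  # (0, 1, -1)

# Patched logic: fold negative axes into their positive equivalents first,
# which yields a valid permutation for transpose.
axis = tuple(a if a >= 0 else a + ndim for a in axis)
neg_axis = tuple(ax for ax in range(ndim) if ax not in set(axis))
print(neg_axis + axis)  # (0, 1)
```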
pydata/sparse
diff --git a/sparse/tests/test_coo.py b/sparse/tests/test_coo.py
index f03742b..8c4a767 100644
--- a/sparse/tests/test_coo.py
+++ b/sparse/tests/test_coo.py
@@ -19,7 +19,7 @@ from sparse.utils import assert_eq, is_lexsorted, random_value_array
     ('prod', {}, {}),
     ('min', {}, {}),
 ])
[email protected]('axis', [None, 0, 1, 2, (0, 2)])
[email protected]('axis', [None, 0, 1, 2, (0, 2), -3, (1, -1)])
 @pytest.mark.parametrize('keepdims', [True, False])
 def test_reductions(reduction, axis, keepdims, kwargs, eqkwargs):
     x = sparse.random((2, 3, 4), density=.25)
{ "commit_name": "merge_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[all]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-flake8" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 distlib==0.3.9 docutils==0.17.1 filelock==3.4.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 importlib-resources==5.4.0 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 numpy==1.19.5 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 pockets==0.9.1 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-flake8==1.1.1 pytz==2025.2 requests==2.27.1 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 -e git+https://github.com/pydata/sparse.git@97d5690116f499f43e79c3debfe7d7881c8b10ba#egg=sparse Sphinx==4.3.2 sphinx-rtd-theme==1.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-napoleon==0.7 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 zipp==3.6.0
name: sparse channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - distlib==0.3.9 - docutils==0.17.1 - filelock==3.4.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - importlib-resources==5.4.0 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - numpy==1.19.5 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - pockets==0.9.1 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-flake8==1.1.1 - pytz==2025.2 - requests==2.27.1 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-rtd-theme==1.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-napoleon==0.7 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - zipp==3.6.0 prefix: /opt/conda/envs/sparse
[ "sparse/tests/test_coo.py::test_reductions[True--3-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True--3-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True--3-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True--3-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True--3-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-axis6-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-axis6-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-axis6-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-axis6-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-axis6-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False--3-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False--3-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False--3-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False--3-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False--3-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-axis6-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-axis6-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-axis6-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-axis6-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-axis6-min-kwargs4-eqkwargs4]", "sparse/tests/test_dok.py::test_setitem[shape0-index0-0.7310079324091565]", "sparse/tests/test_dok.py::test_setitem[shape1-index1-0.9397650048613811]", "sparse/tests/test_dok.py::test_setitem[shape3-1-0.913702328682973]", "sparse/tests/test_dok.py::test_setitem[shape4-index4-0.41929456607776927]", "sparse/tests/test_dok.py::test_setitem[shape5-index5-0.14437558963401265]", "sparse/tests/test_dok.py::test_setitem[shape9-index9-0.009333677970012055]", "sparse/tests/test_dok.py::test_setitem[shape11-index11-0.0986223103134295]", "sparse/tests/test_dok.py::test_setitem[shape13-index13-0.8151000944026473]" ]
[ "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func2]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func3]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func4]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func5]" ]
[ "sparse/coo.py::sparse.coo.COO", "sparse/coo.py::sparse.coo.COO.T", "sparse/coo.py::sparse.coo.COO.__len__", "sparse/coo.py::sparse.coo.COO.dot", "sparse/coo.py::sparse.coo.COO.dtype", "sparse/coo.py::sparse.coo.COO.from_numpy", "sparse/coo.py::sparse.coo.COO.from_scipy_sparse", "sparse/coo.py::sparse.coo.COO.linear_loc", "sparse/coo.py::sparse.coo.COO.max", "sparse/coo.py::sparse.coo.COO.maybe_densify", "sparse/coo.py::sparse.coo.COO.min", "sparse/coo.py::sparse.coo.COO.nbytes", "sparse/coo.py::sparse.coo.COO.nnz", "sparse/coo.py::sparse.coo.COO.prod", "sparse/coo.py::sparse.coo.COO.reduce", "sparse/coo.py::sparse.coo.COO.reshape", "sparse/coo.py::sparse.coo.COO.sort_indices", "sparse/coo.py::sparse.coo.COO.sum", "sparse/coo.py::sparse.coo.COO.sum_duplicates", "sparse/coo.py::sparse.coo.COO.todense", "sparse/coo.py::sparse.coo.COO.transpose", "sparse/dok.py::sparse.dok.DOK", "sparse/dok.py::sparse.dok.DOK.from_coo", "sparse/dok.py::sparse.dok.DOK.from_numpy", "sparse/dok.py::sparse.dok.DOK.nnz", "sparse/dok.py::sparse.dok.DOK.to_coo", "sparse/dok.py::sparse.dok.DOK.todense", "sparse/slicing.py::sparse.slicing.check_index", "sparse/slicing.py::sparse.slicing.normalize_index", "sparse/slicing.py::sparse.slicing.normalize_slice", "sparse/slicing.py::sparse.slicing.posify_index", "sparse/slicing.py::sparse.slicing.replace_ellipsis", "sparse/slicing.py::sparse.slicing.sanitize_index", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.density", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.ndim", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.nnz", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.size", "sparse/utils.py::sparse.utils.random", "sparse/tests/test_coo.py::test_reductions[True-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-axis4-prod-kwargs3-eqkwargs3]", 
"sparse/tests/test_coo.py::test_reductions[True-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs2-eqkwargs2]", 
"sparse/tests/test_coo.py::test_ufunc_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[amax-kwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[sum-kwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[prod-kwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[reduce-kwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[reduce-kwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions_kwargs[reduce-kwargs5]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nansum]", 
"sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.25-False-1-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.5-False-1-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[0.75-False-1-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-None-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-0-nanmin]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nansum]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nanprod]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nanmax]", "sparse/tests/test_coo.py::test_nan_reductions[1.0-False-1-nanmin]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[None-nanmax]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[None-nanmin]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[0-nanmax]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[0-nanmin]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[1-nanmax]", "sparse/tests/test_coo.py::test_all_nan_reduction_warning[1-nanmin]", "sparse/tests/test_coo.py::test_transpose[None]", 
"sparse/tests/test_coo.py::test_transpose[axis1]", "sparse/tests/test_coo.py::test_transpose[axis2]", "sparse/tests/test_coo.py::test_transpose[axis3]", "sparse/tests/test_coo.py::test_transpose[axis4]", "sparse/tests/test_coo.py::test_transpose[axis5]", "sparse/tests/test_coo.py::test_transpose[axis6]", "sparse/tests/test_coo.py::test_transpose_error[axis0]", "sparse/tests/test_coo.py::test_transpose_error[axis1]", "sparse/tests/test_coo.py::test_transpose_error[axis2]", "sparse/tests/test_coo.py::test_transpose_error[axis3]", "sparse/tests/test_coo.py::test_transpose_error[axis4]", "sparse/tests/test_coo.py::test_transpose_error[axis5]", "sparse/tests/test_coo.py::test_reshape[a0-b0]", "sparse/tests/test_coo.py::test_reshape[a1-b1]", "sparse/tests/test_coo.py::test_reshape[a2-b2]", "sparse/tests/test_coo.py::test_reshape[a3-b3]", "sparse/tests/test_coo.py::test_reshape[a4-b4]", "sparse/tests/test_coo.py::test_reshape[a5-b5]", "sparse/tests/test_coo.py::test_reshape[a6-b6]", "sparse/tests/test_coo.py::test_reshape[a7-b7]", "sparse/tests/test_coo.py::test_reshape[a8-b8]", "sparse/tests/test_coo.py::test_reshape[a9-b9]", "sparse/tests/test_coo.py::test_large_reshape", "sparse/tests/test_coo.py::test_reshape_same", "sparse/tests/test_coo.py::test_to_scipy_sparse", "sparse/tests/test_coo.py::test_tensordot[a_shape0-b_shape0-axes0]", "sparse/tests/test_coo.py::test_tensordot[a_shape1-b_shape1-axes1]", "sparse/tests/test_coo.py::test_tensordot[a_shape2-b_shape2-axes2]", "sparse/tests/test_coo.py::test_tensordot[a_shape3-b_shape3-axes3]", "sparse/tests/test_coo.py::test_tensordot[a_shape4-b_shape4-axes4]", "sparse/tests/test_coo.py::test_tensordot[a_shape5-b_shape5-axes5]", "sparse/tests/test_coo.py::test_tensordot[a_shape6-b_shape6-axes6]", "sparse/tests/test_coo.py::test_tensordot[a_shape7-b_shape7-axes7]", "sparse/tests/test_coo.py::test_tensordot[a_shape8-b_shape8-axes8]", "sparse/tests/test_coo.py::test_tensordot[a_shape9-b_shape9-0]", "sparse/tests/test_coo.py::test_dot[a_shape0-b_shape0]", "sparse/tests/test_coo.py::test_dot[a_shape1-b_shape1]", "sparse/tests/test_coo.py::test_dot[a_shape2-b_shape2]", "sparse/tests/test_coo.py::test_dot[a_shape3-b_shape3]", "sparse/tests/test_coo.py::test_dot[a_shape4-b_shape4]", "sparse/tests/test_coo.py::test_elemwise[expm1]", "sparse/tests/test_coo.py::test_elemwise[log1p]", "sparse/tests/test_coo.py::test_elemwise[sin]", "sparse/tests/test_coo.py::test_elemwise[tan]", "sparse/tests/test_coo.py::test_elemwise[sinh]", "sparse/tests/test_coo.py::test_elemwise[tanh]", "sparse/tests/test_coo.py::test_elemwise[floor]", "sparse/tests/test_coo.py::test_elemwise[ceil]", "sparse/tests/test_coo.py::test_elemwise[sqrt]", "sparse/tests/test_coo.py::test_elemwise[conjugate0]", "sparse/tests/test_coo.py::test_elemwise[round_]", "sparse/tests/test_coo.py::test_elemwise[rint]", "sparse/tests/test_coo.py::test_elemwise[<lambda>0]", "sparse/tests/test_coo.py::test_elemwise[conjugate1]", "sparse/tests/test_coo.py::test_elemwise[conjugate2]", "sparse/tests/test_coo.py::test_elemwise[<lambda>1]", "sparse/tests/test_coo.py::test_elemwise[abs]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-mul]", 
"sparse/tests/test_coo.py::test_elemwise_binary[shape1-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-ne]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape0-<lambda>3]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape1-<lambda>3]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape2-<lambda>3]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>0]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>1]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>2]", "sparse/tests/test_coo.py::test_elemwise_trinary[shape3-<lambda>3]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape10-shape20-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape10-shape20-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape11-shape21-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape11-shape21-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape12-shape22-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape12-shape22-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape13-shape23-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape13-shape23-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape14-shape24-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape14-shape24-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape15-shape25-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape15-shape25-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape16-shape26-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape16-shape26-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape17-shape27-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape17-shape27-mul]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape18-shape28-add]", "sparse/tests/test_coo.py::test_binary_broadcasting[shape18-shape28-mul]", 
"sparse/tests/test_coo.py::test_broadcast_to[shape10-shape20]", "sparse/tests/test_coo.py::test_broadcast_to[shape11-shape21]", "sparse/tests/test_coo.py::test_broadcast_to[shape12-shape22]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>0-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>1-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>2-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>3-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes4]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>4-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes0]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes1]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes2]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes3]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes4]", 
"sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes5]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes6]", "sparse/tests/test_coo.py::test_trinary_broadcasting[<lambda>5-shapes7]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.25--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.5--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes2-<lambda>]", 
"sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[0.75--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-nan-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0-inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes0-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes1-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes2-<lambda>]", "sparse/tests/test_coo.py::test_trinary_broadcasting_pathological[1.0--inf-shapes3-<lambda>]", "sparse/tests/test_coo.py::test_sparse_broadcasting", "sparse/tests/test_coo.py::test_dense_broadcasting", "sparse/tests/test_coo.py::test_sparsearray_elemwise[coo]", "sparse/tests/test_coo.py::test_sparsearray_elemwise[dok]", "sparse/tests/test_coo.py::test_ndarray_densification_fails", "sparse/tests/test_coo.py::test_elemwise_noargs", "sparse/tests/test_coo.py::test_auto_densification_fails[pow]", "sparse/tests/test_coo.py::test_auto_densification_fails[truediv]", "sparse/tests/test_coo.py::test_auto_densification_fails[floordiv]", "sparse/tests/test_coo.py::test_auto_densification_fails[ge]", "sparse/tests/test_coo.py::test_auto_densification_fails[le]", "sparse/tests/test_coo.py::test_auto_densification_fails[eq]", "sparse/tests/test_coo.py::test_auto_densification_fails[mod]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mod-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-le--3]", 
"sparse/tests/test_coo.py::test_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mod-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[add-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[sub--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[pow--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[truediv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[floordiv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[gt--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[lt-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ne-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ge--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[le-3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[eq-0]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-xor]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape1-and_]", 
"sparse/tests/test_coo.py::test_bitwise_scalar[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape3-and_]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-rshift]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape0-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape1-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape2-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape3-invert]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-xor]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-xor]", "sparse/tests/test_coo.py::test_elemwise_binary_empty", "sparse/tests/test_coo.py::test_gt", "sparse/tests/test_coo.py::test_slicing[0]", "sparse/tests/test_coo.py::test_slicing[1]", "sparse/tests/test_coo.py::test_slicing[-1]", "sparse/tests/test_coo.py::test_slicing[index3]", "sparse/tests/test_coo.py::test_slicing[index4]", "sparse/tests/test_coo.py::test_slicing[index5]", "sparse/tests/test_coo.py::test_slicing[index6]", 
"sparse/tests/test_coo.py::test_slicing[index7]", "sparse/tests/test_coo.py::test_slicing[index8]", "sparse/tests/test_coo.py::test_slicing[index9]", "sparse/tests/test_coo.py::test_slicing[index10]", "sparse/tests/test_coo.py::test_slicing[index11]", "sparse/tests/test_coo.py::test_slicing[index12]", "sparse/tests/test_coo.py::test_slicing[index13]", "sparse/tests/test_coo.py::test_slicing[index14]", "sparse/tests/test_coo.py::test_slicing[index15]", "sparse/tests/test_coo.py::test_slicing[index16]", "sparse/tests/test_coo.py::test_slicing[index17]", "sparse/tests/test_coo.py::test_slicing[index18]", "sparse/tests/test_coo.py::test_slicing[index19]", "sparse/tests/test_coo.py::test_slicing[index20]", "sparse/tests/test_coo.py::test_slicing[index21]", "sparse/tests/test_coo.py::test_slicing[index22]", "sparse/tests/test_coo.py::test_slicing[index23]", "sparse/tests/test_coo.py::test_slicing[index24]", "sparse/tests/test_coo.py::test_slicing[index25]", "sparse/tests/test_coo.py::test_slicing[index26]", "sparse/tests/test_coo.py::test_slicing[index27]", "sparse/tests/test_coo.py::test_slicing[index28]", "sparse/tests/test_coo.py::test_slicing[index29]", "sparse/tests/test_coo.py::test_slicing[index30]", "sparse/tests/test_coo.py::test_slicing[index31]", "sparse/tests/test_coo.py::test_slicing[index32]", "sparse/tests/test_coo.py::test_slicing[index33]", "sparse/tests/test_coo.py::test_slicing[index34]", "sparse/tests/test_coo.py::test_slicing[index35]", "sparse/tests/test_coo.py::test_slicing[index36]", "sparse/tests/test_coo.py::test_slicing[index37]", "sparse/tests/test_coo.py::test_slicing[index38]", "sparse/tests/test_coo.py::test_slicing[index39]", "sparse/tests/test_coo.py::test_slicing[index40]", "sparse/tests/test_coo.py::test_slicing[index41]", "sparse/tests/test_coo.py::test_slicing[index42]", "sparse/tests/test_coo.py::test_slicing[index43]", "sparse/tests/test_coo.py::test_slicing[index44]", "sparse/tests/test_coo.py::test_custom_dtype_slicing", "sparse/tests/test_coo.py::test_slicing_errors[index0]", "sparse/tests/test_coo.py::test_slicing_errors[index1]", "sparse/tests/test_coo.py::test_slicing_errors[index2]", "sparse/tests/test_coo.py::test_slicing_errors[5]", "sparse/tests/test_coo.py::test_slicing_errors[-5]", "sparse/tests/test_coo.py::test_slicing_errors[foo]", "sparse/tests/test_coo.py::test_slicing_errors[index6]", "sparse/tests/test_coo.py::test_slicing_errors[0.5]", "sparse/tests/test_coo.py::test_slicing_errors[index8]", "sparse/tests/test_coo.py::test_slicing_errors[index9]", "sparse/tests/test_coo.py::test_canonical", "sparse/tests/test_coo.py::test_concatenate", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-1]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-1]", "sparse/tests/test_coo.py::test_stack[0-shape0]", "sparse/tests/test_coo.py::test_stack[0-shape1]", "sparse/tests/test_coo.py::test_stack[0-shape2]", "sparse/tests/test_coo.py::test_stack[1-shape0]", "sparse/tests/test_coo.py::test_stack[1-shape1]", "sparse/tests/test_coo.py::test_stack[1-shape2]", "sparse/tests/test_coo.py::test_stack[-1-shape0]", "sparse/tests/test_coo.py::test_stack[-1-shape1]", "sparse/tests/test_coo.py::test_stack[-1-shape2]", "sparse/tests/test_coo.py::test_large_concat_stack", "sparse/tests/test_coo.py::test_coord_dtype", "sparse/tests/test_coo.py::test_addition", 
"sparse/tests/test_coo.py::test_addition_not_ok_when_large_and_sparse", "sparse/tests/test_coo.py::test_scalar_multiplication[2]", "sparse/tests/test_coo.py::test_scalar_multiplication[2.5]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar2]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar3]", "sparse/tests/test_coo.py::test_scalar_exponentiation", "sparse/tests/test_coo.py::test_create_with_lists_of_tuples", "sparse/tests/test_coo.py::test_sizeof", "sparse/tests/test_coo.py::test_scipy_sparse_interface", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[coo]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csr]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[dok]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csc]", "sparse/tests/test_coo.py::test_op_scipy_sparse[mul]", "sparse/tests/test_coo.py::test_op_scipy_sparse[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse[sub]", "sparse/tests/test_coo.py::test_op_scipy_sparse[gt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[lt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[ne]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[sub]", "sparse/tests/test_coo.py::test_cache_csr", "sparse/tests/test_coo.py::test_empty_shape", "sparse/tests/test_coo.py::test_single_dimension", "sparse/tests/test_coo.py::test_raise_dense", "sparse/tests/test_coo.py::test_large_sum", "sparse/tests/test_coo.py::test_add_many_sparse_arrays", "sparse/tests/test_coo.py::test_caching", "sparse/tests/test_coo.py::test_scalar_slicing", "sparse/tests/test_coo.py::test_triul[shape0-0]", "sparse/tests/test_coo.py::test_triul[shape1-1]", "sparse/tests/test_coo.py::test_triul[shape2--1]", "sparse/tests/test_coo.py::test_triul[shape3--2]", "sparse/tests/test_coo.py::test_triul[shape4-1000]", "sparse/tests/test_coo.py::test_empty_reduction", "sparse/tests/test_coo.py::test_random_shape[0.1-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape2]", "sparse/tests/test_coo.py::test_two_random_unequal", "sparse/tests/test_coo.py::test_two_random_same_seed", "sparse/tests/test_coo.py::test_random_sorted", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-rvs-int]", 
"sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_scalar_shape_construction", "sparse/tests/test_coo.py::test_len", "sparse/tests/test_coo.py::test_density", "sparse/tests/test_coo.py::test_size", "sparse/tests/test_coo.py::test_np_array", "sparse/tests/test_coo.py::test_three_arg_where[shapes0]", "sparse/tests/test_coo.py::test_three_arg_where[shapes1]", "sparse/tests/test_coo.py::test_three_arg_where[shapes2]", "sparse/tests/test_coo.py::test_three_arg_where[shapes3]", "sparse/tests/test_coo.py::test_three_arg_where[shapes4]", "sparse/tests/test_coo.py::test_three_arg_where[shapes5]", "sparse/tests/test_coo.py::test_three_arg_where[shapes6]", "sparse/tests/test_coo.py::test_three_arg_where[shapes7]", "sparse/tests/test_coo.py::test_one_arg_where", "sparse/tests/test_coo.py::test_one_arg_where_dense", "sparse/tests/test_coo.py::test_two_arg_where", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape2]", "sparse/tests/test_dok.py::test_convert_to_coo", "sparse/tests/test_dok.py::test_convert_from_coo", "sparse/tests/test_dok.py::test_convert_from_numpy", "sparse/tests/test_dok.py::test_convert_to_numpy", "sparse/tests/test_dok.py::test_construct[2-data0]", "sparse/tests/test_dok.py::test_construct[shape1-data1]", "sparse/tests/test_dok.py::test_construct[shape2-data2]", "sparse/tests/test_dok.py::test_getitem[0.1-shape0]", "sparse/tests/test_dok.py::test_getitem[0.1-shape1]", "sparse/tests/test_dok.py::test_getitem[0.1-shape2]", "sparse/tests/test_dok.py::test_getitem[0.3-shape0]", "sparse/tests/test_dok.py::test_getitem[0.3-shape1]", "sparse/tests/test_dok.py::test_getitem[0.3-shape2]", "sparse/tests/test_dok.py::test_getitem[0.5-shape0]", "sparse/tests/test_dok.py::test_getitem[0.5-shape1]", "sparse/tests/test_dok.py::test_getitem[0.5-shape2]", "sparse/tests/test_dok.py::test_getitem[0.7-shape0]", "sparse/tests/test_dok.py::test_getitem[0.7-shape1]", "sparse/tests/test_dok.py::test_getitem[0.7-shape2]", 
"sparse/tests/test_dok.py::test_setitem[shape2-index2-value2]", "sparse/tests/test_dok.py::test_setitem[shape6-index6-value6]", "sparse/tests/test_dok.py::test_setitem[shape7-index7-value7]", "sparse/tests/test_dok.py::test_setitem[shape8-index8-value8]", "sparse/tests/test_dok.py::test_setitem[shape10-index10-value10]", "sparse/tests/test_dok.py::test_setitem[shape12-index12-value12]", "sparse/tests/test_dok.py::test_default_dtype", "sparse/tests/test_dok.py::test_int_dtype", "sparse/tests/test_dok.py::test_float_dtype", "sparse/tests/test_dok.py::test_set_zero" ]
[]
BSD 3-Clause "New" or "Revised" License
2,239
[ "sparse/coo.py" ]
[ "sparse/coo.py" ]
melexis__warnings-plugin-52
0c7e730a491d32ad90f258439715fb6507be37f2
2018-03-02 14:08:39
0c7e730a491d32ad90f258439715fb6507be37f2
diff --git a/README.rst b/README.rst index 139e744..e5af286 100644 --- a/README.rst +++ b/README.rst @@ -57,11 +57,16 @@ You can find more details in `Installation guide <docs/installation.rst>`_ Usage ===== -Since warnings plugin parses log messages (so far), you will need to redirect -your stderr to some text file. You can do that with shell pipes or with +Warnings plugin parses log messages as well as direct command stream. In case you +want to create log file, you will need to redirect your stderr to some text file. +You can do that with shell pipes or with command line arguments to command (if it supports outputting errors to file instead of stderr). Be aware that some commands print warnings on stdout. +Also warnings plugin log files need to be the last argument as otherwise the +arguments after that are discarded, because they are considered as command +arguments (with or without command flag). + ------------ Pipe example ------------ @@ -73,6 +78,16 @@ file. yourcommand 2>&1 | tee doc_log.txt +--------------- +Command example +--------------- + +Below is the command example for the plugin (keep in mind that parse commands are +required). + +.. code-block:: bash + + mlx-warnings --command <yourcommand> --------------- Running command @@ -104,20 +119,26 @@ The command returns (shell $? variable): - value 0 when the number of counted warnings is within the supplied minimum and maximum limits: ok, - number of counted warnings (positive) when the counter number is not within those limit. ----------------------------- +------------------------- Parse for Sphinx warnings ----------------------------- +------------------------- After you saved your Sphinx warnings to the file, you can parse it with command: .. code-block:: bash - # command line + # command line log file mlx-warnings doc_log.txt --sphinx + # command line command execution + mlx-warnings --command --sphinx <commandforsphinx> + + # explicitly as python module for log file + python3 -m mlx.warnings --sphinx doc_log.txt + python -m mlx.warnings --sphinx doc_log.txt # explicitly as python module - python3 -m mlx.warnings doc_log.txt --sphinx - python -m mlx.warnings doc_log.txt --sphinx + python3 -m mlx.warnings --command --sphinx <commandforsphinx> + python -m mlx.warnings --command --sphinx <commandforsphinx> -------------------------- @@ -129,11 +150,17 @@ command: .. code-block:: bash - # command line + # command line log file mlx-warnings doc_log.txt --doxygen + # command line command execution + mlx-warnings --command --doxygen <commandfordoxygen> + + # explicitly as python module for log file + python3 -m mlx.warnings --doxygen doc_log.txt + python -m mlx.warnings --doxygen doc_log.txt # explicitly as python module - python3 -m mlx.warnings doc_log.txt --doxygen - python -m mlx.warnings doc_log.txt --doxygen + python3 -m mlx.warnings --command --doxygen <commandfordoxygen> + python -m mlx.warnings --command --doxygen <commandfordoxygen> ------------------------ @@ -145,11 +172,17 @@ command: .. 
code-block:: bash - # command line + # command line log file mlx-warnings junit_output.xml --junit + # command line command execution + mlx-warnings --command --junit <commandforjunit> + + # explicitly as python module for log file + python3 -m mlx.warnings --junit junit_output.xml + python -m mlx.warnings --junit junit_output.xml # explicitly as python module - python3 -m mlx.warnings junit_output.xml --junit - python -m mlx.warnings junit_output.xml --junit + python3 -m mlx.warnings --command --junit <commandforjunit> + python -m mlx.warnings --command --junit <commandforjunit> ------------- Other options diff --git a/src/mlx/warnings.py b/src/mlx/warnings.py index 240e13b..325ad23 100644 --- a/src/mlx/warnings.py +++ b/src/mlx/warnings.py @@ -1,9 +1,13 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +from __future__ import print_function + import argparse +import os import pkg_resources import re +import subprocess import sys import abc from junitparser import JUnitXml, Failure, Error @@ -13,7 +17,7 @@ from setuptools_scm import get_version DOXYGEN_WARNING_REGEX = r"(?:((?:[/.]|[A-Za-z]).+?):(-?\d+):\s*([Ww]arning|[Ee]rror)|<.+>:-?\d+(?::\s*([Ww]arning|[Ee]rror))?): (.+(?:(?!\s*(?:[Nn]otice|[Ww]arning|[Ee]rror): )[^/<\n][^:\n][^/\n].+)*)|\s*([Nn]otice|[Ww]arning|[Ee]rror): (.+)\n?" doxy_pattern = re.compile(DOXYGEN_WARNING_REGEX) -SPHINX_WARNING_REGEX = r"(.+?:(?:\d+|None)?):?\s*(DEBUG|INFO|WARNING|ERROR|SEVERE):\s*(.+)\n?" +SPHINX_WARNING_REGEX = r"(.+?:(?:\d+|None)):\s*(DEBUG|INFO|WARNING|ERROR|SEVERE):\s*(.+)\n?" sphinx_pattern = re.compile(SPHINX_WARNING_REGEX) __version__ = get_version() @@ -313,33 +317,76 @@ def warnings_wrapper(args): group.add_argument('-s', '--sphinx', dest='sphinx', action='store_true') group.add_argument('-j', '--junit', dest='junit', action='store_true') parser.add_argument('-v', '--verbose', dest='verbose', action='store_true') + parser.add_argument('--command', dest='command', action='store_true', + help='Treat program arguments as command to execute to obtain data') parser.add_argument('-m', '--maxwarnings', type=int, required=False, default=0, help='Maximum amount of warnings accepted') parser.add_argument('--minwarnings', type=int, required=False, default=0, help='Minimum amount of warnings accepted') parser.add_argument('--version', action='version', version='%(prog)s {version}'.format(version=pkg_resources.require('mlx.warnings')[0].version)) - parser.add_argument('logfile', nargs='+', help='Logfile that might contain warnings') + parser.add_argument('logfile', nargs='+', help='Logfile (or command) that might contain warnings') + parser.add_argument('flags', nargs=argparse.REMAINDER, help='Possible not-used flags from above are considered as command flags') + args = parser.parse_args(args) warnings = WarningsPlugin(sphinx=args.sphinx, doxygen=args.doxygen, junit=args.junit, verbose=args.verbose) warnings.set_maximum(args.maxwarnings) warnings.set_minimum(args.minwarnings) + if args.command: + cmd = args.logfile + if args.flags: + cmd.extend(args.flags) + warnings_command(warnings, cmd) + else: + warnings_logfile(warnings, args.logfile) + + warnings.return_count() + return warnings.return_check_limits() + + +def warnings_command(warnings, cmd): + try: + print("Executing: ", end='') + print(cmd) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + stdin=subprocess.PIPE, bufsize=1, universal_newlines=True) + out, err = proc.communicate() + # Check stdout + if out: + try: + print(out.decode(encoding="utf-8")) + 
warnings.check(out.decode(encoding="utf-8")) + except AttributeError as e: + warnings.check(out) + print(out) + # Check stderr + if err: + try: + warnings.check(err.decode(encoding="utf-8")) + print(err.decode(encoding="utf-8"), file=sys.stderr) + except AttributeError as e: + warnings.check(err) + print(err, file=sys.stderr) + except OSError as e: + if e.errno == os.errno.ENOENT: + print("It seems like program " + str(cmd) + " is not installed.") + raise + + +def warnings_logfile(warnings, log): # args.logfile doesn't necessarily contain wildcards, but just to be safe, we # assume it does, and try to expand them. # This mechanism is put in place to allow wildcards to be passed on even when # executing the script on windows (in that case there is no shell expansion of wildcards) # so that the script can be used in the exact same way even when moving from one # OS to another. - for file_wildcard in args.logfile: + for file_wildcard in log: for logfile in glob.glob(file_wildcard): with open(logfile, 'r') as loghandle: warnings.check(loghandle.read()) - warnings.return_count() - return warnings.return_check_limits() - def main(): sys.exit(warnings_wrapper(sys.argv[1:])) diff --git a/tox.ini b/tox.ini index 33199f3..d369cc7 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,6 @@ deps = pytest-cov mock junitparser>=1.0.0 - setuptools_scm coverage commands = {posargs:py.test --cov=mlx --cov-report=term-missing -vv tests/} @@ -35,8 +34,9 @@ commands = python -c 'import mlx.warnings;print(mlx.warnings.__version__)' python -m mlx.warnings -h python -m mlx.warnings --version - python -m mlx.warnings -j tests/junit*.xml --maxwarnings 3 --minwarnings 3 - python -m mlx.warnings -j "tests/junit*.xml" --maxwarnings 3 --minwarnings 3 #emulate for windows (no shell expansion) + python -m mlx.warnings -j --maxwarnings 3 --minwarnings 3 tests/junit*.xml + python -m mlx.warnings -j --maxwarnings 3 --minwarnings 3 "tests/junit*.xml" #emulate for windows (no shell expansion) + python -m mlx.warnings -j --command --maxwarnings 2 --minwarnings 2 cat tests/junit_double_fail.xml [testenv:bootstrap] deps =
Parse output stream of command ``` warning-plugin <plugin-thresholds> <command> <arguments> ``` should disregard the command's return value and parse stdout and stderr for warnings against the plugin thresholds, or against a special JSON-formatted conf file in the root of the project. This would make the plugin a lot more usable than generating log files for the plugin to parse through.
melexis/warnings-plugin
diff --git a/tests/sphinx_double_warning.txt b/tests/sphinx_double_warning.txt new file mode 100644 index 0000000..54d4f42 --- /dev/null +++ b/tests/sphinx_double_warning.txt @@ -0,0 +1,3 @@ +/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation' +/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting document u'installation' + diff --git a/tests/sphinx_single_warning.txt b/tests/sphinx_single_warning.txt new file mode 100644 index 0000000..3dd77fc --- /dev/null +++ b/tests/sphinx_single_warning.txt @@ -0,0 +1,2 @@ +/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation' + diff --git a/tests/test_integration.py b/tests/test_integration.py index 0a06f45..97351a5 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -5,21 +5,6 @@ from mlx.warnings import warnings_wrapper class TestIntegration(TestCase): - def test_help(self): - with self.assertRaises(SystemExit) as ex: - warnings_wrapper(['--help']) - self.assertEqual(0, ex.exception.code) - - def test_version(self): - with self.assertRaises(SystemExit) as ex: - warnings_wrapper(['--version']) - self.assertEqual(0, ex.exception.code) - - def test_no_parser_selection(self): - with self.assertRaises(SystemExit) as ex: - warnings_wrapper([]) - self.assertEqual(2, ex.exception.code) - junit_warning_cnt = 3 def test_single_argument(self): @@ -30,6 +15,26 @@ class TestIntegration(TestCase): retval = warnings_wrapper(['--junit', 'tests/junit_single_fail.xml', 'tests/junit_double_fail.xml']) self.assertEqual(1 + 2, retval) + def test_single_command_argument(self): + retval = warnings_wrapper(['--junit', '--command', 'cat', 'tests/junit_single_fail.xml']) + self.assertEqual(1, retval) + + def test_two_command_arguments(self): + retval = warnings_wrapper(['--sphinx', '--command', 'cat', 'tests/sphinx_single_warning.txt', 'tests/sphinx_double_warning.txt']) + self.assertEqual(1 + 2, retval) + + def test_command_with_its_own_arguments(self): + retval = warnings_wrapper(['--sphinx', '--command', 'cat', '-A', 'tests/sphinx_single_warning.txt', 'tests/sphinx_double_warning.txt']) + self.assertEqual(1 + 2, retval) + + def test_command_to_stderr(self): + retval = warnings_wrapper(['--sphinx', '--command', 'cat', 'tests/sphinx_single_warning.txt', '>&2']) + self.assertEqual(1, retval) + + def test_faulty_command(self): + with self.assertRaises(OSError): + warnings_wrapper(['--sphinx', '--command', 'blahahahaha', 'tests/sphinx_single_warning.txt']) + def test_wildcarded_arguments(self): # note: no shell expansion simulation (e.g. 
as in windows) retval = warnings_wrapper(['--junit', 'tests/junit*.xml']) diff --git a/tests/test_sphinx.py b/tests/test_sphinx.py index ac72886..7200652 100644 --- a/tests/test_sphinx.py +++ b/tests/test_sphinx.py @@ -26,18 +26,12 @@ class TestSphinxWarnings(TestCase): def test_warning_no_line_number(self): dut1 = "/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'" dut2 = "/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting document u'installation'" - dut3 = "/home/bljah/test/index.rst:: WARNING: toctree contains reference to nonexisting document u'installation'" - dut4 = "/home/bljah/test/SRS.rst: WARNING: item non_existing_requirement is not defined" with patch('sys.stdout', new=StringIO()) as fake_out: self.warnings.check(dut1) self.warnings.check(dut2) - self.warnings.check(dut3) - self.warnings.check(dut4) - self.assertEqual(self.warnings.return_count(), 4) + self.assertEqual(self.warnings.return_count(), 2) self.assertRegexpMatches(fake_out.getvalue(), dut1) self.assertRegexpMatches(fake_out.getvalue(), dut2) - self.assertRegexpMatches(fake_out.getvalue(), dut3) - self.assertRegexpMatches(fake_out.getvalue(), dut4) def test_single_warning_mixed(self): dut1 = 'This1 should not be treated as warning'
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 3 }
0.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio", "pytest-bdd", "pytest-benchmark", "pytest-randomly", "responses", "mock", "hypothesis", "freezegun", "trustme", "requests-mock", "requests", "tomlkit" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 certifi==2025.1.31 cffi==1.17.1 charset-normalizer==3.4.1 coverage==7.8.0 cryptography==44.0.2 exceptiongroup==1.2.2 execnet==2.1.1 freezegun==1.5.1 gherkin-official==29.0.0 hypothesis==6.130.5 idna==3.10 importlib_metadata==8.6.1 iniconfig==2.1.0 junitparser==3.2.0 Mako==1.3.9 MarkupSafe==3.0.2 -e git+https://github.com/melexis/warnings-plugin.git@0c7e730a491d32ad90f258439715fb6507be37f2#egg=mlx.warnings mock==5.2.0 packaging==24.2 parse==1.20.2 parse_type==0.6.4 pluggy==1.5.0 py-cpuinfo==9.0.0 pycparser==2.22 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-bdd==8.1.0 pytest-benchmark==5.1.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.16.0 pytest-xdist==3.6.1 python-dateutil==2.9.0.post0 PyYAML==6.0.2 requests==2.32.3 requests-mock==1.12.1 responses==0.25.7 setuptools-scm==8.2.0 six==1.17.0 sortedcontainers==2.4.0 tomli==2.2.1 tomlkit==0.13.2 trustme==1.2.1 typing_extensions==4.13.0 urllib3==2.3.0 zipp==3.21.0
name: warnings-plugin channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - certifi==2025.1.31 - cffi==1.17.1 - charset-normalizer==3.4.1 - coverage==7.8.0 - cryptography==44.0.2 - exceptiongroup==1.2.2 - execnet==2.1.1 - freezegun==1.5.1 - gherkin-official==29.0.0 - hypothesis==6.130.5 - idna==3.10 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - junitparser==3.2.0 - mako==1.3.9 - markupsafe==3.0.2 - mock==5.2.0 - packaging==24.2 - parse==1.20.2 - parse-type==0.6.4 - pluggy==1.5.0 - py-cpuinfo==9.0.0 - pycparser==2.22 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-bdd==8.1.0 - pytest-benchmark==5.1.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-randomly==3.16.0 - pytest-xdist==3.6.1 - python-dateutil==2.9.0.post0 - pyyaml==6.0.2 - requests==2.32.3 - requests-mock==1.12.1 - responses==0.25.7 - setuptools-scm==8.2.0 - six==1.17.0 - sortedcontainers==2.4.0 - tomli==2.2.1 - tomlkit==0.13.2 - trustme==1.2.1 - typing-extensions==4.13.0 - urllib3==2.3.0 - zipp==3.21.0 prefix: /opt/conda/envs/warnings-plugin
[ "tests/test_integration.py::TestIntegration::test_command_to_stderr", "tests/test_integration.py::TestIntegration::test_two_command_arguments", "tests/test_integration.py::TestIntegration::test_command_with_its_own_arguments", "tests/test_integration.py::TestIntegration::test_single_command_argument" ]
[ "tests/test_integration.py::TestIntegration::test_faulty_command" ]
[ "tests/test_sphinx.py::TestSphinxWarnings::test_warning_no_line_number", "tests/test_sphinx.py::TestSphinxWarnings::test_no_warning", "tests/test_sphinx.py::TestSphinxWarnings::test_single_warning_mixed", "tests/test_sphinx.py::TestSphinxWarnings::test_single_warning", "tests/test_sphinx.py::TestSphinxWarnings::test_multiline", "tests/test_integration.py::TestIntegration::test_wildcarded_arguments", "tests/test_integration.py::TestIntegration::test_max_but_still_ok", "tests/test_integration.py::TestIntegration::test_single_argument", "tests/test_integration.py::TestIntegration::test_min_but_still_ok", "tests/test_integration.py::TestIntegration::test_max", "tests/test_integration.py::TestIntegration::test_min", "tests/test_integration.py::TestIntegration::test_two_arguments" ]
[]
Apache License 2.0
2,240
[ "README.rst", "src/mlx/warnings.py", "tox.ini" ]
[ "README.rst", "src/mlx/warnings.py", "tox.ini" ]
oasis-open__cti-python-stix2-133
4a9c38e0b50415f4733072fc76eb8ebd0749c84b
2018-03-02 16:37:42
4a9c38e0b50415f4733072fc76eb8ebd0749c84b
diff --git a/stix2/markings/granular_markings.py b/stix2/markings/granular_markings.py index be5d258..7c227d9 100644 --- a/stix2/markings/granular_markings.py +++ b/stix2/markings/granular_markings.py @@ -116,9 +116,9 @@ def remove_markings(obj, marking, selectors): granular_markings = utils.compress_markings(granular_markings) if granular_markings: - return new_version(obj, granular_markings=granular_markings) + return new_version(obj, granular_markings=granular_markings, allow_custom=True) else: - return new_version(obj, granular_markings=None) + return new_version(obj, granular_markings=None, allow_custom=True) def add_markings(obj, marking, selectors): @@ -152,7 +152,7 @@ def add_markings(obj, marking, selectors): granular_marking = utils.expand_markings(granular_marking) granular_marking = utils.compress_markings(granular_marking) - return new_version(obj, granular_markings=granular_marking) + return new_version(obj, granular_markings=granular_marking, allow_custom=True) def clear_markings(obj, selectors): @@ -207,9 +207,9 @@ def clear_markings(obj, selectors): granular_markings = utils.compress_markings(granular_markings) if granular_markings: - return new_version(obj, granular_markings=granular_markings) + return new_version(obj, granular_markings=granular_markings, allow_custom=True) else: - return new_version(obj, granular_markings=None) + return new_version(obj, granular_markings=None, allow_custom=True) def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=False): diff --git a/stix2/markings/object_markings.py b/stix2/markings/object_markings.py index c0375c3..a169fe3 100644 --- a/stix2/markings/object_markings.py +++ b/stix2/markings/object_markings.py @@ -37,7 +37,7 @@ def add_markings(obj, marking): object_markings = set(obj.get("object_marking_refs", []) + marking) - return new_version(obj, object_marking_refs=list(object_markings)) + return new_version(obj, object_marking_refs=list(object_markings), allow_custom=True) def remove_markings(obj, marking): @@ -69,9 +69,9 @@ def remove_markings(obj, marking): new_markings = [x for x in object_markings if x not in marking] if new_markings: - return new_version(obj, object_marking_refs=new_markings) + return new_version(obj, object_marking_refs=new_markings, allow_custom=True) else: - return new_version(obj, object_marking_refs=None) + return new_version(obj, object_marking_refs=None, allow_custom=True) def set_markings(obj, marking): @@ -103,7 +103,7 @@ def clear_markings(obj): A new version of the given SDO or SRO with object_marking_refs cleared. """ - return new_version(obj, object_marking_refs=None) + return new_version(obj, object_marking_refs=None, allow_custom=True) def is_marked(obj, marking=None): diff --git a/stix2/utils.py b/stix2/utils.py index 73337d0..37ff166 100644 --- a/stix2/utils.py +++ b/stix2/utils.py @@ -251,7 +251,7 @@ def revoke(data): if data.get("revoked"): raise RevokeError("revoke") - return new_version(data, revoked=True) + return new_version(data, revoked=True, allow_custom=True) def get_class_hierarchy_names(obj):
Calling add_markings on an existing STIX object with custom properties causes an exception. Here is the stack trace: Traceback (most recent call last): ``` File "/Users/rpiazza/projects/capec-attack/capec2stix/capec2stix.py", line 197, in <module> main() File "/Users/rpiazza/projects/capec-attack/capec2stix/capec2stix.py", line 189, in main tlos = convert2stix(attack_pattern_catalog, marking_def.id, ident.id) File "/Users/rpiazza/projects/capec-attack/capec2stix/capec2stix.py", line 170, in convert2stix return convert_attack_pattern(attack_pattern_catalog.Attack_Patterns.Attack_Pattern[0], marking_def_id, ident_id) File "/Users/rpiazza/projects/capec-attack/capec2stix/capec2stix.py", line 158, in convert_attack_pattern ap.add_markings(marking_def_id) File "/Users/rpiazza/py-envs/python3.5/lib/python3.5/site-packages/stix2/markings/__init__.py", line 143, in add_markings return object_markings.add_markings(obj, marking) File "/Users/rpiazza/py-envs/python3.5/lib/python3.5/site-packages/stix2/markings/object_markings.py", line 40, in add_markings return new_version(obj, object_marking_refs=list(object_markings)) File "/Users/rpiazza/py-envs/python3.5/lib/python3.5/site-packages/stix2/utils.py", line 234, in new_version return cls(**{k: v for k, v in new_obj_inner.items() if v is not None}) File "/Users/rpiazza/py-envs/python3.5/lib/python3.5/site-packages/stix2/base.py", line 111, in __init__ raise ExtraPropertiesError(cls, extra_kwargs) stix2.exceptions.ExtraPropertiesError: Unexpected properties for AttackPattern: (x_capec_abstraction, x_capec_consequences, x_capec_example_instances, x_capec_likelihood_of_attack, x_capec_prerequisites, x_capec_skills_required, x_capec_typical_severity, x_resources_required). ```
oasis-open/cti-python-stix2
diff --git a/stix2/test/test_custom.py b/stix2/test/test_custom.py index 7c1832b..76ad61b 100644 --- a/stix2/test/test_custom.py +++ b/stix2/test/test_custom.py @@ -2,7 +2,14 @@ import pytest import stix2 -from .constants import FAKE_TIME +from .constants import FAKE_TIME, MARKING_DEFINITION_ID + +IDENTITY_CUSTOM_PROP = stix2.Identity( + name="John Smith", + identity_class="individual", + x_foo="bar", + allow_custom=True, +) def test_identity_custom_property(): @@ -82,18 +89,38 @@ def test_parse_identity_custom_property(data): def test_custom_property_in_bundled_object(): - identity = stix2.Identity( - name="John Smith", - identity_class="individual", - x_foo="bar", - allow_custom=True, - ) - bundle = stix2.Bundle(identity, allow_custom=True) + bundle = stix2.Bundle(IDENTITY_CUSTOM_PROP, allow_custom=True) assert bundle.objects[0].x_foo == "bar" assert '"x_foo": "bar"' in str(bundle) +def test_identity_custom_property_revoke(): + identity = IDENTITY_CUSTOM_PROP.revoke() + assert identity.x_foo == "bar" + + +def test_identity_custom_property_edit_markings(): + marking_obj = stix2.MarkingDefinition( + id=MARKING_DEFINITION_ID, + definition_type="statement", + definition=stix2.StatementMarking(statement="Copyright 2016, Example Corp") + ) + marking_obj2 = stix2.MarkingDefinition( + id=MARKING_DEFINITION_ID, + definition_type="statement", + definition=stix2.StatementMarking(statement="Another one") + ) + + # None of the following should throw exceptions + identity = IDENTITY_CUSTOM_PROP.add_markings(marking_obj) + identity2 = identity.add_markings(marking_obj2, ['x_foo']) + identity2.remove_markings(marking_obj.id) + identity2.remove_markings(marking_obj2.id, ['x_foo']) + identity2.clear_markings() + identity2.clear_markings('x_foo') + + def test_custom_marking_no_init_1(): @stix2.CustomMarking('x-new-obj', [ ('property1', stix2.properties.StringProperty(required=True)),
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 3 }
0.4
{ "env_vars": null, "env_yml_path": [], "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 antlr4-python3-runtime==4.9.3 async-generator==1.10 attrs==22.2.0 Babel==2.11.0 backcall==0.2.0 bleach==4.1.0 bump2version==1.0.1 bumpversion==0.6.0 certifi==2021.5.30 cfgv==3.3.1 charset-normalizer==2.0.12 coverage==6.2 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.9 docutils==0.18.1 entrypoints==0.4 filelock==3.4.1 identify==2.4.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.2.3 iniconfig==1.1.1 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nbsphinx==0.8.8 nest-asyncio==1.6.0 nodeenv==1.6.0 packaging==21.3 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 platformdirs==2.4.0 pluggy==1.0.0 pre-commit==2.17.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 pyzmq==25.1.2 requests==2.27.1 simplejson==3.20.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-prompt==1.5.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -e git+https://github.com/oasis-open/cti-python-stix2.git@4a9c38e0b50415f4733072fc76eb8ebd0749c84b#egg=stix2 stix2-patterns==2.0.0 taxii2-client==2.3.0 testpath==0.6.0 toml==0.10.2 tomli==1.2.3 tornado==6.1 tox==3.28.0 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 wcwidth==0.2.13 webencodings==0.5.1 zipp==3.6.0
name: cti-python-stix2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - antlr4-python3-runtime==4.9.3 - async-generator==1.10 - attrs==22.2.0 - babel==2.11.0 - backcall==0.2.0 - bleach==4.1.0 - bump2version==1.0.1 - bumpversion==0.6.0 - cfgv==3.3.1 - charset-normalizer==2.0.12 - coverage==6.2 - decorator==5.1.1 - defusedxml==0.7.1 - distlib==0.3.9 - docutils==0.18.1 - entrypoints==0.4 - filelock==3.4.1 - identify==2.4.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.2.3 - iniconfig==1.1.1 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nbsphinx==0.8.8 - nest-asyncio==1.6.0 - nodeenv==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - platformdirs==2.4.0 - pluggy==1.0.0 - pre-commit==2.17.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - pyzmq==25.1.2 - requests==2.27.1 - simplejson==3.20.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-prompt==1.5.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - stix2-patterns==2.0.0 - taxii2-client==2.3.0 - testpath==0.6.0 - toml==0.10.2 - tomli==1.2.3 - tornado==6.1 - tox==3.28.0 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - wcwidth==0.2.13 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/cti-python-stix2
[ "stix2/test/test_custom.py::test_identity_custom_property_revoke", "stix2/test/test_custom.py::test_identity_custom_property_edit_markings" ]
[]
[ "stix2/test/test_custom.py::test_identity_custom_property", "stix2/test/test_custom.py::test_identity_custom_property_invalid", "stix2/test/test_custom.py::test_identity_custom_property_allowed", "stix2/test/test_custom.py::test_parse_identity_custom_property[{\\n", "stix2/test/test_custom.py::test_custom_property_in_bundled_object", "stix2/test/test_custom.py::test_custom_marking_no_init_1", "stix2/test/test_custom.py::test_custom_marking_no_init_2", "stix2/test/test_custom.py::test_custom_object_raises_exception", "stix2/test/test_custom.py::test_custom_object_type", "stix2/test/test_custom.py::test_custom_object_no_init_1", "stix2/test/test_custom.py::test_custom_object_no_init_2", "stix2/test/test_custom.py::test_parse_custom_object_type", "stix2/test/test_custom.py::test_parse_unregistered_custom_object_type", "stix2/test/test_custom.py::test_custom_observable_object_1", "stix2/test/test_custom.py::test_custom_observable_object_2", "stix2/test/test_custom.py::test_custom_observable_object_3", "stix2/test/test_custom.py::test_custom_observable_raises_exception", "stix2/test/test_custom.py::test_custom_observable_object_no_init_1", "stix2/test/test_custom.py::test_custom_observable_object_no_init_2", "stix2/test/test_custom.py::test_custom_observable_object_invalid_ref_property", "stix2/test/test_custom.py::test_custom_observable_object_invalid_refs_property", "stix2/test/test_custom.py::test_custom_observable_object_invalid_refs_list_property", "stix2/test/test_custom.py::test_custom_observable_object_invalid_valid_refs", "stix2/test/test_custom.py::test_custom_no_properties_raises_exception", "stix2/test/test_custom.py::test_custom_wrong_properties_arg_raises_exception", "stix2/test/test_custom.py::test_parse_custom_observable_object", "stix2/test/test_custom.py::test_parse_unregistered_custom_observable_object", "stix2/test/test_custom.py::test_parse_invalid_custom_observable_object", "stix2/test/test_custom.py::test_observable_custom_property", "stix2/test/test_custom.py::test_observable_custom_property_invalid", "stix2/test/test_custom.py::test_observable_custom_property_allowed", "stix2/test/test_custom.py::test_observed_data_with_custom_observable_object", "stix2/test/test_custom.py::test_custom_extension_raises_exception", "stix2/test/test_custom.py::test_custom_extension", "stix2/test/test_custom.py::test_custom_extension_wrong_observable_type", "stix2/test/test_custom.py::test_custom_extension_invalid_observable", "stix2/test/test_custom.py::test_custom_extension_no_properties", "stix2/test/test_custom.py::test_custom_extension_empty_properties", "stix2/test/test_custom.py::test_custom_extension_no_init_1", "stix2/test/test_custom.py::test_custom_extension_no_init_2", "stix2/test/test_custom.py::test_parse_observable_with_custom_extension", "stix2/test/test_custom.py::test_parse_observable_with_unregistered_custom_extension", "stix2/test/test_custom.py::test_register_custom_object" ]
[]
BSD 3-Clause "New" or "Revised" License
2,241
[ "stix2/utils.py", "stix2/markings/granular_markings.py", "stix2/markings/object_markings.py" ]
[ "stix2/utils.py", "stix2/markings/granular_markings.py", "stix2/markings/object_markings.py" ]
pydicom__pydicom-586
da6f7917ca2a32a6886e023a71b0b095f5bc06c8
2018-03-03 08:17:24
fcc63f0b96fb370b0eb60b2c765b469ce62e597c
pep8speaks: Hello @scaramallion! Thanks for submitting the PR. - In the file [`pydicom/dataset.py`](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py), following are the PEP8 issues : > [Line 32:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L32): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 32:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L32): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 32:8](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L32): [E128](https://duckduckgo.com/?q=pep8%20E128) continuation line under-indented for visual indent > [Line 39:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L39): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 723:80](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L723): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (87 > 79 characters) > [Line 750:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L750): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 750:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L750): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 751:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L751): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 753:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L753): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 754:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L754): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 755:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L755): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 755:3](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L755): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 756:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L756): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 758:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L758): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 762:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L762): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 762:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L762): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 
763:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L763): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 766:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L766): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 766:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L766): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 767:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L767): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 770:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L770): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs > [Line 770:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L770): [W191](https://duckduckgo.com/?q=pep8%20W191) indentation contains tabs > [Line 770:3](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L770): [E116](https://duckduckgo.com/?q=pep8%20E116) unexpected indentation (comment) > [Line 771:1](https://github.com/pydicom/pydicom/blob/f241e8e0592818a4d4a9c728d7f67bbb03e3edcb/pydicom/dataset.py#L771): [E101](https://duckduckgo.com/?q=pep8%20E101) indentation contains mixed spaces and tabs
diff --git a/LICENSE b/LICENSE index 23ca9e731..853630cfe 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ License file for pydicom, a pure-python DICOM library -Copyright (c) 2008-2017 Darcy Mason and pydicom contributors +Copyright (c) 2008-2018 Darcy Mason and pydicom contributors Except for portions outlined below, pydicom is released under an MIT license: diff --git a/dicom.py b/dicom.py new file mode 100644 index 000000000..f4fef0c63 --- /dev/null +++ b/dicom.py @@ -0,0 +1,11 @@ +msg = """ +Pydicom via 'import dicom' has been removed in pydicom version 1.0. +Please install the `dicom` package to restore function of code relying +on pydicom 0.9.9 or earlier. E.g. `pip install dicom`. +Alternatively, most code can easily be converted to pydicom > 1.0 by +changing import lines from 'import dicom' to 'import pydicom'. +See the Transition Guide at +https://pydicom.github.io/pydicom/stable/transition_to_pydicom1.html. +""" + +raise ImportError(msg) diff --git a/pydicom/dataset.py b/pydicom/dataset.py index 1052b8587..a3f2da9d7 100644 --- a/pydicom/dataset.py +++ b/pydicom/dataset.py @@ -28,8 +28,8 @@ from pydicom.datadict import (tag_for_keyword, keyword_for_tag, repeater_has_keyword) from pydicom.tag import Tag, BaseTag, tag_in_exception from pydicom.dataelem import DataElement, DataElement_from_raw, RawDataElement -from pydicom.uid import (UncompressedPixelTransferSyntaxes, - ExplicitVRLittleEndian) +from pydicom.uid import (UncompressedPixelTransferSyntaxes, + ExplicitVRLittleEndian) import pydicom # for dcmwrite import pydicom.charset from pydicom.config import logger @@ -400,14 +400,13 @@ class Dataset(dict): return True if isinstance(other, self.__class__): - # Compare Elements using values() and class variables using - # __dict__ + # Compare Elements using values() # Convert values() to a list for compatibility between # python 2 and 3 # Sort values() by element tag self_elem = sorted(list(self.values()), key=lambda x: x.tag) other_elem = sorted(list(other.values()), key=lambda x: x.tag) - return self_elem == other_elem and self.__dict__ == other.__dict__ + return self_elem == other_elem return NotImplemented @@ -697,9 +696,9 @@ class Dataset(dict): Returns ------- - None + None Converted pixel data is stored internally in the dataset. - + If a compressed image format, the image is decompressed, and any related data elements are changed accordingly. """ @@ -720,7 +719,9 @@ class Dataset(dict): pixel_array = x.get_pixeldata(self) self._pixel_array = self._reshape_pixel_array(pixel_array) if x.needs_to_convert_to_RGB(self): - self._pixel_array = self._convert_YBR_to_RGB(self._pixel_array) + self._pixel_array = self._convert_YBR_to_RGB( + self._pixel_array + ) successfully_read_pixel_data = True break except Exception as e: @@ -743,31 +744,31 @@ class Dataset(dict): raise NotImplementedError(msg) # is this guaranteed to work if memory is re-used?? self._pixel_id = id(self.PixelData) - + def decompress(self): """Decompresses pixel data and modifies the Dataset in-place - If not a compressed tranfer syntax, then pixel data is converted - to a numpy array internally, but not returned. - - If compressed pixel data, then is decompressed using an image handler, - and internal state is updated appropriately: - - TransferSyntax is updated to non-compressed form - - is_undefined_length for pixel data is set False + If not a compressed tranfer syntax, then pixel data is converted + to a numpy array internally, but not returned. 
+ + If compressed pixel data, then is decompressed using an image handler, + and internal state is updated appropriately: + - TransferSyntax is updated to non-compressed form + - is_undefined_length for pixel data is set False Returns ------- None - Raises + Raises ------ NotImplementedError If the pixel data was originally compressed but file is not - ExplicitVR LittleEndian as required by Dicom standard - """ + ExplicitVR LittleEndian as required by Dicom standard + """ self.convert_pixel_data() self.is_decompressed = True - # May have been undefined length pixel data, but won't be now + # May have been undefined length pixel data, but won't be now if 'PixelData' in self: self[0x7fe00010].is_undefined_length = False @@ -780,14 +781,13 @@ class Dataset(dict): # Check that current file as read does match expected if not self.is_little_endian or self.is_implicit_VR: msg = ("Current dataset does not match expected ExplicitVR " - "LittleEndian transfer syntax from a compressed " + "LittleEndian transfer syntax from a compressed " "transfer syntax") raise NotImplementedError(msg) - + # All is as expected, updated the Transfer Syntax self.file_meta.TransferSyntaxUID = ExplicitVRLittleEndian - @property def pixel_array(self): """Return the Pixel Data as a NumPy array. @@ -1044,10 +1044,12 @@ class Dataset(dict): Parameters ---------- - start : int or None - The slice's starting element tag value. - stop : int or None - The slice's stopping element tag value. + start : int or 2-tuple of int or None + The slice's starting element tag value, in any format accepted by + pydicom.tag.Tag. + stop : int or 2-tuple of int or None + The slice's stopping element tag value, in any format accepted by + pydicom.tag.Tag. step : int or None The slice's step size. @@ -1073,7 +1075,18 @@ class Dataset(dict): if stop is None: stop = all_tags[-1] + 1 - slice_tags = [tag for tag in all_tags if Tag(start) <= tag < Tag(stop)] + # Issue 92: if `stop` is None then 0xFFFFFFFF + 1 causes overflow in + # Tag. The only this occurs if the `stop` parameter value is None + # and the dataset contains an (0xFFFF, 0xFFFF) element + if stop == 0x100000000: + slice_tags = [ + tag for tag in all_tags if Tag(start) <= tag <= Tag(stop - 1) + ] + else: + slice_tags = [ + tag for tag in all_tags if Tag(start) <= tag < Tag(stop) + ] + return slice_tags[::step] def __str__(self): @@ -1233,3 +1246,29 @@ class FileDataset(Dataset): if self.filename and os.path.exists(self.filename): statinfo = os.stat(self.filename) self.timestamp = statinfo.st_mtime + + def __eq__(self, other): + """Compare `self` and `other` for equality. 
+ + Returns + ------- + bool + The result if `self` and `other` are the same class + NotImplemented + If `other` is not the same class as `self` then returning + NotImplemented delegates the result to superclass.__eq__(subclass) + """ + # When comparing against self this will be faster + if other is self: + return True + + if isinstance(other, self.__class__): + # Compare Elements using values() and class members using __dict__ + # Convert values() to a list for compatibility between + # python 2 and 3 + # Sort values() by element tag + self_elem = sorted(list(self.values()), key=lambda x: x.tag) + other_elem = sorted(list(other.values()), key=lambda x: x.tag) + return self_elem == other_elem and self.__dict__ == other.__dict__ + + return NotImplemented diff --git a/pydicom/filewriter.py b/pydicom/filewriter.py index ceb809915..01d9a4911 100644 --- a/pydicom/filewriter.py +++ b/pydicom/filewriter.py @@ -226,7 +226,10 @@ def write_PN(fp, data_element, padding=b' ', encoding=None): val = data_element.value if isinstance(val[0], compat.text_type) or not in_py2: - val = [elem.encode(encoding) for elem in val] + try: + val = [elem.encode(encoding) for elem in val] + except TypeError: + val = [elem.encode(encoding[0]) for elem in val] val = b'\\'.join(val) diff --git a/setup.py b/setup.py index 579d38952..69f5ac2f5 100755 --- a/setup.py +++ b/setup.py @@ -1,12 +1,20 @@ #!/usr/bin/env python import os +import os.path import sys from glob import glob from setuptools import setup, find_packages +have_dicom = True +try: + import dicom +except ImportError: + have_dicom = False + # get __version__ from _version.py -ver_file = os.path.join('pydicom', '_version.py') +base_dir = os.path.dirname(os.path.realpath(__file__)) +ver_file = os.path.join(base_dir, 'pydicom', '_version.py') with open(ver_file) as f: exec(f.read()) @@ -32,6 +40,9 @@ an overview of how to use the pydicom library. needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv) pytest_runner = ['pytest-runner'] if needs_pytest else [] +_py_modules = [] +if not have_dicom: + _py_modules = ['dicom'] CLASSIFIERS = [ "License :: OSI Approved :: MIT License", @@ -94,6 +105,7 @@ opts = dict(name=NAME, keywords=KEYWORDS, classifiers=CLASSIFIERS, packages=find_packages(), + py_modules=_py_modules, package_data=PACKAGE_DATA, include_package_data=True, install_requires=REQUIRES,
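The slice handling changed in `pydicom/dataset.py` above is easiest to see with a small example. The sketch below is not part of the patch; it simply mirrors the new `test_getitem_slice_ffff` test further down (tag value and element name are taken from that test) and assumes a pydicom build that already contains this fix.

```python
# Minimal sketch mirroring test_getitem_slice_ffff; assumes a pydicom build
# that includes the __getitem__ slice fix from the patch above.
from pydicom.dataset import Dataset

ds = Dataset()
ds.SOPInstanceUID = '1.2.3.4'               # (0008,0018)
ds.add_new(0xFFFFFFFF, 'PN', 'CITIZEN^5')   # the largest possible tag

# Per the patch comment, an open-ended slice used to overflow in Tag
# (0xFFFFFFFF + 1); with the fix it simply includes the (FFFF,FFFF) element.
print(ds[:][0xFFFFFFFF].value)              # CITIZEN^5

# An explicit stop value stays exclusive, as for any Python slice.
print(0xFFFFFFFF in ds[0x1000:0xFFFFFFFF])  # False
```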
Unlisted Dicom tags cause KeyError, and failure for all other purposes

_From [[email protected]](https://code.google.com/u/102769889520564422744/) on September 18, 2010 09:27:51_

What steps will reproduce the problem?

1. For dicom files with tags that are not listed in the _dicom_dict.py file, the files can not be read, even if the dicom tags are not of any use to the application.
2. Try to read a dicom file that has a tag (FFFF,FFFF) or (3F03,1001) (I don't know or care what those tags hold)

What is the expected output? What do you see instead?

I would expect the program to ignore these tags, or to fill them out with some clearly bogus placeholders to indicate that they are going to silently be ignored. What I see instead is that the dicom.file_read() throws a KeyError stating that the key is not in the pre-defined dictionary, and the parts of the file that are extractable are not available for querying.

What version of the product are you using?

I tried both 0.9.4 and 0.9.5rc1.

Please provide any additional information below.

A hack that works for one of the data sets that was causing problems was to add the missing values to the dictionary. This is not very satisfying because I can not anticipate all the other instances of not reported tags that will encountered in the future.

    [johnsonhj@neuron mpy-svn-stats]$ tail -4 /Library/Python/2.6/site-packages/dicom/_dicom_dict.py
    '7Fxx0040': ('OW', '1', "Variable Coefficients SDDN", 'Retired'),
    '3F031001': ('NONE', '1', "Unknown Item", ''),
    'FFFFFFFF': ('NONE', '1', "Unknown Item", ''),
    }

# Proposal: In dicom/datadict.py replace:

    41 def get_entry(tag):
    42     """Return the tuple (VR, VM, name, is_retired) from the DICOM dictionary
    43     ····
    44     If the entry is not in the main dictionary, check the masked ones,
    45     e.g. repeating groups like 50xx, etc.
    46     """
    47     tag = Tag(tag)
    48     try:
    49         return DicomDictionary[tag]
    50     except KeyError:
    51         mask_x = mask_match(tag)
    52         if mask_x:
    53             return RepeatersDictionary[mask_x]
    54         else:
    **55**          raise KeyError, "Tag %s not found in DICOM dictionary" % Tag(tag)
    56

============= with =======================================

    41 def get_entry(tag):
    42     """Return the tuple (VR, VM, name, is_retired) from the DICOM dictionary
    43     ····
    44     If the entry is not in the main dictionary, check the masked ones,
    45     e.g. repeating groups like 50xx, etc.
    46     """
    47     tag = Tag(tag)
    48     try:
    49         return DicomDictionary[tag]
    50     except KeyError:
    51         mask_x = mask_match(tag)
    52         if mask_x:
    53             return RepeatersDictionary[mask_x]
    54         else:
    **55**          return ('NONE', '1', "Unknown Item : "+str(tag), '')
    56

_Original issue: http://code.google.com/p/pydicom/issues/detail?id=91_
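For illustration only, here is a self-contained sketch of the fallback behaviour proposed above. `DICOM_DICT`, `REPEATERS`, and the toy `mask_match()` are stand-ins invented for this sketch (the real names in `dicom/datadict.py` are `DicomDictionary`, `RepeatersDictionary`, and `mask_match`); only the "return a placeholder instead of raising `KeyError`" idea is the point.

```python
# Self-contained sketch of the proposal: unknown tags get a clearly bogus
# placeholder entry instead of raising KeyError.  DICOM_DICT / REPEATERS /
# mask_match are toy stand-ins, not the real pydicom data structures.
DICOM_DICT = {0x00100010: ('PN', '1', 'Patient Name', '')}
REPEATERS = {'50xx0010': ('US', '1', 'Number of Points', 'Retired')}


def mask_match(tag):
    """Toy repeating-group lookup: map 50xx group tags onto a '50xx' mask."""
    if (tag >> 16) & 0xFF00 == 0x5000:
        return '50xx%04X' % (tag & 0xFFFF)
    return None


def get_entry(tag):
    """Return (VR, VM, name, is_retired); never raise for unknown tags."""
    try:
        return DICOM_DICT[tag]
    except KeyError:
        mask_x = mask_match(tag)
        if mask_x in REPEATERS:
            return REPEATERS[mask_x]
        # The proposed fallback: a bogus entry so the rest of the file can
        # still be read and queried.
        return ('NONE', '1', 'Unknown Item : %s' % hex(tag), '')


print(get_entry(0x00100010))  # known tag -> real entry
print(get_entry(0x3F031001))  # tag from the report -> placeholder
print(get_entry(0xFFFFFFFF))  # (FFFF,FFFF) -> placeholder, no KeyError
```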
pydicom/pydicom
diff --git a/pydicom/tests/test_dataset.py b/pydicom/tests/test_dataset.py index fee7082bc..9fe4e4851 100644 --- a/pydicom/tests/test_dataset.py +++ b/pydicom/tests/test_dataset.py @@ -403,32 +403,35 @@ class DatasetTests(unittest.TestCase): def testEqualityNoSequence(self): """Dataset: equality returns correct value with simple dataset""" + # Test empty dataset + assert Dataset() == Dataset() + d = Dataset() d.SOPInstanceUID = '1.2.3.4' d.PatientName = 'Test' - self.assertTrue(d == d) + assert d == d e = Dataset() e.PatientName = 'Test' e.SOPInstanceUID = '1.2.3.4' - self.assertTrue(d == e) + assert d == e e.SOPInstanceUID = '1.2.3.5' - self.assertFalse(d == e) + assert not d == e # Check VR del e.SOPInstanceUID e.add(DataElement(0x00080018, 'PN', '1.2.3.4')) - self.assertFalse(d == e) + assert not d == e # Check Tag del e.SOPInstanceUID e.StudyInstanceUID = '1.2.3.4' - self.assertFalse(d == e) + assert not d == e # Check missing Element in self e.SOPInstanceUID = '1.2.3.4' - self.assertFalse(d == e) + assert not d == e # Check missing Element in other d = Dataset() @@ -437,7 +440,7 @@ class DatasetTests(unittest.TestCase): e = Dataset() e.SOPInstanceUID = '1.2.3.4' - self.assertFalse(d == e) + assert not d == e def testEqualityPrivate(self): """Dataset: equality returns correct value""" @@ -500,16 +503,14 @@ class DatasetTests(unittest.TestCase): def testEqualityUnknown(self): """Dataset: equality returns correct value with extra members """ + # Non-element class members are ignored in equality testing d = Dataset() d.SOPEustaceUID = '1.2.3.4' - self.assertTrue(d == d) + assert d == d e = Dataset() - e.SOPEustaceUID = '1.2.3.4' - self.assertTrue(d == e) - e.SOPEustaceUID = '1.2.3.5' - self.assertFalse(d == e) + assert d == e def testEqualityInheritance(self): """Dataset: equality returns correct value for subclass """ @@ -529,6 +530,19 @@ class DatasetTests(unittest.TestCase): self.assertFalse(d == e) self.assertFalse(e == d) + def test_equality_elements(self): + """Test that Dataset equality only checks DataElements.""" + d = Dataset() + d.SOPInstanceUID = '1.2.3.4' + d.PatientName = 'Test' + d.foo = 'foo' + assert d == d + + e = Dataset() + e.PatientName = 'Test' + e.SOPInstanceUID = '1.2.3.4' + assert d == e + def test_inequality(self): """Test inequality operator""" d = Dataset() @@ -682,6 +696,26 @@ class DatasetTests(unittest.TestCase): self.assertTrue( 'SOPInstanceUID' in ds['0x00080018':'0x00080019']) + def test_getitem_slice_ffff(self): + """Test slicing with (FFFF,FFFF)""" + # Issue #92 + ds = Dataset() + ds.CommandGroupLength = 120 # 0000,0000 + ds.CommandLengthToEnd = 111 # 0000,0001 + ds.Overlays = 12 # 0000,51B0 + ds.LengthToEnd = 12 # 0008,0001 + ds.SOPInstanceUID = '1.2.3.4' # 0008,0018 + ds.SkipFrameRangeFlag = 'TEST' # 0008,9460 + ds.add_new(0xFFFF0001, 'PN', 'CITIZEN^1') + ds.add_new(0xFFFF0002, 'PN', 'CITIZEN^2') + ds.add_new(0xFFFF0003, 'PN', 'CITIZEN^3') + ds.add_new(0xFFFFFFFE, 'PN', 'CITIZEN^4') + ds.add_new(0xFFFFFFFF, 'PN', 'CITIZEN^5') + + assert ds[:][0xFFFFFFFF].value == 'CITIZEN^5' + assert 0xFFFFFFFF not in ds[0x1000:0xFFFFFFFF] + assert 0xFFFFFFFF not in ds[(0x1000):(0xFFFF, 0xFFFF)] + def test_delitem_slice(self): """Test Dataset.__delitem__ using slices.""" ds = Dataset() diff --git a/pydicom/tests/test_filewriter.py b/pydicom/tests/test_filewriter.py index 9ed2347ea..f2bac9a81 100644 --- a/pydicom/tests/test_filewriter.py +++ b/pydicom/tests/test_filewriter.py @@ -180,6 +180,20 @@ class WriteFileTests(unittest.TestCase): ds.TransferSyntaxUID = '1.1' 
self.assertRaises(ValueError, ds.save_as, self.file_out) + def test_write_ffff_ffff(self): + """Test writing element (FFFF, FFFF) to file #92""" + fp = DicomBytesIO() + ds = Dataset() + ds.file_meta = Dataset() + ds.is_little_endian = True + ds.is_implicit_VR = True + ds.add_new(0xFFFFFFFF, 'LO', '123456') + ds.save_as(fp, write_like_original=True) + + fp.seek(0) + ds = dcmread(fp, force=True) + assert ds[0xFFFFFFFF].value == b'123456' + class ScratchWriteDateTimeTests(WriteFileTests): """Write and reread simple or multi-value DA/DT/TM data elements""" @@ -1826,7 +1840,6 @@ class TestWriteNumbers(object): class TestWritePN(object): """Test filewriter.write_PN""" - @pytest.mark.skip("Raises exception due to issue #489") def test_no_encoding_unicode(self): """If PN element has no encoding info, default is used""" fp = DicomBytesIO() @@ -1979,12 +1992,11 @@ class TestWriteNumbers(object): class TestWritePN(object): """Test filewriter.write_PN""" - @pytest.mark.skip("Raises exception due to issue #489") def test_no_encoding_unicode(self): """If PN element as no encoding info, default is used""" fp = DicomBytesIO() fp.is_little_endian = True - elem = DataElement(0x00100010, 'PN', u'\u03b8') + elem = DataElement(0x00100010, 'PN', u'\u00e8') write_PN(fp, elem) def test_no_encoding(self):
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_issue_reference", "has_added_files", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 4 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "numpy>=1.16.0", "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 coverage==6.2 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/pydicom/pydicom.git@da6f7917ca2a32a6886e023a71b0b095f5bc06c8#egg=pydicom pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-cov==4.0.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pydicom channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==6.2 - numpy==1.19.5 - pytest-cov==4.0.0 - tomli==1.2.3 prefix: /opt/conda/envs/pydicom
[ "pydicom/tests/test_dataset.py::DatasetTests::testEqualityUnknown", "pydicom/tests/test_dataset.py::DatasetTests::test_equality_elements", "pydicom/tests/test_dataset.py::DatasetTests::test_getitem_slice_ffff", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_ffff_ffff", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_ffff_ffff" ]
[ "pydicom/tests/test_dataset.py::DatasetTests::test_get_item" ]
[ "pydicom/tests/test_dataset.py::DatasetTests::testAttributeErrorInProperty", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteDicomAttr", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteDicomAttrWeDontHave", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteDicomCommandGroupLength", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteItemLong", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteItemTuple", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteNonExistingItem", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteOtherAttr", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityInheritance", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityNoSequence", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityNotDataset", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityPrivate", "pydicom/tests/test_dataset.py::DatasetTests::testEqualitySequence", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault1", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault2", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault3", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault4", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists1", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists2", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists3", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists4", "pydicom/tests/test_dataset.py::DatasetTests::testGetFromRaw", "pydicom/tests/test_dataset.py::DatasetTests::testHash", "pydicom/tests/test_dataset.py::DatasetTests::testMembership", "pydicom/tests/test_dataset.py::DatasetTests::testSetExistingDataElementByName", "pydicom/tests/test_dataset.py::DatasetTests::testSetNewDataElementByName", "pydicom/tests/test_dataset.py::DatasetTests::testSetNonDicom", "pydicom/tests/test_dataset.py::DatasetTests::testTagExceptionPrint", "pydicom/tests/test_dataset.py::DatasetTests::testTagExceptionWalk", "pydicom/tests/test_dataset.py::DatasetTests::testUpdate", "pydicom/tests/test_dataset.py::DatasetTests::test_NamedMemberUpdated", "pydicom/tests/test_dataset.py::DatasetTests::test__setitem__", "pydicom/tests/test_dataset.py::DatasetTests::test_add_repeater_elem_by_keyword", "pydicom/tests/test_dataset.py::DatasetTests::test_attribute_error_in_property_correct_debug", "pydicom/tests/test_dataset.py::DatasetTests::test_contains", "pydicom/tests/test_dataset.py::DatasetTests::test_data_element", "pydicom/tests/test_dataset.py::DatasetTests::test_delitem_slice", "pydicom/tests/test_dataset.py::DatasetTests::test_dir", "pydicom/tests/test_dataset.py::DatasetTests::test_dir_filter", "pydicom/tests/test_dataset.py::DatasetTests::test_dir_subclass", "pydicom/tests/test_dataset.py::DatasetTests::test_empty_slice", "pydicom/tests/test_dataset.py::DatasetTests::test_exit_exception", "pydicom/tests/test_dataset.py::DatasetTests::test_formatted_lines", "pydicom/tests/test_dataset.py::DatasetTests::test_get_pixel_array_already_have", "pydicom/tests/test_dataset.py::DatasetTests::test_get_raises", "pydicom/tests/test_dataset.py::DatasetTests::test_getitem_slice", "pydicom/tests/test_dataset.py::DatasetTests::test_getitem_slice_raises", "pydicom/tests/test_dataset.py::DatasetTests::test_group_dataset", "pydicom/tests/test_dataset.py::DatasetTests::test_inequality", "pydicom/tests/test_dataset.py::DatasetTests::test_is_uncompressed_transfer_syntax", "pydicom/tests/test_dataset.py::DatasetTests::test_iterall", 
"pydicom/tests/test_dataset.py::DatasetTests::test_matching_tags", "pydicom/tests/test_dataset.py::DatasetTests::test_property", "pydicom/tests/test_dataset.py::DatasetTests::test_remove_private_tags", "pydicom/tests/test_dataset.py::DatasetTests::test_reshape_pixel_array_not_implemented", "pydicom/tests/test_dataset.py::DatasetTests::test_save_as", "pydicom/tests/test_dataset.py::DatasetTests::test_set_convert_private_elem_from_raw", "pydicom/tests/test_dataset.py::DatasetTests::test_setitem_slice_raises", "pydicom/tests/test_dataset.py::DatasetTests::test_top", "pydicom/tests/test_dataset.py::DatasetTests::test_trait_names", "pydicom/tests/test_dataset.py::DatasetTests::test_walk", "pydicom/tests/test_dataset.py::DatasetTests::test_with", "pydicom/tests/test_dataset.py::DatasetElementsTests::testSequenceAssignment", "pydicom/tests/test_dataset.py::FileDatasetTests::test_creation_with_container", "pydicom/tests/test_dataset.py::FileDatasetTests::test_equality_file_meta", "pydicom/tests/test_filewriter.py::WriteFileTests::testCT", "pydicom/tests/test_filewriter.py::WriteFileTests::testJPEG2000", "pydicom/tests/test_filewriter.py::WriteFileTests::testListItemWriteBack", "pydicom/tests/test_filewriter.py::WriteFileTests::testMR", "pydicom/tests/test_filewriter.py::WriteFileTests::testMultiPN", "pydicom/tests/test_filewriter.py::WriteFileTests::testRTDose", "pydicom/tests/test_filewriter.py::WriteFileTests::testRTPlan", "pydicom/tests/test_filewriter.py::WriteFileTests::testUnicode", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_double_filemeta", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_no_ts", "pydicom/tests/test_filewriter.py::WriteFileTests::testwrite_short_uid", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testCT", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testJPEG2000", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testListItemWriteBack", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testMR", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testMultiPN", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testRTDose", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testRTPlan", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testUnicode", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_multivalue_DA", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_double_filemeta", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_no_ts", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testwrite_short_uid", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_empty_AT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_DA", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_DT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OD_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OD_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OL_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OL_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_TM", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UC_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UC_implicit_little", 
"pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UN_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UR_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UR_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_empty_LO", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_DA", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_DT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_TM", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_unknown_vr_raises", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_lut_descriptor", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_overlay", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_data", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_representation_vm_one", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_representation_vm_three", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_sequence", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_waveform_bits_allocated", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVRElement::test_not_ambiguous", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVRElement::test_pixel_data_not_ow_or_ob", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_big_endian", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_little_endian", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_raises", "pydicom/tests/test_filewriter.py::ScratchWriteTests::testImpl_LE_deflen_write", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_preamble_default", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_preamble_custom", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_no_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_none_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_bad_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_prefix", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_prefix_none", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_ds_changed", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_not_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_raises", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_media_storage_sop_class_uid_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_write_no_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_raise_no_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_add_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_standard", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_commandset_no_written", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_bad_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_missing_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_group_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_group_length_updated", 
"pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_version", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_implementation_version_name_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_implementation_class_uid_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_filelike_position", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_filemeta", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_ds_unchanged", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_file_meta_unchanged", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_no_preamble", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_filemeta", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_custom", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_default", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_read_write_identical", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_bad_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_filelike_position", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_group_length_updated", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_meta_unchanged", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_missing_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_transfer_syntax_not_added", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_empty_value", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_list", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_singleton", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_exception", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_big_endian", "pydicom/tests/test_filewriter.py::TestWritePN::test_no_encoding_unicode", "pydicom/tests/test_filewriter.py::TestWritePN::test_no_encoding", "pydicom/tests/test_filewriter.py::TestWriteDT::test_format_dt", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_big_endian_correct_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_big_endian_incorrect_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_little_endian_correct_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_little_endian_incorrect_data" ]
[]
MIT License
2,243
[ "setup.py", "LICENSE", "pydicom/dataset.py", "pydicom/filewriter.py", "dicom.py" ]
[ "setup.py", "LICENSE", "pydicom/dataset.py", "pydicom/filewriter.py", "dicom.py" ]
falconry__falcon-1226
76b15bb05daae4aeab5c75307386d3b0394fde42
2018-03-03 13:41:40
35d5220c7ee359ac6b03788107a194644eb00344
codecov[bot]: # [Codecov](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=h1) Report
> Merging [#1226](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=desc) into [master](https://codecov.io/gh/falconry/falcon/commit/800396f0a8d9d13889386522c4bee3e4661d547f?src=pr&el=desc) will **not change** coverage.
> The diff coverage is `100%`.

[![Impacted file tree graph](https://codecov.io/gh/falconry/falcon/pull/1226/graphs/tree.svg?height=150&src=pr&token=zOtkfNQ3h6&width=650)](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=tree)

```diff
@@          Coverage Diff           @@
##           master   #1226   +/-  ##
======================================
  Coverage     100%    100%
======================================
  Files          38      38
  Lines        2484    2491    +7
  Branches      364     367    +3
======================================
+ Hits         2484    2491    +7
```

| [Impacted Files](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [falcon/routing/static.py](https://codecov.io/gh/falconry/falcon/pull/1226/diff?src=pr&el=tree#diff-ZmFsY29uL3JvdXRpbmcvc3RhdGljLnB5) | `100% <100%> (ø)` | :arrow_up: |

------

[Continue to review full report at Codecov](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=footer). Last update [800396f...d77e0ec](https://codecov.io/gh/falconry/falcon/pull/1226?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).

CaselIT: I should've addressed the comments.
diff --git a/falcon/api.py b/falcon/api.py index c1ef8fa..65e0d76 100644 --- a/falcon/api.py +++ b/falcon/api.py @@ -371,7 +371,7 @@ class API(object): self._router.add_route(uri_template, method_map, resource, *args, **kwargs) - def add_static_route(self, prefix, directory, downloadable=False): + def add_static_route(self, prefix, directory, downloadable=False, fallback_filename=None): """Add a route to a directory of static files. Static routes provide a way to serve files directly. This @@ -383,6 +383,8 @@ class API(object): Serving files directly from the web server, rather than through the Python app, will always be more efficient, and therefore should be preferred in production deployments. + For security reasons, the directory and the fallback_filename (if provided) + should be read only for the account running the application. Static routes are matched in LIFO order. Therefore, if the same prefix is used for two routes, the second one will override the @@ -410,12 +412,16 @@ class API(object): downloadable (bool): Set to ``True`` to include a Content-Disposition header in the response. The "filename" directive is simply set to the name of the requested file. + fallback_filename (str): Fallback filename used when the requested file + is not found. Can be a relative path inside the prefix folder or any valid + absolute path. """ self._static_routes.insert( 0, - routing.StaticRoute(prefix, directory, downloadable=downloadable) + routing.StaticRoute(prefix, directory, downloadable=downloadable, + fallback_filename=fallback_filename) ) def add_sink(self, sink, prefix=r'/'): diff --git a/falcon/routing/static.py b/falcon/routing/static.py index ba3fab2..d659f99 100644 --- a/falcon/routing/static.py +++ b/falcon/routing/static.py @@ -24,6 +24,9 @@ class StaticRoute(object): downloadable (bool): Set to ``True`` to include a Content-Disposition header in the response. The "filename" directive is simply set to the name of the requested file. + fallback_filename (str): Fallback filename used when the requested file + is not found. Can be a relative path inside the prefix folder or any valid + absolute path. """ # NOTE(kgriffs): Don't allow control characters and reserved chars @@ -33,13 +36,20 @@ class StaticRoute(object): # minimizes how much can be included in the payload. _MAX_NON_PREFIXED_LEN = 512 - def __init__(self, prefix, directory, downloadable=False): + def __init__(self, prefix, directory, downloadable=False, fallback_filename=None): if not prefix.startswith('/'): raise ValueError("prefix must start with '/'") if not os.path.isabs(directory): raise ValueError('directory must be an absolute path') + if fallback_filename is None: + self._fallback_filename = None + else: + self._fallback_filename = os.path.join(directory, fallback_filename) + if not os.path.isfile(self._fallback_filename): + raise ValueError('fallback_filename is not a file') + # NOTE(kgriffs): Ensure it ends with a path separator to ensure # we only match on the complete segment. Don't raise an error # because most people won't expect to have to append a slash. 
@@ -52,7 +62,9 @@ class StaticRoute(object): def match(self, path): """Check whether the given path matches this route.""" - return path.startswith(self._prefix) + if self._fallback_filename is None: + return path.startswith(self._prefix) + return path.startswith(self._prefix) or path == self._prefix[:-1] def __call__(self, req, resp): """Resource responder for this route.""" @@ -61,7 +73,8 @@ class StaticRoute(object): # NOTE(kgriffs): Check surrounding whitespace and strip trailing # periods, which are illegal on windows - if (not without_prefix or + # NOTE(CaselIT): An empty filename is allowed when fallback_filename is provided + if (not (without_prefix or self._fallback_filename is not None) or without_prefix.strip().rstrip('.') != without_prefix or self._DISALLOWED_CHARS_PATTERN.search(without_prefix) or '\\' in without_prefix or @@ -86,7 +99,12 @@ class StaticRoute(object): try: resp.stream = io.open(file_path, 'rb') except IOError: - raise falcon.HTTPNotFound() + if self._fallback_filename is None: + raise falcon.HTTPNotFound() + try: + resp.stream = io.open(self._fallback_filename, 'rb') + except IOError: + raise falcon.HTTPNotFound() suffix = os.path.splitext(file_path)[1] resp.content_type = resp.options.static_media_types.get(
Add default file name in the StaticRoute

Consider adding the ability to specify a default filename for a static route. I'm not sure whether it would be best to send the file or to raise an `HTTPMovedPermanently` (or an `HTTPPermanentRedirect`). For performance reasons the redirect would probably be best (but in that case maybe `default_filename` should become `default_location`).
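For reference, a minimal usage sketch of the API that the patch above introduces. The directory path is a placeholder, and `fallback_filename` only exists in falcon builds that contain this change; per the patch, the fallback must point at an existing file or `add_static_route` raises `ValueError`.

```python
# Minimal usage sketch of fallback_filename as added by the patch above.
# /srv/example/static is a placeholder path; it (and index.html inside it)
# must exist, otherwise add_static_route raises ValueError.
import falcon

app = falcon.API()

# fallback_filename may be relative to the directory or an absolute path.
# Requests for files that do not exist under /static now receive index.html
# instead of a 404 -- handy for single-page applications.
app.add_static_route('/static', '/srv/example/static',
                     fallback_filename='index.html')

# With a fallback configured, the bare prefix '/static' (no trailing slash)
# also matches, per the updated StaticRoute.match() in the patch.
```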
falconry/falcon
diff --git a/tests/test_static.py b/tests/test_static.py index 2fbb55c..b92d978 100644 --- a/tests/test_static.py +++ b/tests/test_static.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import io +import os import pytest @@ -106,6 +107,21 @@ def test_invalid_args(prefix, directory, client): client.app.add_static_route(prefix, directory) [email protected]('default', [ + 'not-existing-file', + # directories + '.', + '/tmp', +]) +def test_invalid_args_fallback_filename(client, default): + prefix, directory = '/static', '/var/www/statics' + with pytest.raises(ValueError, match='fallback_filename'): + StaticRoute(prefix, directory, fallback_filename=default) + + with pytest.raises(ValueError, match='fallback_filename'): + client.app.add_static_route(prefix, directory, fallback_filename=default) + + @pytest.mark.parametrize('uri_prefix, uri_path, expected_path, mtype', [ ('/static/', '/css/test.css', '/css/test.css', 'text/css'), ('/static', '/css/test.css', '/css/test.css', 'text/css'), @@ -199,3 +215,88 @@ def test_downloadable_not_found(client): response = client.simulate_request(path='/downloads/thing.zip') assert response.status == falcon.HTTP_404 + + [email protected]('uri, default, expected', [ + ('', 'default', 'default'), + ('other', 'default', 'default'), + ('index2', 'index', 'index2'), + ('absolute', '/foo/bar/index', '/foo/bar/index'), +]) +def test_fallback_filename(uri, default, expected, monkeypatch): + + def mockOpen(path, mode): + if default in path: + return path + raise IOError() + + monkeypatch.setattr(io, 'open', mockOpen) + monkeypatch.setattr('os.path.isfile', lambda file: default in file) + + sr = StaticRoute('/static', '/var/www/statics', fallback_filename=default) + + req_path = '/static/' + uri + + req = Request(testing.create_environ( + host='test.com', + path=req_path, + app='statics' + )) + resp = Response() + sr(req, resp) + + assert sr.match(req.path) + assert resp.stream == os.path.join('/var/www/statics', expected) + + [email protected]('strip_slash', [True, False]) [email protected]('path, fallback, static_exp, assert_axp', [ + ('/index', 'index.html', 'index', 'index'), + ('', 'index.html', 'index.html', None), + ('/', 'index.html', 'index.html', None), + ('/other', 'index.html', 'index.html', None), + ('/other', 'index.raise', None, None) +]) +def test_e2e_fallback_filename(client, monkeypatch, strip_slash, path, fallback, + static_exp, assert_axp): + + def mockOpen(path, mode): + if 'index' in path and 'raise' not in path: + return [path.encode('utf-8')] + raise IOError() + + monkeypatch.setattr(io, 'open', mockOpen) + monkeypatch.setattr('os.path.isfile', lambda file: 'index' in file) + + client.app.req_options.strip_url_path_trailing_slash = strip_slash + client.app.add_static_route('/static', '/opt/somesite/static', + fallback_filename=fallback) + client.app.add_static_route('/assets/', '/opt/somesite/assets') + + def test(prefix, directory, expected): + response = client.simulate_request(path=prefix + path) + if expected is None: + assert response.status == falcon.HTTP_404 + else: + assert response.status == falcon.HTTP_200 + assert response.text == directory + expected + + test('/static', '/opt/somesite/static/', static_exp) + test('/assets', '/opt/somesite/assets/', assert_axp) + + [email protected]('default, path, expected', [ + (None, '/static', False), + (None, '/static/', True), + (None, '/staticfoo', False), + (None, '/static/foo', True), + ('index2', '/static', True), + ('index2', '/static/', True), + ('index2', '/staticfoo', False), + ('index2', 
'/static/foo', True), +]) +def test_match(default, path, expected, monkeypatch): + monkeypatch.setattr('os.path.isfile', lambda file: True) + sr = StaticRoute('/static', '/var/www/statics', fallback_filename=default) + + assert sr.match(path) == expected
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 2 }
1.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements/tests" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 -e git+https://github.com/falconry/falcon.git@76b15bb05daae4aeab5c75307386d3b0394fde42#egg=falcon fixtures==4.0.1 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 msgpack==1.0.5 packaging==21.3 pbr==6.1.1 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 python-mimeparse==1.6.0 PyYAML==6.0.1 requests==2.27.1 six==1.17.0 testtools==2.6.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: falcon channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - coverage==6.2 - fixtures==4.0.1 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - msgpack==1.0.5 - packaging==21.3 - pbr==6.1.1 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-mimeparse==1.6.0 - pyyaml==6.0.1 - requests==2.27.1 - six==1.17.0 - testtools==2.6.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/falcon
[ "tests/test_static.py::test_invalid_args_fallback_filename[not-existing-file]", "tests/test_static.py::test_invalid_args_fallback_filename[.]", "tests/test_static.py::test_invalid_args_fallback_filename[/tmp]", "tests/test_static.py::test_fallback_filename[-default-default]", "tests/test_static.py::test_fallback_filename[other-default-default]", "tests/test_static.py::test_fallback_filename[index2-index-index2]", "tests/test_static.py::test_fallback_filename[absolute-/foo/bar/index-/foo/bar/index]", "tests/test_static.py::test_e2e_fallback_filename[/index-index.html-index-index-True]", "tests/test_static.py::test_e2e_fallback_filename[/index-index.html-index-index-False]", "tests/test_static.py::test_e2e_fallback_filename[-index.html-index.html-None-True]", "tests/test_static.py::test_e2e_fallback_filename[-index.html-index.html-None-False]", "tests/test_static.py::test_e2e_fallback_filename[/-index.html-index.html-None-True]", "tests/test_static.py::test_e2e_fallback_filename[/-index.html-index.html-None-False]", "tests/test_static.py::test_e2e_fallback_filename[/other-index.html-index.html-None-True]", "tests/test_static.py::test_e2e_fallback_filename[/other-index.html-index.html-None-False]", "tests/test_static.py::test_e2e_fallback_filename[/other-index.raise-None-None-True]", "tests/test_static.py::test_e2e_fallback_filename[/other-index.raise-None-None-False]", "tests/test_static.py::test_match[None-/static-False]", "tests/test_static.py::test_match[None-/static/-True]", "tests/test_static.py::test_match[None-/staticfoo-False]", "tests/test_static.py::test_match[None-/static/foo-True]", "tests/test_static.py::test_match[index2-/static-True]", "tests/test_static.py::test_match[index2-/static/-True]", "tests/test_static.py::test_match[index2-/staticfoo-False]", "tests/test_static.py::test_match[index2-/static/foo-True]", "tests/test_uri_converters.py::test_uuid_converter[aef90600-660f-41e2-b929-3bf0177b412c-expected0]", "tests/test_uri_converters.py::test_uuid_converter[aef90600660f-41e2-b929-3bf0177b412c-expected1]", "tests/test_uri_converters.py::test_uuid_converter[aef90600660f41e2b9293bf0177b412c-expected2]", "tests/test_uri_converters.py::test_uuid_converter[urn:uuid:aef90600-660f-41e2-b929-3bf0177b412c-expected3]", "tests/test_uri_converters.py::test_uuid_converter[urn:uuid:aef90600660f41e2b9293bf0177b412c-expected4]", "tests/test_uri_converters.py::test_uuid_converter[aef90600-660f-41e2-b929-3bf0177b412c", "tests/test_uri_converters.py::test_uuid_converter[aef90600-660f-41e2-b929-3bf0177b412-None]", "tests/test_uri_converters.py::test_uuid_converter[a-None]", "tests/test_uri_converters.py::test_uuid_converter[aef90600-660f-41e2-b929-3bf0177b412g-None]", "tests/test_uri_converters.py::test_uuid_converter[aef90600_660f_41e2_b929_3bf0177b412c-None]", "tests/test_uri_templates.py::test_uuid_converter[/widgets/{widget_id:uuid}-/widgets/54ecb214-db2b-41c1-9ac4-2926ca435ad3-expected0]", "tests/test_uri_templates.py::test_uuid_converter[/widgets/{widget_id:uuid}/orders-/widgets/54ecb214db2b41c19ac42926ca435ad3/orders-expected1]", "tests/test_uri_templates.py::test_uuid_converter[/versions/diff/{left:uuid()}...{right:uuid()}-/versions/diff/54ecb214-db2b-41c1-9ac4-2926ca435ad3...bdf6817f-2e1f-48a6-b68a-75852b23332c-expected2]", "tests/test_uri_templates.py::test_uuid_converter[/versions/diff/{left:uuid}...{right:uuid()}-/versions/diff/54ecb214-db2b-41c1-9ac4-2926ca435ad3...bdf6817f-2e1f-48a6-b68a-75852b23332c-expected3]", 
"tests/test_uri_templates.py::test_uuid_converter[/versions/diff/{left:uuid()}...{right:uuid}-/versions/diff/54ecb214-db2b-41c1-9ac4-2926ca435ad3...bdf6817f-2e1f-48a6-b68a-75852b23332c-expected4]", "tests/test_uri_templates.py::test_uuid_converter[/widgets/{widget_id:uuid}/orders-/widgets/54ecb214db2b41c19ac42926ca435ad/orders-None]" ]
[ "tests/test_httperror.py::TestHTTPError::test_custom_old_error_serializer", "tests/test_httperror.py::TestHTTPError::test_custom_new_error_serializer", "tests/test_request_media.py::test_msgpack[application/msgpack]", "tests/test_request_media.py::test_msgpack[application/msgpack;", "tests/test_request_media.py::test_msgpack[application/x-msgpack]", "tests/test_request_media.py::test_invalid_msgpack", "tests/test_response_media.py::test_msgpack[application/msgpack]", "tests/test_response_media.py::test_msgpack[application/msgpack;", "tests/test_response_media.py::test_msgpack[application/x-msgpack]" ]
[ "tests/test_after_hooks.py::test_output_validator", "tests/test_after_hooks.py::test_serializer", "tests/test_after_hooks.py::test_hook_as_callable_class", "tests/test_after_hooks.py::test_resource_with_uri_fields[resource0]", "tests/test_after_hooks.py::test_resource_with_uri_fields[resource1]", "tests/test_after_hooks.py::test_wrapped_resource[resource0]", "tests/test_after_hooks.py::test_wrapped_resource[resource1]", "tests/test_after_hooks.py::test_wrapped_resource_with_hooks_aware_of_resource", "tests/test_before_hooks.py::test_multiple_resource_hooks[resource0]", "tests/test_before_hooks.py::test_multiple_resource_hooks[resource1]", "tests/test_before_hooks.py::test_input_validator", "tests/test_before_hooks.py::test_input_validator_inherited", "tests/test_before_hooks.py::test_param_validator", "tests/test_before_hooks.py::test_field_validator[resource0]", "tests/test_before_hooks.py::test_field_validator[resource1]", "tests/test_before_hooks.py::test_field_validator[resource2]", "tests/test_before_hooks.py::test_parser", "tests/test_before_hooks.py::test_wrapped_resource", "tests/test_before_hooks.py::test_wrapped_resource_with_hooks_aware_of_resource", "tests/test_boundedstream.py::test_not_writeable", "tests/test_cmd_print_api.py::test_traverse_with_verbose", "tests/test_cmd_print_api.py::test_traverse", "tests/test_cookies.py::test_response_base_case", "tests/test_cookies.py::test_response_disable_secure_globally", "tests/test_cookies.py::test_response_complex_case", "tests/test_cookies.py::test_cookie_expires_naive", "tests/test_cookies.py::test_cookie_expires_aware", "tests/test_cookies.py::test_cookies_setable", "tests/test_cookies.py::test_cookie_max_age_float_and_string[foofloat]", "tests/test_cookies.py::test_cookie_max_age_float_and_string[foostring]", "tests/test_cookies.py::test_response_unset_cookie", "tests/test_cookies.py::test_cookie_timezone", "tests/test_cookies.py::test_request_cookie_parsing", "tests/test_cookies.py::test_invalid_cookies_are_ignored", "tests/test_cookies.py::test_cookie_header_is_missing", "tests/test_cookies.py::test_unicode_inside_ascii_range", "tests/test_cookies.py::test_non_ascii_name[Unicode_\\xc3\\xa6\\xc3\\xb8]", "tests/test_cookies.py::test_non_ascii_name[Unicode_\\xc3\\x83\\xc2\\xa6\\xc3\\x83\\xc2\\xb8]", "tests/test_cookies.py::test_non_ascii_name[42]", "tests/test_cookies.py::test_non_ascii_value[Unicode_\\xc3\\xa6\\xc3\\xb8]", "tests/test_cookies.py::test_non_ascii_value[Unicode_\\xc3\\x83\\xc2\\xa6\\xc3\\x83\\xc2\\xb8]", "tests/test_cookies.py::test_non_ascii_value[42]", "tests/test_custom_router.py::test_custom_router_add_route_should_be_used", "tests/test_custom_router.py::test_custom_router_find_should_be_used", "tests/test_custom_router.py::test_can_pass_additional_params_to_add_route", "tests/test_custom_router.py::test_custom_router_takes_req_positional_argument", "tests/test_custom_router.py::test_custom_router_takes_req_keyword_argument", "tests/test_default_router.py::test_user_regression_versioned_url", "tests/test_default_router.py::test_user_regression_recipes", "tests/test_default_router.py::test_user_regression_special_chars[/serviceRoot/People|{field}-/serviceRoot/People|susie-expected_params0]", "tests/test_default_router.py::test_user_regression_special_chars[/serviceRoot/People[{field}]-/serviceRoot/People['calvin']-expected_params1]", "tests/test_default_router.py::test_user_regression_special_chars[/serviceRoot/People({field})-/serviceRoot/People('hobbes')-expected_params2]", 
"tests/test_default_router.py::test_user_regression_special_chars[/serviceRoot/People({field})-/serviceRoot/People('hob)bes')-expected_params3]", "tests/test_default_router.py::test_user_regression_special_chars[/serviceRoot/People({field})(z)-/serviceRoot/People(hobbes)(z)-expected_params4]", "tests/test_default_router.py::test_user_regression_special_chars[/serviceRoot/People('{field}')-/serviceRoot/People('rosalyn')-expected_params5]", "tests/test_default_router.py::test_user_regression_special_chars[/^{field}-/^42-expected_params6]", "tests/test_default_router.py::test_user_regression_special_chars[/+{field}-/+42-expected_params7]", "tests/test_default_router.py::test_user_regression_special_chars[/foo/{first}_{second}/bar-/foo/abc_def_ghijk/bar-expected_params8]", "tests/test_default_router.py::test_user_regression_special_chars[/items/{x}?{y}-/items/1080?768-expected_params9]", "tests/test_default_router.py::test_user_regression_special_chars[/items/{x}|{y}-/items/1080|768-expected_params10]", "tests/test_default_router.py::test_user_regression_special_chars[/items/{x},{y}-/items/1080,768-expected_params11]", "tests/test_default_router.py::test_user_regression_special_chars[/items/{x}^^{y}-/items/1080^^768-expected_params12]", "tests/test_default_router.py::test_user_regression_special_chars[/items/{x}*{y}*-/items/1080*768*-expected_params13]", "tests/test_default_router.py::test_user_regression_special_chars[/thing-2/something+{field}+-/thing-2/something+42+-expected_params14]", "tests/test_default_router.py::test_user_regression_special_chars[/thing-2/something*{field}/notes-/thing-2/something*42/notes-expected_params15]", "tests/test_default_router.py::test_user_regression_special_chars[/thing-2/something+{field}|{q}/notes-/thing-2/something+else|z/notes-expected_params16]", "tests/test_default_router.py::test_user_regression_special_chars[serviceRoot/$metadata#Airports('{field}')/Name-serviceRoot/$metadata#Airports('KSFO')/Name-expected_params17]", "tests/test_default_router.py::test_not_str[uri_template0]", "tests/test_default_router.py::test_not_str[uri_template1]", "tests/test_default_router.py::test_not_str[uri_template2]", "tests/test_default_router.py::test_root_path", "tests/test_default_router.py::test_duplicate_field_names[/{field}{field}]", "tests/test_default_router.py::test_duplicate_field_names[/{field}...{field}]", "tests/test_default_router.py::test_duplicate_field_names[/{field}/{another}/{field}]", "tests/test_default_router.py::test_duplicate_field_names[/{field}/something/something/{field}/something]", "tests/test_default_router.py::test_match_entire_path[/items/thing-/items/t]", "tests/test_default_router.py::test_match_entire_path[/items/{x}|{y}|-/items/1080|768]", "tests/test_default_router.py::test_match_entire_path[/items/{x}*{y}foo-/items/1080*768foobar]", "tests/test_default_router.py::test_match_entire_path[/items/{x}*768*-/items/1080*768***]", "tests/test_default_router.py::test_conflict[/teams/{conflict}]", "tests/test_default_router.py::test_conflict[/emojis/signs/{id_too}]", "tests/test_default_router.py::test_conflict[/repos/{org}/{repo}/compare/{complex}:{vs}...{complex2}:{conflict}]", "tests/test_default_router.py::test_conflict[/teams/{id:int}/settings]", "tests/test_default_router.py::test_non_conflict[/repos/{org}/{repo}/compare/{simple_vs_complex}]", "tests/test_default_router.py::test_non_conflict[/repos/{complex}.{vs}.{simple}]", "tests/test_default_router.py::test_non_conflict[/repos/{org}/{repo}/compare/{complex}:{vs}...{complex2}/full]", 
"tests/test_default_router.py::test_invalid_field_name[/{}]", "tests/test_default_router.py::test_invalid_field_name[/repos/{org}/{repo}/compare/{}]", "tests/test_default_router.py::test_invalid_field_name[/repos/{complex}.{}.{thing}]", "tests/test_default_router.py::test_invalid_field_name[/{9v}]", "tests/test_default_router.py::test_invalid_field_name[/{524hello}/world]", "tests/test_default_router.py::test_invalid_field_name[/hello/{1world}]", "tests/test_default_router.py::test_invalid_field_name[/repos/{complex}.{9v}.{thing}/etc]", "tests/test_default_router.py::test_invalid_field_name[/{*kgriffs}]", "tests/test_default_router.py::test_invalid_field_name[/{@kgriffs}]", "tests/test_default_router.py::test_invalid_field_name[/repos/{complex}.{v}.{@thing}/etc]", "tests/test_default_router.py::test_invalid_field_name[/{-kgriffs}]", "tests/test_default_router.py::test_invalid_field_name[/repos/{complex}.{-v}.{thing}/etc]", "tests/test_default_router.py::test_invalid_field_name[/repos/{simple-thing}/etc]", "tests/test_default_router.py::test_invalid_field_name[/this", "tests/test_default_router.py::test_invalid_field_name[/this\\tand\\tthat/this\\nand\\nthat/{thing", "tests/test_default_router.py::test_invalid_field_name[/{thing\\t}/world]", "tests/test_default_router.py::test_invalid_field_name[/{\\nthing}/world]", "tests/test_default_router.py::test_invalid_field_name[/{th\\x0bing}/world]", "tests/test_default_router.py::test_invalid_field_name[/{", "tests/test_default_router.py::test_invalid_field_name[/{thing}/wo", "tests/test_default_router.py::test_invalid_field_name[/{thing}", "tests/test_default_router.py::test_invalid_field_name[/repos/{or", "tests/test_default_router.py::test_invalid_field_name[/repos/{org}/{repo}/compare/{th\\ting}]", "tests/test_default_router.py::test_print_src", "tests/test_default_router.py::test_override", "tests/test_default_router.py::test_literal_segment", "tests/test_default_router.py::test_dead_segment[/teams]", "tests/test_default_router.py::test_dead_segment[/emojis/signs]", "tests/test_default_router.py::test_dead_segment[/gists]", "tests/test_default_router.py::test_dead_segment[/gists/42]", "tests/test_default_router.py::test_malformed_pattern[/repos/racker/falcon/compare/foo]", "tests/test_default_router.py::test_malformed_pattern[/repos/racker/falcon/compare/foo/full]", "tests/test_default_router.py::test_literal", "tests/test_default_router.py::test_converters[/cvt/teams/007-expected_params0]", "tests/test_default_router.py::test_converters[/cvt/teams/1234/members-expected_params1]", "tests/test_default_router.py::test_converters[/cvt/teams/default/members/700-5-expected_params2]", "tests/test_default_router.py::test_converters[/cvt/repos/org/repo/compare/xkcd:353-expected_params3]", "tests/test_default_router.py::test_converters[/cvt/repos/org/repo/compare/gunmachan:1234...kumamon:5678/part-expected_params4]", "tests/test_default_router.py::test_converters[/cvt/repos/xkcd/353/compare/susan:0001/full-expected_params5]", "tests/test_default_router.py::test_converters_with_invalid_options[/foo/{bar:int(0)}]", "tests/test_default_router.py::test_converters_with_invalid_options[/foo/{bar:int(num_digits=0)}]", "tests/test_default_router.py::test_converters_with_invalid_options[/foo/{bar:int(-1)}/baz]", "tests/test_default_router.py::test_converters_with_invalid_options[/foo/{bar:int(num_digits=-1)}/baz]", "tests/test_default_router.py::test_converters_malformed_specification[/foo/{bar:}]", 
"tests/test_default_router.py::test_converters_malformed_specification[/foo/{bar:unknown}/baz]", "tests/test_default_router.py::test_variable", "tests/test_default_router.py::test_single_character_field_name", "tests/test_default_router.py::test_literal_vs_variable[/teams/default-19]", "tests/test_default_router.py::test_literal_vs_variable[/teams/default/members-7]", "tests/test_default_router.py::test_literal_vs_variable[/cvt/teams/default-31]", "tests/test_default_router.py::test_literal_vs_variable[/cvt/teams/default/members/1234-10-32]", "tests/test_default_router.py::test_literal_vs_variable[/teams/1234-6]", "tests/test_default_router.py::test_literal_vs_variable[/teams/1234/members-7]", "tests/test_default_router.py::test_literal_vs_variable[/gists/first-20]", "tests/test_default_router.py::test_literal_vs_variable[/gists/first/raw-18]", "tests/test_default_router.py::test_literal_vs_variable[/gists/first/pdf-21]", "tests/test_default_router.py::test_literal_vs_variable[/gists/1776/pdf-21]", "tests/test_default_router.py::test_literal_vs_variable[/emojis/signs/78-13]", "tests/test_default_router.py::test_literal_vs_variable[/emojis/signs/78/small.png-24]", "tests/test_default_router.py::test_literal_vs_variable[/emojis/signs/78/small(png)-25]", "tests/test_default_router.py::test_literal_vs_variable[/emojis/signs/78/small_png-26]", "tests/test_default_router.py::test_not_found[/this/does/not/exist]", "tests/test_default_router.py::test_not_found[/user/bogus]", "tests/test_default_router.py::test_not_found[/repos/racker/falcon/compare/johndoe:master...janedoe:dev/bogus]", "tests/test_default_router.py::test_not_found[/teams]", "tests/test_default_router.py::test_not_found[/teams/42/members/undefined]", "tests/test_default_router.py::test_not_found[/teams/42/undefined]", "tests/test_default_router.py::test_not_found[/teams/42/undefined/segments]", "tests/test_default_router.py::test_not_found[/teams/default/members/undefined]", "tests/test_default_router.py::test_not_found[/teams/default/members/thing/undefined]", "tests/test_default_router.py::test_not_found[/teams/default/members/thing/undefined/segments]", "tests/test_default_router.py::test_not_found[/teams/default/undefined]", "tests/test_default_router.py::test_not_found[/teams/default/undefined/segments]", "tests/test_default_router.py::test_not_found[/cvt/teams/default/members]", "tests/test_default_router.py::test_not_found[/cvt/teams/NaN]", "tests/test_default_router.py::test_not_found[/cvt/teams/default/members/NaN]", "tests/test_default_router.py::test_not_found[/emojis/signs]", "tests/test_default_router.py::test_not_found[/emojis/signs/0/small]", "tests/test_default_router.py::test_not_found[/emojis/signs/0/undefined]", "tests/test_default_router.py::test_not_found[/emojis/signs/0/undefined/segments]", "tests/test_default_router.py::test_not_found[/emojis/signs/20/small]", "tests/test_default_router.py::test_not_found[/emojis/signs/20/undefined]", "tests/test_default_router.py::test_not_found[/emojis/signs/42/undefined]", "tests/test_default_router.py::test_not_found[/emojis/signs/78/undefined]", "tests/test_default_router.py::test_subsegment_not_found", "tests/test_default_router.py::test_multivar", "tests/test_default_router.py::test_complex[-5]", "tests/test_default_router.py::test_complex[/full-10]", "tests/test_default_router.py::test_complex[/part-15]", "tests/test_default_router.py::test_complex_alt[-16-/repos/{org}/{repo}/compare/{usr0}:{branch0}]", 
"tests/test_default_router.py::test_complex_alt[/full-17-/repos/{org}/{repo}/compare/{usr0}:{branch0}/full]", "tests/test_default_router.py::test_options_converters_set", "tests/test_default_router.py::test_options_converters_update[spam]", "tests/test_default_router.py::test_options_converters_update[spam_2]", "tests/test_default_router.py::test_options_converters_invalid_name[has", "tests/test_default_router.py::test_options_converters_invalid_name[whitespace", "tests/test_default_router.py::test_options_converters_invalid_name[", "tests/test_default_router.py::test_options_converters_invalid_name[funky$character]", "tests/test_default_router.py::test_options_converters_invalid_name[42istheanswer]", "tests/test_default_router.py::test_options_converters_invalid_name[with-hyphen]", "tests/test_default_router.py::test_options_converters_invalid_name_on_update", "tests/test_deps.py::test_deps_mimeparse_correct_package", "tests/test_error.py::test_with_default_title_and_desc[HTTPBadRequest-400", "tests/test_error.py::test_with_default_title_and_desc[HTTPForbidden-403", "tests/test_error.py::test_with_default_title_and_desc[HTTPConflict-409", "tests/test_error.py::test_with_default_title_and_desc[HTTPLengthRequired-411", "tests/test_error.py::test_with_default_title_and_desc[HTTPPreconditionFailed-412", "tests/test_error.py::test_with_default_title_and_desc[HTTPRequestEntityTooLarge-413", "tests/test_error.py::test_with_default_title_and_desc[HTTPUriTooLong-414", "tests/test_error.py::test_with_default_title_and_desc[HTTPUnprocessableEntity-422", "tests/test_error.py::test_with_default_title_and_desc[HTTPLocked-423", "tests/test_error.py::test_with_default_title_and_desc[HTTPFailedDependency-424", "tests/test_error.py::test_with_default_title_and_desc[HTTPPreconditionRequired-428", "tests/test_error.py::test_with_default_title_and_desc[HTTPTooManyRequests-429", "tests/test_error.py::test_with_default_title_and_desc[HTTPRequestHeaderFieldsTooLarge-431", "tests/test_error.py::test_with_default_title_and_desc[HTTPUnavailableForLegalReasons-451", "tests/test_error.py::test_with_default_title_and_desc[HTTPInternalServerError-500", "tests/test_error.py::test_with_default_title_and_desc[HTTPNotImplemented-501", "tests/test_error.py::test_with_default_title_and_desc[HTTPBadGateway-502", "tests/test_error.py::test_with_default_title_and_desc[HTTPServiceUnavailable-503", "tests/test_error.py::test_with_default_title_and_desc[HTTPGatewayTimeout-504", "tests/test_error.py::test_with_default_title_and_desc[HTTPVersionNotSupported-505", "tests/test_error.py::test_with_default_title_and_desc[HTTPInsufficientStorage-507", "tests/test_error.py::test_with_default_title_and_desc[HTTPLoopDetected-508", "tests/test_error.py::test_with_default_title_and_desc[HTTPNetworkAuthenticationRequired-511", "tests/test_error.py::test_with_title_and_desc[HTTPBadRequest]", "tests/test_error.py::test_with_title_and_desc[HTTPForbidden]", "tests/test_error.py::test_with_title_and_desc[HTTPConflict]", "tests/test_error.py::test_with_title_and_desc[HTTPLengthRequired]", "tests/test_error.py::test_with_title_and_desc[HTTPPreconditionFailed]", "tests/test_error.py::test_with_title_and_desc[HTTPPreconditionRequired]", "tests/test_error.py::test_with_title_and_desc[HTTPUriTooLong]", "tests/test_error.py::test_with_title_and_desc[HTTPUnprocessableEntity]", "tests/test_error.py::test_with_title_and_desc[HTTPLocked]", "tests/test_error.py::test_with_title_and_desc[HTTPFailedDependency]", 
"tests/test_error.py::test_with_title_and_desc[HTTPRequestHeaderFieldsTooLarge]", "tests/test_error.py::test_with_title_and_desc[HTTPUnavailableForLegalReasons]", "tests/test_error.py::test_with_title_and_desc[HTTPInternalServerError]", "tests/test_error.py::test_with_title_and_desc[HTTPNotImplemented]", "tests/test_error.py::test_with_title_and_desc[HTTPBadGateway]", "tests/test_error.py::test_with_title_and_desc[HTTPServiceUnavailable]", "tests/test_error.py::test_with_title_and_desc[HTTPGatewayTimeout]", "tests/test_error.py::test_with_title_and_desc[HTTPVersionNotSupported]", "tests/test_error.py::test_with_title_and_desc[HTTPInsufficientStorage]", "tests/test_error.py::test_with_title_and_desc[HTTPLoopDetected]", "tests/test_error.py::test_with_title_and_desc[HTTPNetworkAuthenticationRequired]", "tests/test_error.py::test_with_retry_after[HTTPServiceUnavailable]", "tests/test_error.py::test_with_retry_after[HTTPTooManyRequests]", "tests/test_error.py::test_with_retry_after[HTTPRequestEntityTooLarge]", "tests/test_error.py::test_http_unauthorized_no_title_and_desc_and_challenges", "tests/test_error.py::test_http_unauthorized_with_title_and_desc_and_challenges", "tests/test_error.py::test_http_not_acceptable_no_title_and_desc_and_challenges", "tests/test_error.py::test_http_not_acceptable_with_title_and_desc_and_challenges", "tests/test_error.py::test_http_unsupported_media_type_no_title_and_desc_and_challenges", "tests/test_error.py::test_http_unsupported_media_type_with_title_and_desc_and_challenges", "tests/test_error.py::test_http_error_repr", "tests/test_error_handlers.py::TestErrorHandler::test_caught_error", "tests/test_error_handlers.py::TestErrorHandler::test_uncaught_error", "tests/test_error_handlers.py::TestErrorHandler::test_uncaught_error_else", "tests/test_error_handlers.py::TestErrorHandler::test_converted_error", "tests/test_error_handlers.py::TestErrorHandler::test_handle_not_defined", "tests/test_error_handlers.py::TestErrorHandler::test_subclass_error", "tests/test_error_handlers.py::TestErrorHandler::test_error_order_duplicate", "tests/test_error_handlers.py::TestErrorHandler::test_error_order_subclass", "tests/test_error_handlers.py::TestErrorHandler::test_error_order_subclass_masked", "tests/test_headers.py::TestHeaders::test_content_length", "tests/test_headers.py::TestHeaders::test_default_value", "tests/test_headers.py::TestHeaders::test_unset_header", "tests/test_headers.py::TestHeaders::test_required_header", "tests/test_headers.py::TestHeaders::test_no_content_length[204", "tests/test_headers.py::TestHeaders::test_no_content_length[304", "tests/test_headers.py::TestHeaders::test_content_header_missing", "tests/test_headers.py::TestHeaders::test_passthrough_request_headers", "tests/test_headers.py::TestHeaders::test_headers_as_list", "tests/test_headers.py::TestHeaders::test_default_media_type", "tests/test_headers.py::TestHeaders::test_override_default_media_type[text/plain;", "tests/test_headers.py::TestHeaders::test_override_default_media_type[text/plain-Hello", "tests/test_headers.py::TestHeaders::test_override_default_media_type_missing_encoding", "tests/test_headers.py::TestHeaders::test_response_header_helpers_on_get", "tests/test_headers.py::TestHeaders::test_unicode_location_headers", "tests/test_headers.py::TestHeaders::test_unicode_headers_convertable", "tests/test_headers.py::TestHeaders::test_response_set_and_get_header", "tests/test_headers.py::TestHeaders::test_response_append_header", "tests/test_headers.py::TestHeaders::test_vary_star", 
"tests/test_headers.py::TestHeaders::test_vary_header[vary0-accept-encoding]", "tests/test_headers.py::TestHeaders::test_vary_header[vary1-accept-encoding,", "tests/test_headers.py::TestHeaders::test_vary_header[vary2-accept-encoding,", "tests/test_headers.py::TestHeaders::test_content_type_no_body", "tests/test_headers.py::TestHeaders::test_no_content_type[204", "tests/test_headers.py::TestHeaders::test_no_content_type[304", "tests/test_headers.py::TestHeaders::test_custom_content_type", "tests/test_headers.py::TestHeaders::test_add_link_single", "tests/test_headers.py::TestHeaders::test_add_link_multiple", "tests/test_headers.py::TestHeaders::test_add_link_with_title", "tests/test_headers.py::TestHeaders::test_add_link_with_title_star", "tests/test_headers.py::TestHeaders::test_add_link_with_anchor", "tests/test_headers.py::TestHeaders::test_add_link_with_hreflang", "tests/test_headers.py::TestHeaders::test_add_link_with_hreflang_multi", "tests/test_headers.py::TestHeaders::test_add_link_with_type_hint", "tests/test_headers.py::TestHeaders::test_add_link_complex", "tests/test_headers.py::TestHeaders::test_content_length_options", "tests/test_hello.py::TestHelloWorld::test_env_headers_list_of_tuples", "tests/test_hello.py::TestHelloWorld::test_root_route", "tests/test_hello.py::TestHelloWorld::test_no_route", "tests/test_hello.py::TestHelloWorld::test_body[/body-resource0-<lambda>]", "tests/test_hello.py::TestHelloWorld::test_body[/bytes-resource1-<lambda>]", "tests/test_hello.py::TestHelloWorld::test_body[/data-resource2-<lambda>]", "tests/test_hello.py::TestHelloWorld::test_no_body_on_head", "tests/test_hello.py::TestHelloWorld::test_stream_chunked", "tests/test_hello.py::TestHelloWorld::test_stream_known_len", "tests/test_hello.py::TestHelloWorld::test_filelike", "tests/test_hello.py::TestHelloWorld::test_filelike_closing[ClosingBytesIO-True]", "tests/test_hello.py::TestHelloWorld::test_filelike_closing[NonClosingBytesIO-False]", "tests/test_hello.py::TestHelloWorld::test_filelike_using_helper", "tests/test_hello.py::TestHelloWorld::test_status_not_set", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_get", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_put", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_post_not_allowed", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_report", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_misc", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_methods_not_allowed_simple", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_methods_not_allowed_complex", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_method_not_allowed_with_param", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_default_on_options", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_on_options", "tests/test_http_method_routing.py::TestHttpMethodRouting::test_bogus_method", "tests/test_httperror.py::TestHTTPError::test_base_class", "tests/test_httperror.py::TestHTTPError::test_no_description_json", "tests/test_httperror.py::TestHTTPError::test_no_description_xml", "tests/test_httperror.py::TestHTTPError::test_client_does_not_accept_json_or_xml", "tests/test_httperror.py::TestHTTPError::test_custom_old_error_serializer_no_body", "tests/test_httperror.py::TestHTTPError::test_client_does_not_accept_anything", "tests/test_httperror.py::TestHTTPError::test_forbidden[application/json]", 
"tests/test_httperror.py::TestHTTPError::test_forbidden[application/vnd.company.system.project.resource+json;v=1.1]", "tests/test_httperror.py::TestHTTPError::test_forbidden[application/json-patch+json]", "tests/test_httperror.py::TestHTTPError::test_epic_fail_json", "tests/test_httperror.py::TestHTTPError::test_epic_fail_xml[text/xml]", "tests/test_httperror.py::TestHTTPError::test_epic_fail_xml[application/xml]", "tests/test_httperror.py::TestHTTPError::test_epic_fail_xml[application/vnd.company.system.project.resource+xml;v=1.1]", "tests/test_httperror.py::TestHTTPError::test_epic_fail_xml[application/atom+xml]", "tests/test_httperror.py::TestHTTPError::test_unicode_json", "tests/test_httperror.py::TestHTTPError::test_unicode_xml", "tests/test_httperror.py::TestHTTPError::test_401", "tests/test_httperror.py::TestHTTPError::test_404_without_body", "tests/test_httperror.py::TestHTTPError::test_404_with_body", "tests/test_httperror.py::TestHTTPError::test_405_without_body", "tests/test_httperror.py::TestHTTPError::test_405_without_body_with_extra_headers", "tests/test_httperror.py::TestHTTPError::test_405_without_body_with_extra_headers_double_check", "tests/test_httperror.py::TestHTTPError::test_405_with_body", "tests/test_httperror.py::TestHTTPError::test_410_without_body", "tests/test_httperror.py::TestHTTPError::test_410_with_body", "tests/test_httperror.py::TestHTTPError::test_411", "tests/test_httperror.py::TestHTTPError::test_413", "tests/test_httperror.py::TestHTTPError::test_temporary_413_integer_retry_after", "tests/test_httperror.py::TestHTTPError::test_temporary_413_datetime_retry_after", "tests/test_httperror.py::TestHTTPError::test_414", "tests/test_httperror.py::TestHTTPError::test_414_with_title", "tests/test_httperror.py::TestHTTPError::test_414_with_description", "tests/test_httperror.py::TestHTTPError::test_414_with_custom_kwargs", "tests/test_httperror.py::TestHTTPError::test_416", "tests/test_httperror.py::TestHTTPError::test_429_no_retry_after", "tests/test_httperror.py::TestHTTPError::test_429", "tests/test_httperror.py::TestHTTPError::test_429_datetime", "tests/test_httperror.py::TestHTTPError::test_503_integer_retry_after", "tests/test_httperror.py::TestHTTPError::test_503_datetime_retry_after", "tests/test_httperror.py::TestHTTPError::test_invalid_header", "tests/test_httperror.py::TestHTTPError::test_missing_header", "tests/test_httperror.py::TestHTTPError::test_invalid_param", "tests/test_httperror.py::TestHTTPError::test_missing_param", "tests/test_httperror.py::TestHTTPError::test_misc", "tests/test_httperror.py::TestHTTPError::test_title_default_message_if_none", "tests/test_httpstatus.py::TestHTTPStatus::test_raise_status_in_before_hook", "tests/test_httpstatus.py::TestHTTPStatus::test_raise_status_in_responder", "tests/test_httpstatus.py::TestHTTPStatus::test_raise_status_runs_after_hooks", "tests/test_httpstatus.py::TestHTTPStatus::test_raise_status_survives_after_hooks", "tests/test_httpstatus.py::TestHTTPStatus::test_raise_status_empty_body", "tests/test_httpstatus.py::TestHTTPStatusWithMiddleware::test_raise_status_in_process_request", "tests/test_httpstatus.py::TestHTTPStatusWithMiddleware::test_raise_status_in_process_resource", "tests/test_httpstatus.py::TestHTTPStatusWithMiddleware::test_raise_status_runs_process_response", "tests/test_media_handlers.py::test_base_handler_contract", "tests/test_middleware.py::TestRequestTimeMiddleware::test_skip_process_resource", "tests/test_middleware.py::TestRequestTimeMiddleware::test_add_invalid_middleware", 
"tests/test_middleware.py::TestRequestTimeMiddleware::test_response_middleware_raises_exception", "tests/test_middleware.py::TestRequestTimeMiddleware::test_log_get_request", "tests/test_middleware.py::TestTransactionIdMiddleware::test_generate_trans_id_with_request", "tests/test_middleware.py::TestSeveralMiddlewares::test_generate_trans_id_and_time_with_request", "tests/test_middleware.py::TestSeveralMiddlewares::test_legacy_middleware_called_with_correct_args", "tests/test_middleware.py::TestSeveralMiddlewares::test_middleware_execution_order", "tests/test_middleware.py::TestSeveralMiddlewares::test_independent_middleware_execution_order", "tests/test_middleware.py::TestSeveralMiddlewares::test_multiple_reponse_mw_throw_exception", "tests/test_middleware.py::TestSeveralMiddlewares::test_inner_mw_throw_exception", "tests/test_middleware.py::TestSeveralMiddlewares::test_inner_mw_with_ex_handler_throw_exception", "tests/test_middleware.py::TestSeveralMiddlewares::test_outer_mw_with_ex_handler_throw_exception", "tests/test_middleware.py::TestSeveralMiddlewares::test_order_mw_executed_when_exception_in_resp", "tests/test_middleware.py::TestSeveralMiddlewares::test_order_independent_mw_executed_when_exception_in_resp", "tests/test_middleware.py::TestSeveralMiddlewares::test_order_mw_executed_when_exception_in_req", "tests/test_middleware.py::TestSeveralMiddlewares::test_order_independent_mw_executed_when_exception_in_req", "tests/test_middleware.py::TestSeveralMiddlewares::test_order_mw_executed_when_exception_in_rsrc", "tests/test_middleware.py::TestSeveralMiddlewares::test_order_independent_mw_executed_when_exception_in_rsrc", "tests/test_middleware.py::TestRemoveBasePathMiddleware::test_base_path_is_removed_before_routing", "tests/test_middleware.py::TestResourceMiddleware::test_can_access_resource_params", "tests/test_middleware.py::TestErrorHandling::test_error_composed_before_resp_middleware_called", "tests/test_middleware.py::TestErrorHandling::test_http_status_raised_from_error_handler", "tests/test_options.py::TestRequestOptions::test_option_defaults", "tests/test_options.py::TestRequestOptions::test_options_toggle[keep_blank_qs_values]", "tests/test_options.py::TestRequestOptions::test_options_toggle[auto_parse_form_urlencoded]", "tests/test_options.py::TestRequestOptions::test_options_toggle[auto_parse_qs_csv]", "tests/test_options.py::TestRequestOptions::test_options_toggle[strip_url_path_trailing_slash]", "tests/test_options.py::TestRequestOptions::test_incorrect_options", "tests/test_query_params.py::TestQueryParams::test_none[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_none[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_blank[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_blank[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_simple[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_simple[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_percent_encoded[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_percent_encoded[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_option_auto_parse_qs_csv_simple_false[simulate_request_get_query_params]", 
"tests/test_query_params.py::TestQueryParams::test_option_auto_parse_qs_csv_simple_false[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_option_auto_parse_qs_csv_simple_true[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_option_auto_parse_qs_csv_simple_true[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_option_auto_parse_qs_csv_complex_false[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_option_auto_parse_qs_csv_complex_false[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_bad_percentage[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_bad_percentage[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_allowed_names[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_allowed_names[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param-simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param-simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_int-simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_int-simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_uuid-simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_uuid-simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_bool-simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_bool-simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_list-simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_required[get_param_as_list-simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_int[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_int[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_int_neg[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_int_neg[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_uuid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_uuid[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_boolean[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_boolean[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_boolean_blank[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_boolean_blank[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_list_type[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_list_type[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_list_type_blank[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_list_type_blank[simulate_request_post_query_params]", 
"tests/test_query_params.py::TestQueryParams::test_list_transformer[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_list_transformer[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_param_property[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_param_property[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_form_keys[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_form_keys[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_keys_as_bool[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_keys_as_bool[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_keys_as_int[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_keys_as_int[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_form_keys_as_list[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_multiple_form_keys_as_list[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_valid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_valid[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_missing_param[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_missing_param[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_valid_with_format[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_valid_with_format[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_store[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_store[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_invalid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_date_invalid[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_valid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_valid[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_missing_param[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_missing_param[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_valid_with_format[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_valid_with_format[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_store[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_store[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_invalid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_datetime_invalid[simulate_request_post_query_params]", 
"tests/test_query_params.py::TestQueryParams::test_get_dict_valid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_valid[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_missing_param[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_missing_param[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_store[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_store[simulate_request_post_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_invalid[simulate_request_get_query_params]", "tests/test_query_params.py::TestQueryParams::test_get_dict_invalid[simulate_request_post_query_params]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_expected[POST]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_expected[PUT]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_expected[PATCH]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_expected[DELETE]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_expected[OPTIONS]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_not_expected[GET]", "tests/test_query_params.py::TestPostQueryParams::test_http_methods_body_not_expected[HEAD]", "tests/test_query_params.py::TestPostQueryParams::test_non_ascii", "tests/test_query_params.py::TestPostQueryParams::test_empty_body", "tests/test_query_params.py::TestPostQueryParams::test_empty_body_no_content_length", "tests/test_query_params.py::TestPostQueryParams::test_explicitly_disable_auto_parse", "tests/test_query_params.py::TestPostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default", "tests/test_redirects.py::TestRedirects::test_redirect[GET-301", "tests/test_redirects.py::TestRedirects::test_redirect[POST-302", "tests/test_redirects.py::TestRedirects::test_redirect[PUT-303", "tests/test_redirects.py::TestRedirects::test_redirect[DELETE-307", "tests/test_redirects.py::TestRedirects::test_redirect[HEAD-308", "tests/test_request_access_route.py::test_remote_addr_only", "tests/test_request_access_route.py::test_rfc_forwarded", "tests/test_request_access_route.py::test_malformed_rfc_forwarded", "tests/test_request_access_route.py::test_x_forwarded_for", "tests/test_request_access_route.py::test_x_real_ip", "tests/test_request_access_route.py::test_remote_addr", "tests/test_request_access_route.py::test_remote_addr_missing", "tests/test_request_attrs.py::TestRequestAttributes::test_missing_qs", "tests/test_request_attrs.py::TestRequestAttributes::test_empty", "tests/test_request_attrs.py::TestRequestAttributes::test_host", "tests/test_request_attrs.py::TestRequestAttributes::test_subdomain", "tests/test_request_attrs.py::TestRequestAttributes::test_reconstruct_url", "tests/test_request_attrs.py::TestRequestAttributes::test_nonlatin_path[/hello_\\u043f\\u0440\\u0438\\u0432\\u0435\\u0442]", "tests/test_request_attrs.py::TestRequestAttributes::test_nonlatin_path[/test/%E5%BB%B6%E5%AE%89]", "tests/test_request_attrs.py::TestRequestAttributes::test_nonlatin_path[/test/%C3%A4%C3%B6%C3%BC%C3%9F%E2%82%AC]", "tests/test_request_attrs.py::TestRequestAttributes::test_uri", "tests/test_request_attrs.py::TestRequestAttributes::test_uri_https", "tests/test_request_attrs.py::TestRequestAttributes::test_uri_http_1_0", 
"tests/test_request_attrs.py::TestRequestAttributes::test_relative_uri", "tests/test_request_attrs.py::TestRequestAttributes::test_client_accepts", "tests/test_request_attrs.py::TestRequestAttributes::test_client_accepts_bogus", "tests/test_request_attrs.py::TestRequestAttributes::test_client_accepts_props", "tests/test_request_attrs.py::TestRequestAttributes::test_client_prefers", "tests/test_request_attrs.py::TestRequestAttributes::test_range", "tests/test_request_attrs.py::TestRequestAttributes::test_range_unit", "tests/test_request_attrs.py::TestRequestAttributes::test_range_invalid", "tests/test_request_attrs.py::TestRequestAttributes::test_missing_attribute_header", "tests/test_request_attrs.py::TestRequestAttributes::test_content_length", "tests/test_request_attrs.py::TestRequestAttributes::test_bogus_content_length_nan", "tests/test_request_attrs.py::TestRequestAttributes::test_bogus_content_length_neg", "tests/test_request_attrs.py::TestRequestAttributes::test_date[Date-date]", "tests/test_request_attrs.py::TestRequestAttributes::test_date[If-Modified-Since-if_modified_since]", "tests/test_request_attrs.py::TestRequestAttributes::test_date[If-Unmodified-Since-if_unmodified_since]", "tests/test_request_attrs.py::TestRequestAttributes::test_date_invalid[Date-date]", "tests/test_request_attrs.py::TestRequestAttributes::test_date_invalid[If-Modified-Since-if_modified_since]", "tests/test_request_attrs.py::TestRequestAttributes::test_date_invalid[If-Unmodified-Since-if_unmodified_since]", "tests/test_request_attrs.py::TestRequestAttributes::test_date_missing[date]", "tests/test_request_attrs.py::TestRequestAttributes::test_date_missing[if_modified_since]", "tests/test_request_attrs.py::TestRequestAttributes::test_date_missing[if_unmodified_since]", "tests/test_request_attrs.py::TestRequestAttributes::test_attribute_headers", "tests/test_request_attrs.py::TestRequestAttributes::test_method", "tests/test_request_attrs.py::TestRequestAttributes::test_empty_path", "tests/test_request_attrs.py::TestRequestAttributes::test_content_type_method", "tests/test_request_attrs.py::TestRequestAttributes::test_content_length_method", "tests/test_request_attrs.py::TestRequestAttributes::test_port_explicit[HTTP/1.0]", "tests/test_request_attrs.py::TestRequestAttributes::test_port_explicit[HTTP/1.1]", "tests/test_request_attrs.py::TestRequestAttributes::test_scheme_https[HTTP/1.0]", "tests/test_request_attrs.py::TestRequestAttributes::test_scheme_https[HTTP/1.1]", "tests/test_request_attrs.py::TestRequestAttributes::test_scheme_http[HTTP/1.0-True]", "tests/test_request_attrs.py::TestRequestAttributes::test_scheme_http[HTTP/1.0-False]", "tests/test_request_attrs.py::TestRequestAttributes::test_scheme_http[HTTP/1.1-True]", "tests/test_request_attrs.py::TestRequestAttributes::test_scheme_http[HTTP/1.1-False]", "tests/test_request_attrs.py::TestRequestAttributes::test_netloc_default_port[HTTP/1.0]", "tests/test_request_attrs.py::TestRequestAttributes::test_netloc_default_port[HTTP/1.1]", "tests/test_request_attrs.py::TestRequestAttributes::test_netloc_nondefault_port[HTTP/1.0]", "tests/test_request_attrs.py::TestRequestAttributes::test_netloc_nondefault_port[HTTP/1.1]", "tests/test_request_attrs.py::TestRequestAttributes::test_netloc_from_env[HTTP/1.0]", "tests/test_request_attrs.py::TestRequestAttributes::test_netloc_from_env[HTTP/1.1]", "tests/test_request_attrs.py::TestRequestAttributes::test_app_present", "tests/test_request_attrs.py::TestRequestAttributes::test_app_blank", 
"tests/test_request_attrs.py::TestRequestAttributes::test_app_missing", "tests/test_request_body.py::TestRequestBody::test_empty_body", "tests/test_request_body.py::TestRequestBody::test_tiny_body", "tests/test_request_body.py::TestRequestBody::test_tiny_body_overflow", "tests/test_request_body.py::TestRequestBody::test_read_body", "tests/test_request_body.py::TestRequestBody::test_bounded_stream_property_empty_body", "tests/test_request_body.py::TestRequestBody::test_body_stream_wrapper", "tests/test_request_body.py::TestRequestBody::test_request_repr", "tests/test_request_context.py::TestRequestContext::test_default_request_context", "tests/test_request_context.py::TestRequestContext::test_custom_request_context", "tests/test_request_context.py::TestRequestContext::test_custom_request_context_failure", "tests/test_request_context.py::TestRequestContext::test_custom_request_context_request_access", "tests/test_request_forwarded.py::test_no_forwarded_headers", "tests/test_request_forwarded.py::test_x_forwarded_host", "tests/test_request_forwarded.py::test_x_forwarded_proto", "tests/test_request_forwarded.py::test_forwarded_host", "tests/test_request_forwarded.py::test_forwarded_multiple_params", "tests/test_request_forwarded.py::test_forwarded_missing_first_hop_host", "tests/test_request_forwarded.py::test_forwarded_quote_escaping", "tests/test_request_forwarded.py::test_escape_malformed_requests[for=1.2.3.4;by=\"-None]", "tests/test_request_forwarded.py::test_escape_malformed_requests[for=1.2.3.4;by=4\\\\.3.2.1thing=blah-4]", "tests/test_request_forwarded.py::test_escape_malformed_requests[for=1.2.3.4;by=\"\\\\4.3.2.1\"thing=blah-4.3.2.1]", "tests/test_request_forwarded.py::test_escape_malformed_requests[for=1.2.3.4;by=\"4.3.2.\\\\1\"thing=\"blah\"-4.3.2.1]", "tests/test_request_forwarded.py::test_escape_malformed_requests[for=1.2.3.4;by=\"4.3.\\\\2\\\\.1\"", "tests/test_request_media.py::test_json[None]", "tests/test_request_media.py::test_json[*/*]", "tests/test_request_media.py::test_json[application/json]", "tests/test_request_media.py::test_json[application/json;", "tests/test_request_media.py::test_unknown_media_type[nope/json]", "tests/test_request_media.py::test_invalid_json", "tests/test_request_media.py::test_invalid_stream_fails_gracefully", "tests/test_request_media.py::test_use_cached_media", "tests/test_response.py::test_response_set_content_type_set", "tests/test_response.py::test_response_set_content_type_not_set", "tests/test_response_body.py::TestResponseBody::test_append_body", "tests/test_response_body.py::TestResponseBody::test_response_repr", "tests/test_response_context.py::TestRequestContext::test_default_response_context", "tests/test_response_context.py::TestRequestContext::test_custom_response_context", "tests/test_response_context.py::TestRequestContext::test_custom_response_context_failure", "tests/test_response_context.py::TestRequestContext::test_custom_response_context_factory", "tests/test_response_media.py::test_json[*/*]", "tests/test_response_media.py::test_json[application/json;", "tests/test_response_media.py::test_non_ascii_json_serialization[]", "tests/test_response_media.py::test_non_ascii_json_serialization[I", "tests/test_response_media.py::test_non_ascii_json_serialization[document2]", "tests/test_response_media.py::test_non_ascii_json_serialization[document3]", "tests/test_response_media.py::test_non_ascii_json_serialization[document4]", "tests/test_response_media.py::test_unknown_media_type", 
"tests/test_response_media.py::test_use_cached_media", "tests/test_response_media.py::test_default_media_type", "tests/test_response_media.py::test_mimeparse_edgecases", "tests/test_sinks.py::TestDefaultRouting::test_single_default_pattern", "tests/test_sinks.py::TestDefaultRouting::test_single_simple_pattern", "tests/test_sinks.py::TestDefaultRouting::test_single_compiled_pattern", "tests/test_sinks.py::TestDefaultRouting::test_named_groups", "tests/test_sinks.py::TestDefaultRouting::test_multiple_patterns", "tests/test_sinks.py::TestDefaultRouting::test_with_route", "tests/test_sinks.py::TestDefaultRouting::test_route_precedence", "tests/test_sinks.py::TestDefaultRouting::test_route_precedence_with_id", "tests/test_sinks.py::TestDefaultRouting::test_route_precedence_with_both_id", "tests/test_slots.py::TestSlots::test_slots_request", "tests/test_slots.py::TestSlots::test_slots_response", "tests/test_static.py::test_bad_path[/static]", "tests/test_static.py::test_bad_path[/static/]", "tests/test_static.py::test_bad_path[/static/.]", "tests/test_static.py::test_bad_path[/static/..]", "tests/test_static.py::test_bad_path[/static/../.]", "tests/test_static.py::test_bad_path[/static/.././etc/passwd]", "tests/test_static.py::test_bad_path[/static/../etc/passwd]", "tests/test_static.py::test_bad_path[/static/css/../../secret]", "tests/test_static.py::test_bad_path[/static/css/../../etc/passwd]", "tests/test_static.py::test_bad_path[/static/./../etc/passwd]", "tests/test_static.py::test_bad_path[/static/css/../.\\\\056/etc/passwd]", "tests/test_static.py::test_bad_path[/static/./\\\\056./etc/passwd]", "tests/test_static.py::test_bad_path[/static/\\\\056\\\\056/etc/passwd]", "tests/test_static.py::test_bad_path[/static//test.css]", "tests/test_static.py::test_bad_path[/static//COM10]", "tests/test_static.py::test_bad_path[/static/path//test.css]", "tests/test_static.py::test_bad_path[/static/path///test.css]", "tests/test_static.py::test_bad_path[/static/path////test.css]", "tests/test_static.py::test_bad_path[/static/path/foo//test.css]", "tests/test_static.py::test_bad_path[/static/.\\x00ssh/authorized_keys]", "tests/test_static.py::test_bad_path[/static/.\\x1fssh/authorized_keys]", "tests/test_static.py::test_bad_path[/static/.\\x80ssh/authorized_keys]", "tests/test_static.py::test_bad_path[/static/.\\x9fssh/authorized_keys]", "tests/test_static.py::test_bad_path[/static/~/.ssh/authorized_keys]", "tests/test_static.py::test_bad_path[/static/.ssh/authorized_key?]", "tests/test_static.py::test_bad_path[/static/.ssh/authorized_key>foo]", "tests/test_static.py::test_bad_path[/static/.ssh/authorized_key|foo]", "tests/test_static.py::test_bad_path[/static/.ssh/authorized_key<foo]", "tests/test_static.py::test_bad_path[/static/something:something]", "tests/test_static.py::test_bad_path[/static/thing*.sql]", "tests/test_static.py::test_bad_path[/static/'thing'.sql]", "tests/test_static.py::test_bad_path[/static/\"thing\".sql]", "tests/test_static.py::test_bad_path[/static/something.]", "tests/test_static.py::test_bad_path[/static/something..]", "tests/test_static.py::test_bad_path[/static/something", "tests/test_static.py::test_bad_path[/static/", "tests/test_static.py::test_bad_path[/static/something\\t]", "tests/test_static.py::test_bad_path[/static/\\tsomething]", 
"tests/test_static.py::test_bad_path[/static/ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttx]", "tests/test_static.py::test_invalid_args[static-/var/www/statics]", "tests/test_static.py::test_invalid_args[/static-./var/www/statics]", "tests/test_static.py::test_invalid_args[/static-statics]", "tests/test_static.py::test_invalid_args[/static-../statics]", "tests/test_static.py::test_good_path[/static/-/css/test.css-/css/test.css-text/css]", "tests/test_static.py::test_good_path[/static-/css/test.css-/css/test.css-text/css]", "tests/test_static.py::test_good_path[/static-/tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt-/tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt-application/octet-stream]", "tests/test_static.py::test_good_path[/static-/.test.css-/.test.css-text/css]", "tests/test_static.py::test_good_path[/some/download/-/report.pdf-/report.pdf-application/pdf]", "tests/test_static.py::test_good_path[/some/download/-/Fancy", "tests/test_static.py::test_good_path[/some/download-/report.zip-/report.zip-application/zip]", "tests/test_static.py::test_good_path[/some/download-/foo/../report.zip-/report.zip-application/zip]", "tests/test_static.py::test_good_path[/some/download-/foo/../bar/../report.zip-/report.zip-application/zip]", "tests/test_static.py::test_good_path[/some/download-/foo/bar/../../report.zip-/report.zip-application/zip]", "tests/test_static.py::test_lifo", "tests/test_static.py::test_lifo_negative", "tests/test_static.py::test_downloadable", "tests/test_static.py::test_downloadable_not_found", "tests/test_uri_converters.py::test_int_converter[123-None-None-None-123]", "tests/test_uri_converters.py::test_int_converter[01-None-None-None-1]", "tests/test_uri_converters.py::test_int_converter[001-None-None-None-1]", "tests/test_uri_converters.py::test_int_converter[0-None-None-None-0]", "tests/test_uri_converters.py::test_int_converter[00-None-None-None-0]", "tests/test_uri_converters.py::test_int_converter[1-1-None-None-1]", "tests/test_uri_converters.py::test_int_converter[12-1-None-None-None0]", "tests/test_uri_converters.py::test_int_converter[12-2-None-None-120]", 
"tests/test_uri_converters.py::test_int_converter[1-1-1-1-1]", "tests/test_uri_converters.py::test_int_converter[1-1-1-None-1]", "tests/test_uri_converters.py::test_int_converter[1-1-1-2-1]", "tests/test_uri_converters.py::test_int_converter[1-1-2-None-None]", "tests/test_uri_converters.py::test_int_converter[1-1-2-1-None]", "tests/test_uri_converters.py::test_int_converter[2-1-1-2-2]", "tests/test_uri_converters.py::test_int_converter[2-1-2-2-2]", "tests/test_uri_converters.py::test_int_converter[3-1-1-2-None]", "tests/test_uri_converters.py::test_int_converter[12-1-None-None-None1]", "tests/test_uri_converters.py::test_int_converter[12-1-1-12-None]", "tests/test_uri_converters.py::test_int_converter[12-2-None-None-121]", "tests/test_uri_converters.py::test_int_converter[12-2-1-12-12]", "tests/test_uri_converters.py::test_int_converter[12-2-12-12-12]", "tests/test_uri_converters.py::test_int_converter[12-2-13-12-None]", "tests/test_uri_converters.py::test_int_converter[12-2-13-13-None]", "tests/test_uri_converters.py::test_int_converter_malformed[0x0F]", "tests/test_uri_converters.py::test_int_converter_malformed[something]", "tests/test_uri_converters.py::test_int_converter_malformed[]", "tests/test_uri_converters.py::test_int_converter_malformed[", "tests/test_uri_converters.py::test_int_converter_malformed[123", "tests/test_uri_converters.py::test_int_converter_malformed[123\\t]", "tests/test_uri_converters.py::test_int_converter_malformed[123\\n]", "tests/test_uri_converters.py::test_int_converter_malformed[123\\r]", "tests/test_uri_converters.py::test_int_converter_malformed[123\\x0b]", "tests/test_uri_converters.py::test_int_converter_malformed[123\\x0c]", "tests/test_uri_converters.py::test_int_converter_malformed[\\t123]", "tests/test_uri_converters.py::test_int_converter_malformed[\\n123]", "tests/test_uri_converters.py::test_int_converter_malformed[\\r123]", "tests/test_uri_converters.py::test_int_converter_malformed[\\x0b123]", "tests/test_uri_converters.py::test_int_converter_malformed[\\x0c123]", "tests/test_uri_converters.py::test_int_converter_invalid_config[0]", "tests/test_uri_converters.py::test_int_converter_invalid_config[-1]", "tests/test_uri_converters.py::test_int_converter_invalid_config[-10]", "tests/test_uri_converters.py::test_datetime_converter[07-03-17-%m-%d-%y-expected0]", "tests/test_uri_converters.py::test_datetime_converter[07-03-17", "tests/test_uri_converters.py::test_datetime_converter[2017-07-03T14:30:01Z-%Y-%m-%dT%H:%M:%SZ-expected2]", "tests/test_uri_converters.py::test_datetime_converter[2017-07-03T14:30:01-%Y-%m-%dT%H:%M:%S-expected3]", "tests/test_uri_converters.py::test_datetime_converter[2017_19-%Y_%H-expected4]", "tests/test_uri_converters.py::test_datetime_converter[2017-07-03T14:30:01-%Y-%m-%dT%H:%M:%SZ-None]", "tests/test_uri_converters.py::test_datetime_converter[", "tests/test_uri_converters.py::test_datetime_converter[07", "tests/test_uri_converters.py::test_datetime_converter_default_format", "tests/test_uri_converters.py::test_uuid_converter[", "tests/test_uri_templates.py::test_root_path", "tests/test_uri_templates.py::test_no_vars", "tests/test_uri_templates.py::test_special_chars", "tests/test_uri_templates.py::test_single[id]", "tests/test_uri_templates.py::test_single[id123]", "tests/test_uri_templates.py::test_single[widget_id]", "tests/test_uri_templates.py::test_int_converter[/{id:int}]", "tests/test_uri_templates.py::test_int_converter[/{id:int(3)}]", "tests/test_uri_templates.py::test_int_converter[/{id:int(min=123)}]", 
"tests/test_uri_templates.py::test_int_converter[/{id:int(min=123,", "tests/test_uri_templates.py::test_int_converter_rejections[/{id:int(2)}]", "tests/test_uri_templates.py::test_int_converter_rejections[/{id:int(min=124)}]", "tests/test_uri_templates.py::test_int_converter_rejections[/{id:int(num_digits=3,", "tests/test_uri_templates.py::test_datetime_converter[/{start_year:int}-to-{timestamp:dt}-/1961-to-1969-07-21T02:56:00Z-dt_expected0]", "tests/test_uri_templates.py::test_datetime_converter[/{start_year:int}-to-{timestamp:dt(\"%Y-%m-%d\")}-/1961-to-1969-07-21-dt_expected1]", "tests/test_uri_templates.py::test_datetime_converter[/{start_year:int}/{timestamp:dt(\"%Y-%m-%d", "tests/test_uri_templates.py::test_datetime_converter[/{start_year:int}-to-{timestamp:dt(\"%Y-%m\")}-/1961-to-1969-07-21-None]", "tests/test_uri_templates.py::test_uuid_converter_complex_segment", "tests/test_uri_templates.py::test_converter_custom[/{food:spam}-/something-expected0]", "tests/test_uri_templates.py::test_converter_custom[/{food:spam(\")\")}:{food_too:spam(\"()\")}-/bacon:eggs-expected1]", "tests/test_uri_templates.py::test_converter_custom[/({food:spam()}){food_too:spam(\"()\")}-/(bacon)eggs-expected2]", "tests/test_uri_templates.py::test_single_trailing_slash", "tests/test_uri_templates.py::test_multiple", "tests/test_uri_templates.py::test_empty_path_component[//]", "tests/test_uri_templates.py::test_empty_path_component[//begin]", "tests/test_uri_templates.py::test_empty_path_component[/end//]", "tests/test_uri_templates.py::test_empty_path_component[/in//side]", "tests/test_uri_templates.py::test_relative_path[]", "tests/test_uri_templates.py::test_relative_path[no]", "tests/test_uri_templates.py::test_relative_path[no/leading_slash]", "tests/test_uri_templates.py::test_same_level_complex_var[True]", "tests/test_uri_templates.py::test_same_level_complex_var[False]", "tests/test_uri_templates.py::test_adding_suffix_routes", "tests/test_uri_templates.py::test_custom_error_on_suffix_route_not_found", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_string_type_required[42]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_string_type_required[API]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_must_start_with_slash[this]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_must_start_with_slash[this/that]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_may_not_contain_double_slash[//]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_may_not_contain_double_slash[a//]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_may_not_contain_double_slash[//b]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_may_not_contain_double_slash[a//b]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_may_not_contain_double_slash[a/b//]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_template_may_not_contain_double_slash[a/b//c]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_root", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_no_fields[/hello]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_no_fields[/hello/world]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_no_fields[/hi/there/how/are/you]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_one_field", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_one_field_with_digits", 
"tests/test_uri_templates_legacy.py::TestUriTemplates::test_one_field_with_prefixed_digits", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_two_fields[]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_two_fields[/]", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_three_fields", "tests/test_uri_templates_legacy.py::TestUriTemplates::test_malformed_field", "tests/test_utils.py::TestFalconUtils::test_deprecated_decorator", "tests/test_utils.py::TestFalconUtils::test_http_now", "tests/test_utils.py::TestFalconUtils::test_dt_to_http", "tests/test_utils.py::TestFalconUtils::test_http_date_to_dt", "tests/test_utils.py::TestFalconUtils::test_pack_query_params_none", "tests/test_utils.py::TestFalconUtils::test_pack_query_params_one", "tests/test_utils.py::TestFalconUtils::test_pack_query_params_several", "tests/test_utils.py::TestFalconUtils::test_uri_encode", "tests/test_utils.py::TestFalconUtils::test_uri_encode_double", "tests/test_utils.py::TestFalconUtils::test_uri_encode_value", "tests/test_utils.py::TestFalconUtils::test_uri_decode", "tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_models_stdlib_quote", "tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_value_models_stdlib_quote_safe_tilde", "tests/test_utils.py::TestFalconUtils::test_prop_uri_decode_models_stdlib_unquote_plus", "tests/test_utils.py::TestFalconUtils::test_unquote_string", "tests/test_utils.py::TestFalconUtils::test_parse_query_string", "tests/test_utils.py::TestFalconUtils::test_parse_host", "tests/test_utils.py::TestFalconUtils::test_get_http_status", "tests/test_utils.py::test_simulate_request_protocol[https-CONNECT]", "tests/test_utils.py::test_simulate_request_protocol[https-DELETE]", "tests/test_utils.py::test_simulate_request_protocol[https-GET]", "tests/test_utils.py::test_simulate_request_protocol[https-HEAD]", "tests/test_utils.py::test_simulate_request_protocol[https-OPTIONS]", "tests/test_utils.py::test_simulate_request_protocol[https-PATCH]", "tests/test_utils.py::test_simulate_request_protocol[https-POST]", "tests/test_utils.py::test_simulate_request_protocol[https-PUT]", "tests/test_utils.py::test_simulate_request_protocol[https-TRACE]", "tests/test_utils.py::test_simulate_request_protocol[http-CONNECT]", "tests/test_utils.py::test_simulate_request_protocol[http-DELETE]", "tests/test_utils.py::test_simulate_request_protocol[http-GET]", "tests/test_utils.py::test_simulate_request_protocol[http-HEAD]", "tests/test_utils.py::test_simulate_request_protocol[http-OPTIONS]", "tests/test_utils.py::test_simulate_request_protocol[http-PATCH]", "tests/test_utils.py::test_simulate_request_protocol[http-POST]", "tests/test_utils.py::test_simulate_request_protocol[http-PUT]", "tests/test_utils.py::test_simulate_request_protocol[http-TRACE]", "tests/test_utils.py::test_simulate_free_functions[simulate_get]", "tests/test_utils.py::test_simulate_free_functions[simulate_head]", "tests/test_utils.py::test_simulate_free_functions[simulate_post]", "tests/test_utils.py::test_simulate_free_functions[simulate_put]", "tests/test_utils.py::test_simulate_free_functions[simulate_options]", "tests/test_utils.py::test_simulate_free_functions[simulate_patch]", "tests/test_utils.py::test_simulate_free_functions[simulate_delete]", "tests/test_utils.py::TestFalconTestingUtils::test_path_escape_chars_in_create_environ", "tests/test_utils.py::TestFalconTestingUtils::test_no_prefix_allowed_for_query_strings_in_create_environ", 
"tests/test_utils.py::TestFalconTestingUtils::test_none_header_value_in_create_environ", "tests/test_utils.py::TestFalconTestingUtils::test_decode_empty_result", "tests/test_utils.py::TestFalconTestingUtils::test_httpnow_alias_for_backwards_compat", "tests/test_utils.py::TestFalconTestingUtils::test_default_headers", "tests/test_utils.py::TestFalconTestingUtils::test_default_headers_with_override", "tests/test_utils.py::TestFalconTestingUtils::test_status", "tests/test_utils.py::TestFalconTestingUtils::test_wsgi_iterable_not_closeable", "tests/test_utils.py::TestFalconTestingUtils::test_path_must_start_with_slash", "tests/test_utils.py::TestFalconTestingUtils::test_cached_text_in_result", "tests/test_utils.py::TestFalconTestingUtils::test_simple_resource_body_json_xor", "tests/test_utils.py::TestFalconTestingUtils::test_query_string", "tests/test_utils.py::TestFalconTestingUtils::test_query_string_no_question", "tests/test_utils.py::TestFalconTestingUtils::test_query_string_in_path", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[16.0625]", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[123456789]", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[True]", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[]", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[I", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[document5]", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[document6]", "tests/test_utils.py::TestFalconTestingUtils::test_simulate_json_body[document7]", "tests/test_utils.py::TestCaseFancyAPI::test_something", "tests/test_utils.py::TestNoApiClass::test_something", "tests/test_utils.py::TestSetupApi::test_something", "tests/test_wsgi.py::TestWSGIServer::test_get", "tests/test_wsgi.py::TestWSGIServer::test_put", "tests/test_wsgi.py::TestWSGIServer::test_head_405", "tests/test_wsgi.py::TestWSGIServer::test_post", "tests/test_wsgi.py::TestWSGIServer::test_post_invalid_content_length", "tests/test_wsgi.py::TestWSGIServer::test_post_read_bounded_stream", "tests/test_wsgi.py::TestWSGIServer::test_post_read_bounded_stream_no_body", "tests/test_wsgi_errors.py::TestWSGIError::test_responder_logged_bytestring", "tests/test_wsgi_interface.py::TestWSGIInterface::test_srmock", "tests/test_wsgi_interface.py::TestWSGIInterface::test_pep3333", "tests/test_wsgiref_inputwrapper_with_size.py::TestWsgiRefInputWrapper::test_resources_can_read_request_stream_during_tests" ]
[]
Apache License 2.0
2,244
[ "falcon/routing/static.py", "falcon/api.py" ]
[ "falcon/routing/static.py", "falcon/api.py" ]
smarkets__marge-bot-89
9986daf294673ad58a06c7ca19125bc20c144c96
2018-03-03 15:10:37
9986daf294673ad58a06c7ca19125bc20c144c96
diff --git a/marge/git.py b/marge/git.py index 29a7684..df8cfc5 100644 --- a/marge/git.py +++ b/marge/git.py @@ -107,9 +107,9 @@ class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')): raise return self.get_commit_hash() - def remove_branch(self, branch): - assert branch != 'master' - self.git('checkout', 'master', '--') + def remove_branch(self, branch, *, new_current_branch='master'): + assert branch != new_current_branch + self.git('checkout', new_current_branch, '--') self.git('branch', '-D', branch) def push_force(self, branch, source_repo_url=None): diff --git a/marge/job.py b/marge/job.py index 613a649..c7196c9 100644 --- a/marge/job.py +++ b/marge/job.py @@ -363,8 +363,8 @@ def update_from_target_branch_and_push( # A failure to clean up probably means something is fucked with the git repo # and likely explains any previous failure, so it will better to just # raise a GitError - if source_branch != 'master': - repo.remove_branch(source_branch) + if source_branch != target_branch: + repo.remove_branch(source_branch, new_current_branch=target_branch) else: assert source_repo_url is not None
Don't assume a branch called `master` exists when removing the merged branch. A merge request does not always merge into `master`; in some git workflows, for example, an MR should be merged into the `develop` branch. I propose adding a configuration parameter `--branch-name-merge-into <string name>` for such situations.
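The merged patch above parameterises the branch that is checked out before deletion. As a rough standalone sketch of the same idea (plain `subprocess` calls rather than marge-bot's `Repo.git` wrapper; the function name and signature here are illustrative, not the project's API):

```python
import subprocess


def remove_branch(repo_path, branch, new_current_branch='master'):
    """Delete `branch` after first checking out a caller-chosen branch."""
    # Never delete the branch we are about to stand on.
    assert branch != new_current_branch
    subprocess.run(['git', '-C', repo_path, 'checkout', new_current_branch, '--'], check=True)
    subprocess.run(['git', '-C', repo_path, 'branch', '-D', branch], check=True)


# e.g. remove_branch('/tmp/local/path', 'some_branch', new_current_branch='devel')
```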
smarkets/marge-bot
diff --git a/tests/test_git.py b/tests/test_git.py index 9ceaa1a..2ebc79c 100644 --- a/tests/test_git.py +++ b/tests/test_git.py @@ -112,6 +112,13 @@ class TestRepo(object): assert get_calls(mocked_run) == [] def test_remove_branch(self, mocked_run): + self.repo.remove_branch('some_branch', new_current_branch='devel') + assert get_calls(mocked_run) == [ + 'git -C /tmp/local/path checkout devel --', + 'git -C /tmp/local/path branch -D some_branch', + ] + + def test_remove_branch_default(self, mocked_run): self.repo.remove_branch('some_branch') assert get_calls(mocked_run) == [ 'git -C /tmp/local/path checkout master --', @@ -120,7 +127,7 @@ class TestRepo(object): def test_remove_master_branch_fails(self, unused_mocked_run): with pytest.raises(AssertionError): - self.repo.remove_branch('master') + self.repo.remove_branch('meister', new_current_branch='meister') def test_push_force(self, mocked_run): mocked_run.return_value = mocked_stdout(b'')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==3.3.9 certifi==2025.1.31 charset-normalizer==3.4.1 ConfigArgParse==1.7 coverage==7.8.0 dateparser==1.2.1 dill==0.3.9 exceptiongroup==1.2.2 flake8==7.2.0 humanize==4.12.2 idna==3.10 iniconfig==2.1.0 isort==6.0.1 -e git+https://github.com/smarkets/marge-bot.git@9986daf294673ad58a06c7ca19125bc20c144c96#egg=marge maya==0.6.1 mccabe==0.7.0 packaging==24.2 pendulum==3.0.0 platformdirs==4.3.7 pluggy==1.5.0 pycodestyle==2.13.0 pyflakes==3.3.1 pylint==3.3.6 pytest==8.3.5 pytest-cov==6.0.0 pytest-flake8==1.3.0 pytest-pylint==0.21.0 pytest-runner==6.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.2 regex==2024.11.6 requests==2.32.3 six==1.17.0 snaptime==0.2.4 time-machine==2.16.0 tomli==2.2.1 tomlkit==0.13.2 typing_extensions==4.13.0 tzdata==2025.2 tzlocal==5.3.1 urllib3==2.3.0
name: marge-bot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==3.3.9 - certifi==2025.1.31 - charset-normalizer==3.4.1 - configargparse==1.7 - coverage==7.8.0 - dateparser==1.2.1 - dill==0.3.9 - exceptiongroup==1.2.2 - flake8==7.2.0 - humanize==4.12.2 - idna==3.10 - iniconfig==2.1.0 - isort==6.0.1 - maya==0.6.1 - mccabe==0.7.0 - packaging==24.2 - pendulum==3.0.0 - platformdirs==4.3.7 - pluggy==1.5.0 - pycodestyle==2.13.0 - pyflakes==3.3.1 - pylint==3.3.6 - pytest==8.3.5 - pytest-cov==6.0.0 - pytest-flake8==1.3.0 - pytest-pylint==0.21.0 - pytest-runner==6.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.2 - regex==2024.11.6 - requests==2.32.3 - six==1.17.0 - snaptime==0.2.4 - time-machine==2.16.0 - tomli==2.2.1 - tomlkit==0.13.2 - typing-extensions==4.13.0 - tzdata==2025.2 - tzlocal==5.3.1 - urllib3==2.3.0 prefix: /opt/conda/envs/marge-bot
[ "tests/test_git.py::TestRepo::test_remove_branch", "tests/test_git.py::TestRepo::test_remove_master_branch_fails" ]
[ "tests/test_git.py::PYLINT" ]
[ "tests/test_git.py::flake-8::FLAKE8", "tests/test_git.py::TestRepo::test_clone", "tests/test_git.py::TestRepo::test_config_user_info", "tests/test_git.py::TestRepo::test_rebase_success", "tests/test_git.py::TestRepo::test_merge_success", "tests/test_git.py::TestRepo::test_reviewer_tagging_success", "tests/test_git.py::TestRepo::test_reviewer_tagging_failure", "tests/test_git.py::TestRepo::test_rebase_same_branch", "tests/test_git.py::TestRepo::test_merge_same_branch", "tests/test_git.py::TestRepo::test_remove_branch_default", "tests/test_git.py::TestRepo::test_push_force", "tests/test_git.py::TestRepo::test_push_force_fails_on_dirty", "tests/test_git.py::TestRepo::test_push_force_fails_on_untracked", "tests/test_git.py::TestRepo::test_get_commit_hash", "tests/test_git.py::TestRepo::test_passes_ssh_key", "tests/test_git.py::test_filter", "tests/test_git.py::test_filter_fails_on_empty_commit_messages", "tests/test_git.py::test_filter_fails_on_commit_messages_that_are_empty_apart_from_trailers", "tests/test_git.py::test_filter_ignore_first_line_trailer_in_commit_message_if_not_set" ]
[]
BSD 3-Clause "New" or "Revised" License
2,245
[ "marge/git.py", "marge/job.py" ]
[ "marge/git.py", "marge/job.py" ]
pydicom__pydicom-588
da6f7917ca2a32a6886e023a71b0b095f5bc06c8
2018-03-03 19:44:44
fcc63f0b96fb370b0eb60b2c765b469ce62e597c
diff --git a/pydicom/dataset.py b/pydicom/dataset.py index 1052b8587..8d12864bb 100644 --- a/pydicom/dataset.py +++ b/pydicom/dataset.py @@ -28,8 +28,8 @@ from pydicom.datadict import (tag_for_keyword, keyword_for_tag, repeater_has_keyword) from pydicom.tag import Tag, BaseTag, tag_in_exception from pydicom.dataelem import DataElement, DataElement_from_raw, RawDataElement -from pydicom.uid import (UncompressedPixelTransferSyntaxes, - ExplicitVRLittleEndian) +from pydicom.uid import (UncompressedPixelTransferSyntaxes, + ExplicitVRLittleEndian) import pydicom # for dcmwrite import pydicom.charset from pydicom.config import logger @@ -400,14 +400,13 @@ class Dataset(dict): return True if isinstance(other, self.__class__): - # Compare Elements using values() and class variables using - # __dict__ + # Compare Elements using values() # Convert values() to a list for compatibility between # python 2 and 3 # Sort values() by element tag self_elem = sorted(list(self.values()), key=lambda x: x.tag) other_elem = sorted(list(other.values()), key=lambda x: x.tag) - return self_elem == other_elem and self.__dict__ == other.__dict__ + return self_elem == other_elem return NotImplemented @@ -697,9 +696,9 @@ class Dataset(dict): Returns ------- - None + None Converted pixel data is stored internally in the dataset. - + If a compressed image format, the image is decompressed, and any related data elements are changed accordingly. """ @@ -720,7 +719,9 @@ class Dataset(dict): pixel_array = x.get_pixeldata(self) self._pixel_array = self._reshape_pixel_array(pixel_array) if x.needs_to_convert_to_RGB(self): - self._pixel_array = self._convert_YBR_to_RGB(self._pixel_array) + self._pixel_array = self._convert_YBR_to_RGB( + self._pixel_array + ) successfully_read_pixel_data = True break except Exception as e: @@ -743,31 +744,31 @@ class Dataset(dict): raise NotImplementedError(msg) # is this guaranteed to work if memory is re-used?? self._pixel_id = id(self.PixelData) - + def decompress(self): """Decompresses pixel data and modifies the Dataset in-place - If not a compressed tranfer syntax, then pixel data is converted - to a numpy array internally, but not returned. - - If compressed pixel data, then is decompressed using an image handler, - and internal state is updated appropriately: - - TransferSyntax is updated to non-compressed form - - is_undefined_length for pixel data is set False + If not a compressed tranfer syntax, then pixel data is converted + to a numpy array internally, but not returned. 
+ + If compressed pixel data, then is decompressed using an image handler, + and internal state is updated appropriately: + - TransferSyntax is updated to non-compressed form + - is_undefined_length for pixel data is set False Returns ------- None - Raises + Raises ------ NotImplementedError If the pixel data was originally compressed but file is not - ExplicitVR LittleEndian as required by Dicom standard - """ + ExplicitVR LittleEndian as required by Dicom standard + """ self.convert_pixel_data() self.is_decompressed = True - # May have been undefined length pixel data, but won't be now + # May have been undefined length pixel data, but won't be now if 'PixelData' in self: self[0x7fe00010].is_undefined_length = False @@ -780,14 +781,13 @@ class Dataset(dict): # Check that current file as read does match expected if not self.is_little_endian or self.is_implicit_VR: msg = ("Current dataset does not match expected ExplicitVR " - "LittleEndian transfer syntax from a compressed " + "LittleEndian transfer syntax from a compressed " "transfer syntax") raise NotImplementedError(msg) - + # All is as expected, updated the Transfer Syntax self.file_meta.TransferSyntaxUID = ExplicitVRLittleEndian - @property def pixel_array(self): """Return the Pixel Data as a NumPy array. @@ -1233,3 +1233,29 @@ class FileDataset(Dataset): if self.filename and os.path.exists(self.filename): statinfo = os.stat(self.filename) self.timestamp = statinfo.st_mtime + + def __eq__(self, other): + """Compare `self` and `other` for equality. + + Returns + ------- + bool + The result if `self` and `other` are the same class + NotImplemented + If `other` is not the same class as `self` then returning + NotImplemented delegates the result to superclass.__eq__(subclass) + """ + # When comparing against self this will be faster + if other is self: + return True + + if isinstance(other, self.__class__): + # Compare Elements using values() and class members using __dict__ + # Convert values() to a list for compatibility between + # python 2 and 3 + # Sort values() by element tag + self_elem = sorted(list(self.values()), key=lambda x: x.tag) + other_elem = sorted(list(other.values()), key=lambda x: x.tag) + return self_elem == other_elem and self.__dict__ == other.__dict__ + + return NotImplemented diff --git a/pydicom/filewriter.py b/pydicom/filewriter.py index ceb809915..01d9a4911 100644 --- a/pydicom/filewriter.py +++ b/pydicom/filewriter.py @@ -226,7 +226,10 @@ def write_PN(fp, data_element, padding=b' ', encoding=None): val = data_element.value if isinstance(val[0], compat.text_type) or not in_py2: - val = [elem.encode(encoding) for elem in val] + try: + val = [elem.encode(encoding) for elem in val] + except TypeError: + val = [elem.encode(encoding[0]) for elem in val] val = b'\\'.join(val)
No encoding with write_PN raises TypeError #### Description `filewriter.write_PN()` with `encoding=None` (default) raises `TypeError` #### Steps/Code to Reproduce ```python >>> from pydicom.filebase import DicomBytesIO >>> from pydicom.dataelem import DataElement >>> from pydicom.filewriter import write_PN >>> fp = DicomBytesIO() >>> fp.is_little_endian = True >>> elem = DataElement(0x00100010, 'PN', u'\u03b8') >>> write_PN(fp, elem) ``` ``` Traceback (most recent call last): File "<stdin>", line 1, in <module> File "../pydicom/pydicom/filewriter.py", line 228, in write_PN val = [elem.encode(encoding) for elem in val] TypeError: encode() argument 1 must be string, not list ``` Occurs because when `encoding=None`, `encoding = [default] * 3` is used. I'm not sure what the impact is in practise since `write_PN` only seems to get called internally with encoding not set to None. #### Versions Python 2.7
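A minimal, pydicom-independent illustration of the failure mode (variable names are ours; `'iso8859'` stands in for the default codec, though the exact codec is irrelevant to the error): passing the whole `[default] * 3` list to `str.encode()` is what raises the `TypeError`, and falling back to a single codec from that list is essentially what the patch above does.

```python
encodings = ['iso8859'] * 3      # what the issue reports write_PN ending up with

value = u'\u00e8'                # a character the latin-1 family can represent
try:
    value.encode(encodings)      # TypeError: encode() argument must be str, not list
except TypeError:
    encoded = value.encode(encodings[0])   # defensive fix: use one codec from the list
print(encoded)                   # b'\xe8'
```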
pydicom/pydicom
diff --git a/pydicom/tests/test_dataset.py b/pydicom/tests/test_dataset.py index fee7082bc..822b04e79 100644 --- a/pydicom/tests/test_dataset.py +++ b/pydicom/tests/test_dataset.py @@ -403,32 +403,35 @@ class DatasetTests(unittest.TestCase): def testEqualityNoSequence(self): """Dataset: equality returns correct value with simple dataset""" + # Test empty dataset + assert Dataset() == Dataset() + d = Dataset() d.SOPInstanceUID = '1.2.3.4' d.PatientName = 'Test' - self.assertTrue(d == d) + assert d == d e = Dataset() e.PatientName = 'Test' e.SOPInstanceUID = '1.2.3.4' - self.assertTrue(d == e) + assert d == e e.SOPInstanceUID = '1.2.3.5' - self.assertFalse(d == e) + assert not d == e # Check VR del e.SOPInstanceUID e.add(DataElement(0x00080018, 'PN', '1.2.3.4')) - self.assertFalse(d == e) + assert not d == e # Check Tag del e.SOPInstanceUID e.StudyInstanceUID = '1.2.3.4' - self.assertFalse(d == e) + assert not d == e # Check missing Element in self e.SOPInstanceUID = '1.2.3.4' - self.assertFalse(d == e) + assert not d == e # Check missing Element in other d = Dataset() @@ -437,7 +440,7 @@ class DatasetTests(unittest.TestCase): e = Dataset() e.SOPInstanceUID = '1.2.3.4' - self.assertFalse(d == e) + assert not d == e def testEqualityPrivate(self): """Dataset: equality returns correct value""" @@ -500,16 +503,14 @@ class DatasetTests(unittest.TestCase): def testEqualityUnknown(self): """Dataset: equality returns correct value with extra members """ + # Non-element class members are ignored in equality testing d = Dataset() d.SOPEustaceUID = '1.2.3.4' - self.assertTrue(d == d) + assert d == d e = Dataset() - e.SOPEustaceUID = '1.2.3.4' - self.assertTrue(d == e) - e.SOPEustaceUID = '1.2.3.5' - self.assertFalse(d == e) + assert d == e def testEqualityInheritance(self): """Dataset: equality returns correct value for subclass """ @@ -529,6 +530,19 @@ class DatasetTests(unittest.TestCase): self.assertFalse(d == e) self.assertFalse(e == d) + def test_equality_elements(self): + """Test that Dataset equality only checks DataElements.""" + d = Dataset() + d.SOPInstanceUID = '1.2.3.4' + d.PatientName = 'Test' + d.foo = 'foo' + assert d == d + + e = Dataset() + e.PatientName = 'Test' + e.SOPInstanceUID = '1.2.3.4' + assert d == e + def test_inequality(self): """Test inequality operator""" d = Dataset() diff --git a/pydicom/tests/test_filewriter.py b/pydicom/tests/test_filewriter.py index 9ed2347ea..6e829300b 100644 --- a/pydicom/tests/test_filewriter.py +++ b/pydicom/tests/test_filewriter.py @@ -1826,7 +1826,6 @@ class TestWriteNumbers(object): class TestWritePN(object): """Test filewriter.write_PN""" - @pytest.mark.skip("Raises exception due to issue #489") def test_no_encoding_unicode(self): """If PN element has no encoding info, default is used""" fp = DicomBytesIO() @@ -1979,12 +1978,11 @@ class TestWriteNumbers(object): class TestWritePN(object): """Test filewriter.write_PN""" - @pytest.mark.skip("Raises exception due to issue #489") def test_no_encoding_unicode(self): """If PN element as no encoding info, default is used""" fp = DicomBytesIO() fp.is_little_endian = True - elem = DataElement(0x00100010, 'PN', u'\u03b8') + elem = DataElement(0x00100010, 'PN', u'\u00e8') write_PN(fp, elem) def test_no_encoding(self):
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 -e git+https://github.com/pydicom/pydicom.git@da6f7917ca2a32a6886e023a71b0b095f5bc06c8#egg=pydicom pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: pydicom channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/pydicom
[ "pydicom/tests/test_dataset.py::DatasetTests::testEqualityUnknown", "pydicom/tests/test_dataset.py::DatasetTests::test_equality_elements" ]
[ "pydicom/tests/test_dataset.py::DatasetTests::test_get_item" ]
[ "pydicom/tests/test_dataset.py::DatasetTests::testAttributeErrorInProperty", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteDicomAttr", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteDicomAttrWeDontHave", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteDicomCommandGroupLength", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteItemLong", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteItemTuple", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteNonExistingItem", "pydicom/tests/test_dataset.py::DatasetTests::testDeleteOtherAttr", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityInheritance", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityNoSequence", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityNotDataset", "pydicom/tests/test_dataset.py::DatasetTests::testEqualityPrivate", "pydicom/tests/test_dataset.py::DatasetTests::testEqualitySequence", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault1", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault2", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault3", "pydicom/tests/test_dataset.py::DatasetTests::testGetDefault4", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists1", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists2", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists3", "pydicom/tests/test_dataset.py::DatasetTests::testGetExists4", "pydicom/tests/test_dataset.py::DatasetTests::testGetFromRaw", "pydicom/tests/test_dataset.py::DatasetTests::testHash", "pydicom/tests/test_dataset.py::DatasetTests::testMembership", "pydicom/tests/test_dataset.py::DatasetTests::testSetExistingDataElementByName", "pydicom/tests/test_dataset.py::DatasetTests::testSetNewDataElementByName", "pydicom/tests/test_dataset.py::DatasetTests::testSetNonDicom", "pydicom/tests/test_dataset.py::DatasetTests::testTagExceptionPrint", "pydicom/tests/test_dataset.py::DatasetTests::testTagExceptionWalk", "pydicom/tests/test_dataset.py::DatasetTests::testUpdate", "pydicom/tests/test_dataset.py::DatasetTests::test_NamedMemberUpdated", "pydicom/tests/test_dataset.py::DatasetTests::test__setitem__", "pydicom/tests/test_dataset.py::DatasetTests::test_add_repeater_elem_by_keyword", "pydicom/tests/test_dataset.py::DatasetTests::test_attribute_error_in_property_correct_debug", "pydicom/tests/test_dataset.py::DatasetTests::test_contains", "pydicom/tests/test_dataset.py::DatasetTests::test_data_element", "pydicom/tests/test_dataset.py::DatasetTests::test_delitem_slice", "pydicom/tests/test_dataset.py::DatasetTests::test_dir", "pydicom/tests/test_dataset.py::DatasetTests::test_dir_filter", "pydicom/tests/test_dataset.py::DatasetTests::test_dir_subclass", "pydicom/tests/test_dataset.py::DatasetTests::test_empty_slice", "pydicom/tests/test_dataset.py::DatasetTests::test_exit_exception", "pydicom/tests/test_dataset.py::DatasetTests::test_formatted_lines", "pydicom/tests/test_dataset.py::DatasetTests::test_get_pixel_array_already_have", "pydicom/tests/test_dataset.py::DatasetTests::test_get_raises", "pydicom/tests/test_dataset.py::DatasetTests::test_getitem_slice", "pydicom/tests/test_dataset.py::DatasetTests::test_getitem_slice_raises", "pydicom/tests/test_dataset.py::DatasetTests::test_group_dataset", "pydicom/tests/test_dataset.py::DatasetTests::test_inequality", "pydicom/tests/test_dataset.py::DatasetTests::test_is_uncompressed_transfer_syntax", "pydicom/tests/test_dataset.py::DatasetTests::test_iterall", 
"pydicom/tests/test_dataset.py::DatasetTests::test_matching_tags", "pydicom/tests/test_dataset.py::DatasetTests::test_property", "pydicom/tests/test_dataset.py::DatasetTests::test_remove_private_tags", "pydicom/tests/test_dataset.py::DatasetTests::test_reshape_pixel_array_not_implemented", "pydicom/tests/test_dataset.py::DatasetTests::test_save_as", "pydicom/tests/test_dataset.py::DatasetTests::test_set_convert_private_elem_from_raw", "pydicom/tests/test_dataset.py::DatasetTests::test_setitem_slice_raises", "pydicom/tests/test_dataset.py::DatasetTests::test_top", "pydicom/tests/test_dataset.py::DatasetTests::test_trait_names", "pydicom/tests/test_dataset.py::DatasetTests::test_walk", "pydicom/tests/test_dataset.py::DatasetTests::test_with", "pydicom/tests/test_dataset.py::DatasetElementsTests::testSequenceAssignment", "pydicom/tests/test_dataset.py::FileDatasetTests::test_creation_with_container", "pydicom/tests/test_dataset.py::FileDatasetTests::test_equality_file_meta", "pydicom/tests/test_filewriter.py::WriteFileTests::testCT", "pydicom/tests/test_filewriter.py::WriteFileTests::testJPEG2000", "pydicom/tests/test_filewriter.py::WriteFileTests::testListItemWriteBack", "pydicom/tests/test_filewriter.py::WriteFileTests::testMR", "pydicom/tests/test_filewriter.py::WriteFileTests::testMultiPN", "pydicom/tests/test_filewriter.py::WriteFileTests::testRTDose", "pydicom/tests/test_filewriter.py::WriteFileTests::testRTPlan", "pydicom/tests/test_filewriter.py::WriteFileTests::testUnicode", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_double_filemeta", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_no_ts", "pydicom/tests/test_filewriter.py::WriteFileTests::testwrite_short_uid", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testCT", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testJPEG2000", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testListItemWriteBack", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testMR", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testMultiPN", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testRTDose", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testRTPlan", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testUnicode", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_multivalue_DA", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_double_filemeta", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_no_ts", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testwrite_short_uid", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_empty_AT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_DA", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_DT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OD_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OD_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OL_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OL_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_TM", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UC_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UC_implicit_little", 
"pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UN_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UR_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UR_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_empty_LO", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_DA", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_DT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_TM", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_unknown_vr_raises", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_lut_descriptor", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_overlay", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_data", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_representation_vm_one", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_representation_vm_three", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_sequence", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_waveform_bits_allocated", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVRElement::test_not_ambiguous", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVRElement::test_pixel_data_not_ow_or_ob", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_big_endian", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_little_endian", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_raises", "pydicom/tests/test_filewriter.py::ScratchWriteTests::testImpl_LE_deflen_write", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_preamble_default", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_preamble_custom", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_no_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_none_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_bad_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_prefix", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_prefix_none", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_ds_changed", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_not_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_raises", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_media_storage_sop_class_uid_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_write_no_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_raise_no_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_add_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_standard", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_commandset_no_written", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_bad_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_missing_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_group_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_group_length_updated", 
"pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_version", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_implementation_version_name_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_implementation_class_uid_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_filelike_position", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_filemeta", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_ds_unchanged", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_file_meta_unchanged", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_no_preamble", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_filemeta", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_custom", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_default", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_read_write_identical", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_bad_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_filelike_position", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_group_length_updated", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_meta_unchanged", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_missing_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_transfer_syntax_not_added", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_empty_value", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_list", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_singleton", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_exception", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_big_endian", "pydicom/tests/test_filewriter.py::TestWritePN::test_no_encoding_unicode", "pydicom/tests/test_filewriter.py::TestWritePN::test_no_encoding", "pydicom/tests/test_filewriter.py::TestWriteDT::test_format_dt", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_big_endian_correct_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_big_endian_incorrect_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_little_endian_correct_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_little_endian_incorrect_data" ]
[]
MIT License
2,246
[ "pydicom/dataset.py", "pydicom/filewriter.py" ]
[ "pydicom/dataset.py", "pydicom/filewriter.py" ]
pre-commit__pre-commit-718
ac3a37d1a0e3575bddf23fd9babf6e56202b2988
2018-03-03 23:24:53
ac3a37d1a0e3575bddf23fd9babf6e56202b2988
diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py index 83b97cb..9191222 100644 --- a/pre_commit/commands/install_uninstall.py +++ b/pre_commit/commands/install_uninstall.py @@ -2,15 +2,19 @@ from __future__ import print_function from __future__ import unicode_literals import io +import logging import os.path import sys from pre_commit import output +from pre_commit.util import cmd_output from pre_commit.util import make_executable from pre_commit.util import mkdirp from pre_commit.util import resource_filename +logger = logging.getLogger(__name__) + # This is used to identify the hook file we install PRIOR_HASHES = ( '4d9958c90bc262f47553e2c073f14cfe', @@ -36,6 +40,13 @@ def install( skip_on_missing_conf=False, ): """Install the pre-commit hooks.""" + if cmd_output('git', 'config', 'core.hooksPath', retcode=None)[1].strip(): + logger.error( + 'Cowardly refusing to install hooks with `core.hooksPath` set.\n' + 'hint: `git config --unset-all core.hooksPath`', + ) + return 1 + hook_path = runner.get_hook_path(hook_type) legacy_path = hook_path + '.legacy'
Handle when `core.hooksPath` is set? As we found in https://github.com/pre-commit/pre-commit-hooks/issues/250, pre-commit (despite being installed) will be silently skipped if `core.hooksPath` is set. A few options: - during `pre-commit install`, check this variable and warn - the same check, but error out - install into the directory at `core.hooksPath` (but it may be outside the working dir? probably not the best idea to write to it)
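The merged patch takes the error route. As a rough standalone sketch of that check (plain `subprocess` instead of pre-commit's internal `cmd_output` helper; the function name is illustrative):

```python
import subprocess


def core_hooks_path(repo_path='.'):
    """Return the configured core.hooksPath, or '' if it is unset."""
    result = subprocess.run(
        ['git', '-C', repo_path, 'config', 'core.hooksPath'],
        capture_output=True, text=True,
    )
    # `git config` exits non-zero when the key is missing, leaving stdout empty,
    # so the returned string doubles as the "is it set?" flag.
    return result.stdout.strip()


if core_hooks_path():
    raise SystemExit(
        'Cowardly refusing to install hooks with `core.hooksPath` set.\n'
        'hint: `git config --unset-all core.hooksPath`'
    )
```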
pre-commit/pre-commit
diff --git a/tests/commands/install_uninstall_test.py b/tests/commands/install_uninstall_test.py index a49a3e4..f83708e 100644 --- a/tests/commands/install_uninstall_test.py +++ b/tests/commands/install_uninstall_test.py @@ -66,6 +66,14 @@ def test_install_hooks_directory_not_present(tempdir_factory): assert os.path.exists(runner.pre_commit_path) +def test_install_refuses_core_hookspath(tempdir_factory): + path = git_dir(tempdir_factory) + with cwd(path): + cmd_output('git', 'config', '--local', 'core.hooksPath', 'hooks') + runner = Runner(path, C.CONFIG_FILE) + assert install(runner) + + @xfailif_no_symlink def test_install_hooks_dead_symlink( tempdir_factory,
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aspy.yaml==1.3.0 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 cfgv==3.3.1 coverage==6.2 distlib==0.3.9 filelock==3.4.1 flake8==5.0.4 identify==2.4.4 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 mccabe==0.7.0 mock==5.2.0 nodeenv==1.6.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 -e git+https://github.com/pre-commit/pre-commit.git@ac3a37d1a0e3575bddf23fd9babf6e56202b2988#egg=pre_commit py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-env==0.6.2 PyYAML==6.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 virtualenv==20.17.1 zipp==3.6.0
name: pre-commit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - aspy-yaml==1.3.0 - attrs==22.2.0 - cached-property==1.5.2 - cfgv==3.3.1 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - flake8==5.0.4 - identify==2.4.4 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - mccabe==0.7.0 - mock==5.2.0 - nodeenv==1.6.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-env==0.6.2 - pyyaml==6.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - virtualenv==20.17.1 - zipp==3.6.0 prefix: /opt/conda/envs/pre-commit
[ "tests/commands/install_uninstall_test.py::test_install_refuses_core_hookspath" ]
[ "tests/commands/install_uninstall_test.py::test_install_in_submodule_and_run", "tests/commands/install_uninstall_test.py::test_environment_not_sourced" ]
[ "tests/commands/install_uninstall_test.py::test_is_not_script", "tests/commands/install_uninstall_test.py::test_is_script", "tests/commands/install_uninstall_test.py::test_is_previous_pre_commit", "tests/commands/install_uninstall_test.py::test_install_pre_commit", "tests/commands/install_uninstall_test.py::test_install_hooks_directory_not_present", "tests/commands/install_uninstall_test.py::test_install_hooks_dead_symlink", "tests/commands/install_uninstall_test.py::test_uninstall_does_not_blow_up_when_not_there", "tests/commands/install_uninstall_test.py::test_uninstall", "tests/commands/install_uninstall_test.py::test_install_pre_commit_and_run", "tests/commands/install_uninstall_test.py::test_install_pre_commit_and_run_custom_path", "tests/commands/install_uninstall_test.py::test_commit_am", "tests/commands/install_uninstall_test.py::test_unicode_merge_commit_message", "tests/commands/install_uninstall_test.py::test_install_idempotent", "tests/commands/install_uninstall_test.py::test_failing_hooks_returns_nonzero", "tests/commands/install_uninstall_test.py::test_install_existing_hooks_no_overwrite", "tests/commands/install_uninstall_test.py::test_install_existing_hook_no_overwrite_idempotent", "tests/commands/install_uninstall_test.py::test_failing_existing_hook_returns_1", "tests/commands/install_uninstall_test.py::test_install_overwrite_no_existing_hooks", "tests/commands/install_uninstall_test.py::test_install_overwrite", "tests/commands/install_uninstall_test.py::test_uninstall_restores_legacy_hooks", "tests/commands/install_uninstall_test.py::test_replace_old_commit_script", "tests/commands/install_uninstall_test.py::test_uninstall_doesnt_remove_not_our_hooks", "tests/commands/install_uninstall_test.py::test_installs_hooks_with_hooks_True", "tests/commands/install_uninstall_test.py::test_install_hooks_command", "tests/commands/install_uninstall_test.py::test_installed_from_venv", "tests/commands/install_uninstall_test.py::test_pre_push_integration_failing", "tests/commands/install_uninstall_test.py::test_pre_push_integration_accepted", "tests/commands/install_uninstall_test.py::test_pre_push_new_upstream", "tests/commands/install_uninstall_test.py::test_pre_push_integration_empty_push", "tests/commands/install_uninstall_test.py::test_pre_push_legacy", "tests/commands/install_uninstall_test.py::test_commit_msg_integration_failing", "tests/commands/install_uninstall_test.py::test_commit_msg_integration_passing", "tests/commands/install_uninstall_test.py::test_commit_msg_legacy", "tests/commands/install_uninstall_test.py::test_install_disallow_mising_config", "tests/commands/install_uninstall_test.py::test_install_allow_mising_config", "tests/commands/install_uninstall_test.py::test_install_temporarily_allow_mising_config" ]
[]
MIT License
2,247
[ "pre_commit/commands/install_uninstall.py" ]
[ "pre_commit/commands/install_uninstall.py" ]
wright-group__WrightTools-524
592649ce55c9fa7847325c9e9b15b320a38f1389
2018-03-04 03:38:05
592649ce55c9fa7847325c9e9b15b320a38f1389
pep8speaks: Hello @untzag! Thanks for submitting the PR. - In the file [`WrightTools/artists/_base.py`](https://github.com/wright-group/WrightTools/blob/46fb51978262bd10092c6c65be63276bed5a8944/WrightTools/artists/_base.py), following are the PEP8 issues : > [Line 69:27](https://github.com/wright-group/WrightTools/blob/46fb51978262bd10092c6c65be63276bed5a8944/WrightTools/artists/_base.py#L69): [E225](https://duckduckgo.com/?q=pep8%20E225) missing whitespace around operator > [Line 560:1](https://github.com/wright-group/WrightTools/blob/46fb51978262bd10092c6c65be63276bed5a8944/WrightTools/artists/_base.py#L560): [E402](https://duckduckgo.com/?q=pep8%20E402) module level import not at top of file
diff --git a/WrightTools/_dataset.py b/WrightTools/_dataset.py index d0e0059..52efb9e 100644 --- a/WrightTools/_dataset.py +++ b/WrightTools/_dataset.py @@ -117,6 +117,14 @@ class Dataset(h5py.Dataset): out += ' {0}'.format(self.shape) return out + @property + def full(self): + arr = self[:] + for i in range(arr.ndim): + if arr.shape[i] == 1: + arr = np.repeat(arr, self.parent.shape[i], axis=i) + return arr + @property def fullpath(self): """Full path: file and internal structure.""" @@ -142,6 +150,11 @@ class Dataset(h5py.Dataset): """Parent.""" return self._parent + @property + def points(self): + """Squeezed array.""" + return np.squeeze(self[:]) + @property def units(self): """Units.""" diff --git a/WrightTools/artists/_base.py b/WrightTools/artists/_base.py index 75623b1..774c6ad 100644 --- a/WrightTools/artists/_base.py +++ b/WrightTools/artists/_base.py @@ -64,9 +64,9 @@ class Axes(matplotlib.axes.Axes): if autolabel in ['xy', 'both', 'x'] and not xlabel: xlabel = data.axes[0].label if autolabel in ['xy', 'both', 'y'] and not ylabel: - if data.dimensionality == 1: + if data.ndim == 1: ylabel = data.channels[channel_index].label - elif data.dimensionality == 2: + elif data.ndim == 2: ylabel = data.axes[1].label # apply if xlabel: @@ -175,8 +175,8 @@ class Axes(matplotlib.axes.Axes): # unpack data object, if given if isinstance(args[0], Data): data = args.pop(0) - if not data.dimensionality == 2: - raise wt_exceptions.DimensionalityError(2, data.dimensionality) + if not data.ndim == 2: + raise wt_exceptions.DimensionalityError(2, data.ndim) # arrays channel_index = wt_kit.get_index(data.channel_names, channel) signed = data.channels[channel_index].signed @@ -245,8 +245,8 @@ class Axes(matplotlib.axes.Axes): # unpack data object, if given if isinstance(args[0], Data): data = args.pop(0) - if not data.dimensionality == 2: - raise wt_exceptions.DimensionalityError(2, data.dimensionality) + if not data.ndim == 2: + raise wt_exceptions.DimensionalityError(2, data.ndim) # arrays channel_index = wt_kit.get_index(data.channel_names, channel) xi = data.axes[0].full @@ -359,8 +359,8 @@ class Axes(matplotlib.axes.Axes): # unpack data object, if given if isinstance(args[0], Data): data = args.pop(0) - if not data.dimensionality == 2: - raise wt_exceptions.DimensionalityError(2, data.dimensionality) + if not data.ndim == 2: + raise wt_exceptions.DimensionalityError(2, data.ndim) # arrays channel_index = wt_kit.get_index(data.channel_names, channel) xi = data.axes[0].full @@ -426,8 +426,8 @@ class Axes(matplotlib.axes.Axes): if hasattr(args[0], 'id'): # TODO: replace once class comparison works... 
data = args.pop(0) channel = kwargs.pop('channel', 0) - if not data.dimensionality == 1: - raise wt_exceptions.DimensionalityError(1, data.dimensionality) + if not data.ndim == 1: + raise wt_exceptions.DimensionalityError(1, data.ndim) # arrays channel_index = wt_kit.get_index(data.channel_names, channel) xi = data.axes[0][:] diff --git a/WrightTools/artists/_helpers.py b/WrightTools/artists/_helpers.py index 92fcbba..190371f 100644 --- a/WrightTools/artists/_helpers.py +++ b/WrightTools/artists/_helpers.py @@ -464,7 +464,7 @@ def pcolor_helper(xi, yi, zi): xi.shape = (xi.size, 1) if yi.ndim == 1: yi.shape = (1, yi.size) - shape = wt_kit.joint_shape([xi, yi]) + shape = wt_kit.joint_shape(xi, yi) # full def full(arr): diff --git a/WrightTools/data/_axis.py b/WrightTools/data/_axis.py index 3a6cc10..2553e96 100644 --- a/WrightTools/data/_axis.py +++ b/WrightTools/data/_axis.py @@ -126,7 +126,7 @@ class Axis(object): @property def shape(self): """Shape.""" - return wt_kit.joint_shape(self.variables) + return wt_kit.joint_shape(*self.variables) @property def size(self): @@ -146,8 +146,8 @@ class Axis(object): except (AssertionError, AttributeError): pattern = '|'.join(map(re.escape, operators)) keys = re.split(pattern, self.expression) - self._variables = [self.parent.variables[self.parent.variable_names.index(key)] - for key in keys] + indices = [self.parent.variable_names.index(key) for key in keys] + self._variables = [self.parent.variables[i] for i in indices] finally: return self._variables diff --git a/WrightTools/data/_data.py b/WrightTools/data/_data.py index 7ca0912..962afbf 100644 --- a/WrightTools/data/_data.py +++ b/WrightTools/data/_data.py @@ -57,8 +57,6 @@ class Data(Group): self._on_axes_updated() # the following are populated if not already recorded self.channel_names - self.constant_names - self.kind self.source self.variable_names @@ -97,28 +95,11 @@ class Data(Group): """Channels.""" return tuple(self[n] for n in self.channel_names) - @property - def constant_names(self): - """Constant names.""" - if 'constant_names' not in self.attrs.keys(): - self.attrs['constant_names'] = np.array([], dtype='S') - return tuple(s.decode() for s in self.attrs['constant_names']) - - @property - def constants(self): - """Constants.""" - return tuple(self[n] for n in self.constant_names) - @property def datasets(self): """Datasets.""" return tuple(v for _, v in self.items() if isinstance(v, h5py.Dataset)) - @property - def dimensionality(self): - """Get dimensionality of Data object.""" - return len(self._axes) - @property def info(self): """Retrieve info dictionary about a Data object.""" @@ -156,7 +137,7 @@ class Data(Group): try: assert self._shape is not None except (AssertionError, AttributeError): - self._shape = wt_kit.joint_shape(self.variables) + self._shape = wt_kit.joint_shape(*self.variables) finally: return self._shape @@ -334,7 +315,7 @@ class Data(Group): kept = args + list(at.keys()) kept_axes = [self._axes[self.axis_expressions.index(a)] for a in kept] removed_axes = [a for a in self._axes if a not in kept_axes] - removed_shape = wt_kit.joint_shape(removed_axes) + removed_shape = wt_kit.joint_shape(*removed_axes) if removed_shape == (): removed_shape = (1,) * self.ndim # iterate @@ -447,12 +428,12 @@ class Data(Group): -------- Axis.convert Convert a single axis object to compatable units. Call on an - axis object in data.axes or data.constants. + axis object in data.axes. 
""" # get kind of units units_kind = wt_units.kind(destination_units) # apply to all compatible axes - for axis in self.axes + self.constants: + for axis in self.axes: if axis.units_kind == units_kind: axis.convert(destination_units, convert_variables=convert_variables) if verbose: @@ -700,82 +681,99 @@ class Data(Group): if verbose: print('channel {0} leveled along axis {1}'.format(channel.natural_name, axis)) - def map_axis(self, axis, points, input_units='same', edge_tolerance=0., verbose=True): + def map_variable(self, variable, points, input_units='same', *, name=None, parent=None, + verbose=True): """Map points of an axis to new points using linear interpolation. Out-of-bounds points are written nan. Parameters ---------- - axis : int or str - The axis to map onto. - points : 1D array-like or int + variable : string + The variable to map onto. + points : array-like or int If array, the new points. If int, new points will have the same limits, with int defining the number of evenly spaced points between. input_units : str (optional) The units of the new points. Default is same, which assumes the new points have the same units as the axis. - edge_tolerance : float (optional) - Axis edge points that are within this amount of the new edge - points are coerced to the new edge points before interpolation. - Default is 0. + name : string (optional) + The name of the new data object. If None, generated from + natural_name. Default is None. + parent : WrightTools.Collection (optional) + Parent of new data object. If None, data is made at root of a + new temporary file. verbose : bool (optional) Toggle talkback. Default is True. + + Returns + ------- + WrightTools.Data + New data object. """ - raise NotImplementedError - # get axis index -------------------------------------------------------------------------- - if isinstance(axis, int): - axis_index = axis - elif isinstance(axis, str): - axis_index = self.axis_names.index(axis) - else: - raise TypeError("axis: expected {int, str}, got %s" % type(axis)) - axis = self._axes[axis_index] - # get points ------------------------------------------------------------------------------ + # get variable index + variable_index = wt_kit.get_index(self.variable_names, variable) + variable = self.variables[variable_index] + # get points if isinstance(points, int): - points = np.linspace(axis[0], axis[-1], points) - input_units = 'same' - else: - points = np.array(points) - # transform points to axis units ---------------------------------------------------------- + points = np.linspace(variable.min(), variable.max(), points) + points = np.array(points) + # points dimensionality + if points.ndim < variable.ndim: + for i, d in enumerate(variable.shape): + if d == 1: + points = np.expand_dims(points, axis=i) + # convert points if input_units == 'same': pass else: - points = wt_units.converter(points, input_units, axis.units) - # points must be ascending ---------------------------------------------------------------- - flipped = np.zeros(len(self._axes), dtype=np.bool) - for i in range(len(self._axes)): - if self._axes[i][0] > self._axes[i][-1]: - self.flip(i) - flipped[i] = True - # handle edge tolerance ------------------------------------------------------------------- - for index in [0, -1]: - old = axis[index] - new = points[index] - if new - edge_tolerance < old < new + edge_tolerance: - axis[index] = new - # interpn data ---------------------------------------------------------------------------- - old_points = [a[:] for a in self._axes] - new_points = 
[a[:] if a is not axis else points for a in self._axes] - if len(self._axes) == 1: - for channel in self.channels: - function = scipy.interpolate.interp1d(self._axes[0][:], channel[:]) - channel[:] = function(new_points[0]) + points = wt_units.converter(points, input_units, variable.units) + # construct new data object + special = ['name', 'axes', 'channel_names', 'variable_names'] + kwargs = {k: v for k, v in self.attrs.items() if k not in special} + if name is None: + name = '{0}_{1}_mapped'.format(self.natural_name, variable.natural_name) + kwargs['name'] = name + kwargs['parent'] = parent + out = Data(**kwargs) + # mapped variable + values = points + out.create_variable(values=values, **variable.attrs) + # orthogonal variables + for v in self.variables: + if wt_kit.orthogonal(v.shape, variable.shape): + out.create_variable(values=v[:], **v.attrs) + out.transform(*self.axis_expressions) + # interpolate + if self.ndim == 1: + + def interpolate(dataset, points): + function = scipy.interpolate.interp1d(variable[:], dataset[:]) + return function(points) + else: - xi = tuple(np.meshgrid(*new_points, indexing='ij')) - for channel in self.channels: - values = channel[:] - channel[:] = scipy.interpolate.interpn(old_points, values, xi, - method='linear', - bounds_error=False, - fill_value=np.nan) - # cleanup --------------------------------------------------------------------------------- - for i in range(len(self._axes)): - if not i == axis_index: - if flipped[i]: - self.flip(i) - axis[:] = points + pts = np.array([a.full.flatten() for a in self.axes]).T + out_pts = np.array([a.full.flatten() for a in out.axes]).T + + def interpolate(dataset, points): + values = dataset.full.flatten() + function = scipy.interpolate.LinearNDInterpolator(pts, values, rescale=True) + new = function(out_pts) + new.shape = out.shape + return new + + for v in self.variables: + if v.natural_name not in out.variable_names: + out.create_variable(values=interpolate(v, points), **v.attrs) + out.variable_names = self.variable_names # enforce old order + out._variables = None # force regeneration of variables @property + for channel in self.channels: + out.create_channel(values=interpolate(channel, points), **channel.attrs) + # finish + if verbose: + print('data mapped from {0} to {1}'.format(self.shape, out.shape)) + return out def offset(self, points, offsets, along, offset_axis, units='same', offset_units='same', mode='valid', @@ -1078,7 +1076,7 @@ class Data(Group): Uses the share_nans method found in wt.kit. """ def f(_, s, channels): - outs = wt_kit.share_nans([c[s] for c in channels]) + outs = wt_kit.share_nans(*[c[s] for c in channels]) for c, o in zip(channels, outs): c[s] = o diff --git a/WrightTools/kit/_array.py b/WrightTools/kit/_array.py index 9a4cadb..66cfb11 100644 --- a/WrightTools/kit/_array.py +++ b/WrightTools/kit/_array.py @@ -12,8 +12,16 @@ from .. import exceptions as wt_exceptions # --- define -------------------------------------------------------------------------------------- -__all__ = ['closest_pair', 'diff', 'fft', 'joint_shape', 'remove_nans_1D', 'share_nans', - 'smooth_1D', 'unique', 'valid_index'] +__all__ = ['closest_pair', + 'diff', + 'fft', + 'joint_shape', + 'orthogonal', + 'remove_nans_1D', + 'share_nans', + 'smooth_1D', + 'unique', + 'valid_index'] # --- functions ----------------------------------------------------------------------------------- @@ -144,59 +152,78 @@ def fft(xi, yi, axis=0): return xi, yi -def joint_shape(arrs): - """Given a list of arrays, return the joint shape. 
+def joint_shape(*args): + """Given a set of arrays, return the joint shape. Parameters ---------- - arrs : list of array-like - Input arrays. + args : array-likes Returns ------- tuple of int Joint shape. """ - if len(arrs) == 0: + if len(args) == 0: return () shape = [] - shapes = [a.shape for a in arrs] - ndim = arrs[0].ndim + shapes = [a.shape for a in args] + ndim = args[0].ndim for i in range(ndim): shape.append(max([s[i] for s in shapes])) return tuple(shape) -def remove_nans_1D(arrs): - """Remove nans in a list of 1D arrays. +def orthogonal(*args): + """Determine if a set of arrays are orthogonal. + + Parameters + ---------- + args : array-likes or array shapes + + Returns + ------- + bool + Array orthogonality condition. + """ + for i, arg in enumerate(args): + if hasattr(arg, 'shape'): + args[i] = arg.shape + for s in zip(*args): + if np.product(s) != max(s): + return False + return True + + +def remove_nans_1D(*args): + """Remove nans in a set of 1D arrays. Removes indicies in all arrays if any array is nan at that index. All input arrays must have the same size. Parameters ---------- - arrs : list of 1D arrays - The arrays to remove nans from + args : 1D arrays Returns ------- - list - List of 1D arrays in same order as given, with nan indicies removed. + tuple + Tuple of 1D arrays in same order as given, with nan indicies removed. """ # find all indicies to keep bads = np.array([]) - for arr in arrs: + for arr in args: bad = np.array(np.where(np.isnan(arr))).flatten() bads = np.hstack((bad, bads)) - if hasattr(arrs, 'shape') and len(arrs.shape) == 1: - goods = [i for i in np.arange(arrs.shape[0]) if i not in bads] + if hasattr(args, 'shape') and len(args.shape) == 1: + goods = [i for i in np.arange(args.shape[0]) if i not in bads] else: - goods = [i for i in np.arange(len(arrs[0])) if i not in bads] + goods = [i for i in np.arange(len(args[0])) if i not in bads] # apply - return [a[goods] for a in arrs] + return tuple(a[goods] for a in args) -def share_nans(arrs): +def share_nans(*arrs): """Take a list of nD arrays and return a new list of nD arrays. The new list is in the same order as the old list. @@ -205,8 +232,7 @@ def share_nans(arrs): Parameters ---------- - arrs : list of nD arrays - The arrays to syncronize nans from + *arrs : nD arrays. 
Returns ------- @@ -216,7 +242,7 @@ def share_nans(arrs): nans = np.zeros((arrs[0].shape)) for arr in arrs: nans *= arr - return [a + nans for a in arrs] + return tuple([a + nans for a in arrs]) def smooth_1D(arr, n=10): diff --git a/docs/data.rst b/docs/data.rst index 76455be..365e49a 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -40,50 +40,50 @@ It is possible to create data objects directly in special circumstances, as show def my_resonance(xi, yi, intensity=1, FWHM=500, x0=7000): def single(arr, intensity=intensity, FWHM=FWHM, x0=x0): return intensity*(0.5*FWHM)**2/((xi-x0)**2+(0.5*FWHM)**2) - return single(xi)[:, None] * single(yi)[None, :] - xi = np.linspace(6000, 8000, 75) - yi = np.linspace(6000, 8000, 75) + return single(xi) * single(yi) + xi = np.linspace(6000, 8000, 75)[:, None] + yi = np.linspace(6000, 8000, 75)[None, :] zi = my_resonance(xi, yi) # package into data object data = wt.Data(name='example') - xi.shape = (75, 1) - data.add_variable(name='w1', units='wn', values=xi) - yi.shape = (1, 75) - data.add_variable(name='w2', units='wn', values=yi) - data.add_channel(name='signal', values=zi) - data.transform(['w1', 'w2']) + data.create_variable(name='w1', units='wn', values=xi) + data.create_variable(name='w2', units='wn', values=yi) + data.create_channel(name='signal', values=zi) + data.transform('w1', 'w2') Structure & properties ---------------------- So what is a data object anyway? To put it simply, ``Data`` is a collection of ``Axis`` and ``Channel`` objects. +``Axis`` objects are composed of ``Variable`` objects. =============== ============================ -attribute contains +attribute tuple of... --------------- ---------------------------- data.axes wt.data.Axis objects data.channels wt.data.Channel objects +data.variables wt.data.Variable objects =============== ============================ +See also `Data.axis_expressions`, `Data.channel_names` and `Data.variable_names`. + Axis ```` Axes are the coordinates of the dataset. They have the following key attributes: -=============== ========================================================== -attribute description ---------------- ---------------------------------------------------------- -axis.label LaTeX-formatted label, appropriate for plotting -axis.min coordinates minimum, in current units -axis.max coordinates maximum, in current units -axis.name axis name -axis.points coordinates array, in current units -axis.units current axis units (change with ``axis.convert``) -=============== ========================================================== - -Axes can also be constants (data.constants), in which case they contain a single value in points. -This is crucial for keeping track of low dimensional data within a high dimensional experimental space. +================= ========================================================== +attribute description +----------------- ---------------------------------------------------------- +axis.label LaTeX-formatted label, appropriate for plotting +axis.min() coordinates minimum, in current units +axis.max() coordinates maximum, in current units +axis.natural_name axis name +axis.units current axis units (change with ``axis.convert``) +axis.variables component variables +axis.expression expression +================= ========================================================== Channel ``````` @@ -94,13 +94,12 @@ Channels contain the n-dimensional data itself. 
They have the following key attr attribute description --------------- ---------------------------------------------------------- channel.label LaTeX-formatted label, appropriate for plotting -channel.mag channel magnitude (furthest deviation from null) -channel.max channel maximum -channel.min channel minimum +channel.mag() channel magnitude (furthest deviation from null) +channel.max() channel maximum +channel.min() channel minimum channel.name channel name channel.null channel null (value of zero signal) channel.signed flag to indicate if channel is signed -channel.values n-dimensional array =============== ========================================================== Data @@ -121,15 +120,10 @@ The natural syntax is recommended, as it tends to result in more readable code. >>> data.pyro2 == data.channels[2] True -The order of the ``data.axes`` list is crucial, as the coordinate arrays must be kept aligned with the shape of the corresponding n-dimensional data arrays. - -In contrast, the order of ``data.channels`` is arbitrary. +The order of axes and channels is arbitrary. However many methods within WrightTools operate on the zero-indexed channel by default. For this reason, you can bring your favorite channel to zero-index using :meth:`~WrightTools.data.Data.bring_to_front`. -At many points throughout WrightTools you will need to refer to a particular axis or channel. -In such a case, you can always refer by name (string) or index (integer). - Units aware & interpolation ready --------------------------------- @@ -144,12 +138,10 @@ Here we list some of the capabilities that are enabled by this behavior. ================================================== ================================================================================ method description -------------------------------------------------- -------------------------------------------------------------------------------- -:meth:`~WrightTools.data.Data.divide` divide one channel by another, interpolating the divisor :meth:`~WrightTools.data.Data.heal` use interpolation to guess the value of NaNs within a channel :meth:`~WrightTools.data.join` join together multiple data objects, accounting for dimensionality and overlap -:meth:`~WrightTools.data.Data.map_axis` re-map axis coordinates +:meth:`~WrightTools.data.Data.map_variable` re-map data coordinates :meth:`~WrightTools.data.Data.offset` offset one axis based on another -:meth:`~WrightTools.data.Data.subtract` subtract one channel from another, interpolating the subtrahend ================================================== ================================================================================ Dimensionality without the cursing @@ -169,7 +161,6 @@ method description :meth:`~WrightTools.data.Data.chop` chop data into a list of lower dimensional data :meth:`~WrightTools.data.Data.collapse` destroy one dimension of data using a mathematical strategy :meth:`~WrightTools.data.Data.split` split data at a series of coordinates, without reducing dimensionality -:meth:`~WrightTools.data.Data.transpose` change the order of data axes ================================================== ================================================================================ WrightTools seamlessly handles dimensionality throughout. @@ -186,9 +177,7 @@ A selection of important methods follows. 
method description -------------------------------------------------- -------------------------------------------------------------------------------- :meth:`~WrightTools.data.Data.clip` clip values outside of a given range -:meth:`~WrightTools.data.Data.dOD` transform into dOD units :meth:`~WrightTools.data.Data.level` level the edge of data along a certain axis -:meth:`~WrightTools.data.Data.m` apply m-factor corrections [#carlson1989]_ :meth:`~WrightTools.data.Data.normalize` normalize a channel such that mag --> 1 and null --> 0 :meth:`~WrightTools.data.Data.revert` revert the data object to an earlier state :meth:`~WrightTools.data.Data.scale` apply a scaling to a channel, such as square root or log @@ -198,14 +187,6 @@ method description ================================================== ================================================================================ .. _JASCO: https://jascoinc.com/products/spectroscopy/ - .. _NISE: https://github.com/wright-group/NISE - .. _PyCMDS: https://github.com/wright-group/PyCMDS - .. _Shimadzu: http://www.ssi.shimadzu.com/products/productgroup.cfm?subcatlink=uvvisspectro - -.. [#carlson1989] **Absorption and Coherent Interference Effects in Multiply Resonant Four-Wave Mixing Spectroscopy** - Roger J. Carlson, and John C. Wright - *Applied Spectroscopy* **1989** 43, 1195--1208 - `doi:10.1366/0003702894203408 <http://dx.doi.org/10.1366/0003702894203408>`_ diff --git a/docs/index.rst b/docs/index.rst index 2f3561d..b9334ef 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -38,10 +38,10 @@ Contents wt5 data artists - fit + .. fit kit units - diagrams + .. diagrams datasets contributing api/modules diff --git a/docs/install.rst b/docs/install.rst index 9d39933..9edd8d3 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -9,7 +9,7 @@ conda-forge Conda_ is a multilingual package/environment manager. It seamlessly handles non-Python library dependencies which many scientific Python tools rely upon. Conda is reccomended, especially for Windows users. -If you don't have Python yet, start by `installing Anaconda`_ +If you don't have Python yet, start by `installing Anaconda`_ or `miniconda`_. `conda-forge`_ is a community-driven conda channel. `conda-forge contains a WrightTools feedstock`_. @@ -44,5 +44,6 @@ To upgrade: .. _installing Anaconda: https://www.continuum.io/downloads .. _conda-forge: https://conda-forge.org/ .. _conda-forge contains a WrightTools feedstock: https://github.com/conda-forge/wrighttools-feedstock +.. _miniconda: https://conda.io/miniconda.html .. _pip: https://pypi.python.org/pypi/pip .. _WrightTools is hosted on PyPI: https://pypi.org/project/WrightTools/ diff --git a/docs/units.rst b/docs/units.rst index e906dbb..7e01952 100644 --- a/docs/units.rst +++ b/docs/units.rst @@ -12,7 +12,7 @@ You can use it directly, if you wish. >>> wt.units.converter(2., 'eV', 'nm') 620.0 -This same units system enables the units-aware properties throughout WrightTools, as in ``Axis`` and ``Curve``. +This same units system enables the units-aware properties throughout WrightTools. In WrightTools, units are organized into kinds. It is always possible to convert between units of the same kind, and never possible to convert between kinds.
recover map_axis
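(For context only: the patch recorded above for this instance replaces Data.map_axis with a new Data.map_variable method that returns a new, re-mapped data object instead of mutating in place. A minimal usage sketch, lifted from the accompanying test patch (tests/data/map_variable.py) rather than from any official documentation — the dataset path and variable name come from that test:

import numpy as np
import WrightTools as wt
from WrightTools import datasets

# load one of the bundled example datasets used by the test
p = datasets.JASCO.PbSe_batch_1
data = wt.data.from_JASCO(p)

# re-map the 'energy' variable onto a coarser grid, in wavenumbers;
# a new Data object is returned and the original is left untouched
new = np.linspace(6000, 8000, 55)
mapped = data.map_variable('energy', new, 'wn')

data.close()
)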
wright-group/WrightTools
diff --git a/tests/data/map_axis.py b/tests/data/map_variable.py similarity index 53% rename from tests/data/map_axis.py rename to tests/data/map_variable.py index a5ff8a6..c9d1119 100644 --- a/tests/data/map_axis.py +++ b/tests/data/map_variable.py @@ -1,11 +1,9 @@ -"""test map_axis""" +"""Test map_variable.""" # --- import -------------------------------------------------------------------------------------- -import pytest - import numpy as np import WrightTools as wt @@ -15,35 +13,30 @@ from WrightTools import datasets # --- test ---------------------------------------------------------------------------------------- [email protected]() def test_array(): p = datasets.JASCO.PbSe_batch_1 data = wt.data.from_JASCO(p) assert data.shape == (1801,) new = np.linspace(6000, 8000, 55) - data.map_axis(0, new, 'wn') + mapped = data.map_variable('energy', new, 'wn') assert data.axes[0][:].all() == new.all() data.close() [email protected]() -def test_edge_tolerance(): - ps = datasets.KENT.LDS821_TRSF - data = wt.data.from_KENT(ps, ignore=['wm', 'd1', 'd2']) - new = np.linspace(1250, 1600, 101) - data.map_axis('w2', new, edge_tolerance=1) - assert data.w2[:].all() == new.all() - assert not np.isnan(data.channels[0][:]).any() - data.close() - - [email protected]() def test_int(): p = datasets.PyCMDS.wm_w2_w1_000 data = wt.data.from_PyCMDS(p) assert data.shape == (35, 11, 11) - data.map_axis(1, 5) - assert data.shape == (35, 5, 11) - data.map_axis(2, 25) - assert data.shape == (35, 5, 25) + mapped = data.map_variable('w2', 5) + assert mapped.shape == (35, 5, 11) + mapped = data.map_variable('w1', 25) + assert mapped.shape == (35, 11, 25) data.close() + + +# --- run ----------------------------------------------------------------------------------------- + + +if __name__ == '__main__': + test_array() + test_int() diff --git a/tests/data/split.py b/tests/data/split.py index c6bea9a..6c280d9 100644 --- a/tests/data/split.py +++ b/tests/data/split.py @@ -118,7 +118,7 @@ def test_split_axis_name(): a.close() [email protected]() # constants required [email protected]() def test_split_constant(): p = datasets.PyCMDS.wm_w2_w1_000 a = wt.data.from_PyCMDS(p) diff --git a/tests/kit/joint_shape.py b/tests/kit/joint_shape.py index a9afe81..60b85f5 100644 --- a/tests/kit/joint_shape.py +++ b/tests/kit/joint_shape.py @@ -15,11 +15,11 @@ import WrightTools as wt def test_5x7(): arr1 = np.empty((5, 1)) arr2 = np.empty((1, 7)) - assert wt.kit.joint_shape([arr1, arr2]) == (5, 7) + assert wt.kit.joint_shape(arr1, arr2) == (5, 7) def test_3x4x5(): arr1 = np.empty((1, 4, 1)) arr2 = np.empty((3, 1, 5)) arr3 = np.empty((1, 1, 1)) - assert wt.kit.joint_shape([arr1, arr2, arr3]) == (3, 4, 5) + assert wt.kit.joint_shape(arr1, arr2, arr3) == (3, 4, 5) diff --git a/tests/kit/remove_nans_1D.py b/tests/kit/remove_nans_1D.py index 29b90f8..31d15ab 100644 --- a/tests/kit/remove_nans_1D.py +++ b/tests/kit/remove_nans_1D.py @@ -15,13 +15,13 @@ import WrightTools as wt def test_simple(): arr = np.arange(-4, 6, dtype=float) arr[arr < 0] = np.nan - assert wt.kit.remove_nans_1D([arr])[0].all() == np.arange(0, 6, dtype=float).all() + assert wt.kit.remove_nans_1D(arr)[0].all() == np.arange(0, 6, dtype=float).all() def test_list(): arrs = [np.random.random(21) for _ in range(5)] arrs[0][0] = np.nan arrs[1][-1] = np.nan - arrs = wt.kit.remove_nans_1D(arrs) + arrs = wt.kit.remove_nans_1D(*arrs) for arr in arrs: assert arr.size == 19 diff --git a/tests/kit/share_nans.py b/tests/kit/share_nans.py index 730a298..40b78d3 100644 --- 
a/tests/kit/share_nans.py +++ b/tests/kit/share_nans.py @@ -15,6 +15,13 @@ import WrightTools as wt def test_5(): arrs = [np.random.random(5) for _ in range(12)] arrs[2][2] = np.nan - arrs = wt.kit.share_nans(arrs) + arrs = wt.kit.share_nans(*arrs) for arr in arrs: assert np.isnan(arr[2]) + + +# --- run ----------------------------------------------------------------------------------------- + + +if __name__ == '__main__': + test_5()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 10 }
2.13
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "sphinx", "sphinx-gallery", "sphinx-rtd-theme" ], "pre_install": [ "apt-get update", "apt-get install -y libfreetype6-dev hdf5-tools libhdf5-dev libopenblas-dev" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 appdirs==1.4.4 attrs==22.2.0 Babel==2.11.0 cached-property==1.5.2 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cycler==0.11.0 docutils==0.18.1 h5py==3.1.0 idna==3.10 imageio==2.15.0 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 kiwisolver==1.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 numexpr==2.8.1 numpy==1.19.5 packaging==21.3 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-gallery==0.10.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tidy_headers==1.0.3 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 -e git+https://github.com/wright-group/WrightTools.git@592649ce55c9fa7847325c9e9b15b320a38f1389#egg=WrightTools zipp==3.6.0
name: WrightTools channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - appdirs==1.4.4 - attrs==22.2.0 - babel==2.11.0 - cached-property==1.5.2 - charset-normalizer==2.0.12 - coverage==6.2 - cycler==0.11.0 - docutils==0.18.1 - h5py==3.1.0 - idna==3.10 - imageio==2.15.0 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - numexpr==2.8.1 - numpy==1.19.5 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-gallery==0.10.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tidy-headers==1.0.3 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/WrightTools
[ "tests/kit/joint_shape.py::test_5x7", "tests/kit/joint_shape.py::test_3x4x5", "tests/kit/remove_nans_1D.py::test_simple", "tests/kit/remove_nans_1D.py::test_list", "tests/kit/share_nans.py::test_5" ]
[ "tests/data/map_variable.py::test_array", "tests/data/map_variable.py::test_int" ]
[]
[]
MIT License
2,248
[ "WrightTools/kit/_array.py", "WrightTools/artists/_base.py", "docs/units.rst", "WrightTools/_dataset.py", "WrightTools/artists/_helpers.py", "docs/data.rst", "docs/install.rst", "WrightTools/data/_axis.py", "docs/index.rst", "WrightTools/data/_data.py" ]
[ "WrightTools/kit/_array.py", "WrightTools/artists/_base.py", "docs/units.rst", "WrightTools/_dataset.py", "WrightTools/artists/_helpers.py", "docs/data.rst", "docs/install.rst", "WrightTools/data/_axis.py", "docs/index.rst", "WrightTools/data/_data.py" ]
pika__pika-987
f00d871f95dce7fb57b77dabc5232ec5c26ab835
2018-03-04 11:12:40
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
diff --git a/.travis.yml b/.travis.yml index 7296c15..46eb54f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -49,8 +49,12 @@ before_script: - /bin/sh "$TRAVIS_BUILD_DIR/rabbitmq_server-$RABBITMQ_VERSION/sbin/rabbitmqctl" status script: + # See https://github.com/travis-ci/travis-ci/issues/1066 and https://github.com/pika/pika/pull/984#issuecomment-370565220 + # as to why 'set -e' and 'set +e' are added here + - set -e - nosetests - PIKA_TEST_TLS=true nosetests + - set +e after_success: - aws s3 cp .coverage "s3://com-gavinroy-travis/pika/$TRAVIS_BUILD_NUMBER/.coverage.${TRAVIS_PYTHON_VERSION}" diff --git a/docs/examples/tls_mutual_authentication.rst b/docs/examples/tls_mutual_authentication.rst index 9cd8dec..8d0e164 100644 --- a/docs/examples/tls_mutual_authentication.rst +++ b/docs/examples/tls_mutual_authentication.rst @@ -18,14 +18,13 @@ tls_example.py:: logging.basicConfig(level=logging.INFO) - cp = pika.ConnectionParameters( - ssl=True, - ssl_options=dict( - ssl_version=ssl.PROTOCOL_TLSv1, - ca_certs="/Users/me/tls-gen/basic/testca/cacert.pem", - keyfile="/Users/me/tls-gen/basic/client/key.pem", - certfile="/Users/me/tls-gen/basic/client/cert.pem", - cert_reqs=ssl.CERT_REQUIRED)) + context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations('/Users/me/tls-gen/basic/testca/cacert.pem') + context.load_cert_chain('/Users/me/tls-gen/basic/client/cert.pem', + '/Users/me/tls-gen/basic/client/key.pem') + + cp = pika.ConnectionParameters(ssl_options=pika.SSLOptions(context)) conn = pika.BlockingConnection(cp) ch = conn.channel() diff --git a/docs/examples/tls_server_uathentication.rst b/docs/examples/tls_server_authentication.rst similarity index 89% rename from docs/examples/tls_server_uathentication.rst rename to docs/examples/tls_server_authentication.rst index f2c56f1..79b58b9 100644 --- a/docs/examples/tls_server_uathentication.rst +++ b/docs/examples/tls_server_authentication.rst @@ -18,12 +18,11 @@ tls_example.py:: logging.basicConfig(level=logging.INFO) - cp = pika.ConnectionParameters( - ssl=True, - ssl_options=dict( - ssl_version=ssl.PROTOCOL_TLSv1, - ca_certs="/Users/me/tls-gen/basic/testca/cacert.pem", - cert_reqs=ssl.CERT_REQUIRED)) + context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations('/Users/me/tls-gen/basic/testca/cacert.pem') + + cp = pika.ConnectionParameters(ssl_options=pika.SSLOptions(context)) conn = pika.BlockingConnection(cp) ch = conn.channel() diff --git a/examples/twisted_service.py b/examples/twisted_service.py index ced89a0..3057121 100644 --- a/examples/twisted_service.py +++ b/examples/twisted_service.py @@ -52,14 +52,14 @@ class PikaService(service.MultiService): def connect(self): f = PikaFactory(self.parameters) - if self.parameters.ssl: + if self.parameters.ssl_options: s = ssl.ClientContextFactory() serv = internet.SSLClient(host=self.parameters.host, port=self.parameters.port, factory=f, contextFactory=s) else: serv = internet.TCPClient(host=self.parameters.host, port=self.parameters.port, factory=f) serv.factory = f f.service = serv - name = '%s%s:%d' % ('ssl:' if self.parameters.ssl else '', self.parameters.host, self.parameters.port) + name = '%s%s:%d' % ('ssl:' if self.parameters.ssl_options else '', self.parameters.host, self.parameters.port) serv.__repr__ = lambda : '<AMQP Connection to %s>' % name serv.setName(name) serv.parent = self diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py index 
6a0db68..a79a577 100644 --- a/pika/adapters/base_connection.py +++ b/pika/adapters/base_connection.py @@ -57,9 +57,6 @@ class BaseConnection(connection.Connection): raise ValueError( 'Expected instance of Parameters, not %r' % parameters) - # Let the developer know we could not import SSL - if parameters and parameters.ssl and not ssl: - raise RuntimeError("SSL specified but it is not available") self.base_events = self.READ | self.ERROR self.event_state = self.base_events self.ioloop = ioloop @@ -215,7 +212,7 @@ class BaseConnection(connection.Connection): pika.tcp_socket_opts.set_sock_opts(self.params.tcp_options, self.socket) # Wrap socket if using SSL - if self.params.ssl: + if self.params.ssl_options is not None: self.socket = self._wrap_socket(self.socket) ssl_text = " with SSL" else: @@ -240,7 +237,7 @@ class BaseConnection(connection.Connection): return error # Handle SSL Connection Negotiation - if self.params.ssl and self.DO_HANDSHAKE: + if self.params.ssl_options is not None and self.DO_HANDSHAKE: try: self._do_ssl_handshake() except ssl.SSLError as error: @@ -322,7 +319,7 @@ class BaseConnection(connection.Connection): if exception.errno in self.ERRORS_TO_ABORT: LOGGER.error("Fatal Socket Error: %r", exception) - elif self.params.ssl and isinstance(exception, ssl.SSLError): + elif isinstance(exception, ssl.SSLError): if exception.errno == ssl.SSL_ERROR_WANT_READ: # TODO doesn't seem right: this logic updates event state, but @@ -384,7 +381,7 @@ class BaseConnection(connection.Connection): try: while True: try: - if self.params.ssl: + if self.params.ssl_options is not None: # TODO Why using read instead of recv on ssl socket? data = self.socket.read(self._buffer_size) else: @@ -505,37 +502,11 @@ class BaseConnection(connection.Connection): :rtype: ssl.SSLSocket """ - ssl_options = self.params.ssl_options or {} - # our wrapped return sock - ssl_sock = None - - if isinstance(ssl_options, connection.SSLOptions): - context = ssl.SSLContext(ssl_options.ssl_version) - context.verify_mode = ssl_options.verify_mode - if ssl_options.certfile is not None: - context.load_cert_chain( - certfile=ssl_options.certfile, - keyfile=ssl_options.keyfile, - password=ssl_options.key_password) - - # only one of either cafile or capath have to defined - if ssl_options.cafile is not None or ssl_options.capath is not None: - context.load_verify_locations( - cafile=ssl_options.cafile, - capath=ssl_options.capath, - cadata=ssl_options.cadata) - - if ssl_options.ciphers is not None: - context.set_ciphers(ssl_options.ciphers) - - ssl_sock = context.wrap_socket( - sock, - server_side=ssl_options.server_side, - do_handshake_on_connect=ssl_options.do_handshake_on_connect, - suppress_ragged_eofs=ssl_options.suppress_ragged_eofs, - server_hostname=ssl_options.server_hostname) - else: - ssl_sock = ssl.wrap_socket( - sock, do_handshake_on_connect=self.DO_HANDSHAKE, **ssl_options) - - return ssl_sock + ssl_options = self.params.ssl_options + + return ssl_options.context.wrap_socket( + sock, + server_side=False, + do_handshake_on_connect=self.DO_HANDSHAKE, + suppress_ragged_eofs=True, + server_hostname=ssl_options.server_hostname) diff --git a/pika/compat.py b/pika/compat.py index ad14aaa..c5b4ae2 100644 --- a/pika/compat.py +++ b/pika/compat.py @@ -23,7 +23,8 @@ except AttributeError: if not PY2: # these were moved around for Python 3 from urllib.parse import (quote as url_quote, unquote as url_unquote, - urlencode) + urlencode, parse_qs as url_parse_qs, + urlparse) # Python 3 does not have basestring anymore; we 
include # *only* the str here as this is used for textual data. @@ -111,6 +112,8 @@ if not PY2: else: from urllib import quote as url_quote, unquote as url_unquote, urlencode + from urlparse import parse_qs as url_parse_qs, urlparse + basestring = basestring str_or_bytes = basestring xrange = xrange diff --git a/pika/connection.py b/pika/connection.py index c784cc2..287bf81 100644 --- a/pika/connection.py +++ b/pika/connection.py @@ -3,21 +3,16 @@ # pylint: disable=C0302 import ast -import sys import collections import copy import logging import math import numbers import platform +import socket import warnings import ssl -if sys.version_info > (3,): - import urllib.parse as urlparse # pylint: disable=E0611,F0401 -else: - import urlparse - from pika import __version__ from pika import callback as pika_callback import pika.channel @@ -28,6 +23,7 @@ from pika import heartbeat as pika_heartbeat from pika import spec +import pika.compat from pika.compat import (xrange, basestring, # pylint: disable=W0622 url_unquote, dictkeys, dict_itervalues, dict_iteritems) @@ -55,23 +51,6 @@ class InternalCloseReasons(object): class Parameters(object): # pylint: disable=R0902 """Base connection parameters class definition - :param bool backpressure_detection: `DEFAULT_BACKPRESSURE_DETECTION` - :param float|None blocked_connection_timeout: - `DEFAULT_BLOCKED_CONNECTION_TIMEOUT` - :param int channel_max: `DEFAULT_CHANNEL_MAX` - :param int connection_attempts: `DEFAULT_CONNECTION_ATTEMPTS` - :param credentials: `DEFAULT_CREDENTIALS` - :param int frame_max: `DEFAULT_FRAME_MAX` - :param int heartbeat: `DEFAULT_HEARTBEAT_TIMEOUT` - :param str host: `DEFAULT_HOST` - :param str locale: `DEFAULT_LOCALE` - :param int port: `DEFAULT_PORT` - :param float retry_delay: `DEFAULT_RETRY_DELAY` - :param float socket_timeout: `DEFAULT_SOCKET_TIMEOUT` - :param bool ssl: `DEFAULT_SSL` - :param dict ssl_options: `DEFAULT_SSL_OPTIONS` - :param str virtual_host: `DEFAULT_VIRTUAL_HOST` - :param int tcp_options: `DEFAULT_TCP_OPTIONS` """ # Declare slots to protect against accidental assignment of an invalid @@ -90,7 +69,6 @@ class Parameters(object): # pylint: disable=R0902 '_port', '_retry_delay', '_socket_timeout', - '_ssl', '_ssl_options', '_virtual_host', '_tcp_options' @@ -166,9 +144,6 @@ class Parameters(object): # pylint: disable=R0902 self._socket_timeout = None self.socket_timeout = self.DEFAULT_SOCKET_TIMEOUT - self._ssl = None - self.ssl = self.DEFAULT_SSL - self._ssl_options = None self.ssl_options = self.DEFAULT_SSL_OPTIONS @@ -186,7 +161,7 @@ class Parameters(object): # pylint: disable=R0902 """ return ('<%s host=%s port=%s virtual_host=%s ssl=%s>' % (self.__class__.__name__, self.host, self.port, - self.virtual_host, self.ssl)) + self.virtual_host, bool(self.ssl_options))) @property def backpressure_detection(self): @@ -494,47 +469,26 @@ class Parameters(object): # pylint: disable=R0902 (value,)) self._socket_timeout = value - @property - def ssl(self): - """ - :returns: boolean indicating whether to connect via SSL. Defaults to - `DEFAULT_SSL`. - - """ - return self._ssl - - @ssl.setter - def ssl(self, value): - """ - :param bool value: boolean indicating whether to connect via SSL - - """ - if not isinstance(value, bool): - raise TypeError('ssl must be a bool, but got %r' % (value,)) - self._ssl = value - @property def ssl_options(self): """ - :returns: None or a dict of options to pass to `ssl.wrap_socket`. - Defaults to `DEFAULT_SSL_OPTIONS`. 
- + :returns: None for plaintext or `pika.SSLOptions` instance for SSL/TLS. + :rtype: `pika.SSLOptions`|None """ return self._ssl_options @ssl_options.setter def ssl_options(self, value): """ - :param value: None, a dict of options to pass to `ssl.wrap_socket` or - a SSLOptions object for advanced setup. + :param `pika.SSLOptions`|None value: None for plaintext or + `pika.SSLOptions` instance for SSL/TLS. Defaults to None. """ - if not isinstance(value, (dict, SSLOptions, type(None))): + if not isinstance(value, (SSLOptions, type(None))): raise TypeError( - 'ssl_options must be a dict, None or an SSLOptions but got %r' + 'ssl_options must be None or SSLOptions but got %r' % (value, )) - # Copy the mutable object to avoid accidental side-effects - self._ssl_options = copy.deepcopy(value) + self._ssl_options = value @property @@ -560,13 +514,14 @@ class Parameters(object): # pylint: disable=R0902 def tcp_options(self): """ :returns: None or a dict of options to pass to the underlying socket + :rtype: dict|None """ return self._tcp_options @tcp_options.setter def tcp_options(self, value): """ - :param bool value: None or a dict of options to pass to the underlying + :param dict|None value: None or a dict of options to pass to the underlying socket. Currently supported are TCP_KEEPIDLE, TCP_KEEPINTVL, TCP_KEEPCNT and TCP_USER_TIMEOUT. Availability of these may depend on your platform. """ @@ -597,7 +552,6 @@ class ConnectionParameters(Parameters): channel_max=_DEFAULT, frame_max=_DEFAULT, heartbeat=_DEFAULT, - ssl=_DEFAULT, ssl_options=_DEFAULT, connection_attempts=_DEFAULT, retry_delay=_DEFAULT, @@ -624,9 +578,8 @@ class ConnectionParameters(Parameters): with the connection instance and the heartbeat timeout proposed by broker as its arguments. The callback should return a non-negative integer that will be used to override the broker's proposal. - :param bool ssl: Enable SSL - :param dict ssl_options: None or a dict of arguments to be passed to - ssl.wrap_socket + :param `pika.SSLOptions`|None ssl_options: None for plaintext or + `pika.SSLOptions` instance for SSL/TLS. Defaults to None. :param int connection_attempts: Maximum number of retry attempts :param int|float retry_delay: Time to wait in seconds, before the next :param int|float socket_timeout: Use for high latency networks @@ -701,17 +654,14 @@ class ConnectionParameters(Parameters): if socket_timeout is not self._DEFAULT: self.socket_timeout = socket_timeout - if ssl is not self._DEFAULT: - self.ssl = ssl - if ssl_options is not self._DEFAULT: self.ssl_options = ssl_options # Set port after SSL status is known if port is not self._DEFAULT: self.port = port - elif ssl is not self._DEFAULT: - self.port = self.DEFAULT_SSL_PORT if self.ssl else self.DEFAULT_PORT + else: + self.port = self.DEFAULT_SSL_PORT if self.ssl_options else self.DEFAULT_PORT if virtual_host is not self._DEFAULT: self.virtual_host = virtual_host @@ -754,10 +704,11 @@ class URLParameters(Parameters): the broker's value is accepted. 0 turns heartbeat off. - locale: Override the default `en_US` locale value - - ssl: - Toggle SSL, possible values are `t`, `f` - ssl_options: - Arguments passed to :meth:`ssl.wrap_socket` + None for plaintext; for SSL: dict of public ssl context-related + arguments that may be passed to :meth:`ssl.SSLSocket` as kwargs, + except `sock`, `server_side`,`do_handshake_on_connect`, `family`, + `type`, `proto`, `fileno`. - retry_delay: The number of seconds to sleep before attempting to connect on connection failure. 
@@ -804,12 +755,15 @@ class URLParameters(Parameters): # TODO Is support for the alternative http(s) schemes intentional? - parts = urlparse.urlparse(url) + parts = pika.compat.urlparse(url) if parts.scheme == 'https': - self.ssl = True + # Create default context which will get overridden by the + # ssl_options URL arg, if any + self.ssl_options = pika.SSLOptions( + context=ssl.create_default_context()) elif parts.scheme == 'http': - self.ssl = False + self.ssl_options = None elif parts.scheme: raise ValueError('Unexpected URL scheme %r; supported scheme ' 'values: amqp, amqps' % (parts.scheme,)) @@ -821,7 +775,8 @@ class URLParameters(Parameters): if parts.port is not None: self.port = parts.port else: - self.port = self.DEFAULT_SSL_PORT if self.ssl else self.DEFAULT_PORT + self.port = (self.DEFAULT_SSL_PORT if self.ssl_options + else self.DEFAULT_PORT) if parts.username is not None: self.credentials = pika_credentials.PlainCredentials(url_unquote(parts.username), @@ -833,7 +788,7 @@ class URLParameters(Parameters): # Handle query string values, validating and assigning them - self._all_url_query_values = urlparse.parse_qs(parts.query) + self._all_url_query_values = pika.compat.url_parse_qs(parts.query) for name, value in dict_iteritems(self._all_url_query_values): try: @@ -947,66 +902,60 @@ class URLParameters(Parameters): self.socket_timeout = socket_timeout def _set_url_ssl_options(self, value): - """Deserialize and apply the corresponding query string arg""" - self.ssl_options = ast.literal_eval(value) + """Deserialize and apply the corresponding query string arg + + """ + options = ast.literal_eval(value) + if options is None: + if self.ssl_options is not None: + raise ValueError( + 'Specified ssl_options=None URL arg is inconsistent with ' + 'the specified https URL scheme.') + else: + # Convert options to pika.SSLOptions via ssl.SSLSocket() + sock = socket.socket() + try: + ssl_sock = ssl.SSLSocket(sock=sock, **options) + try: + self.ssl_options = pika.SSLOptions( + context=ssl_sock.context, + server_hostname=ssl_sock.server_hostname) + finally: + ssl_sock.close() + finally: + sock.close() + + def _set_url_tcp_options(self, value): """Deserialize and apply the corresponding query string arg""" self.tcp_options = ast.literal_eval(value) + class SSLOptions(object): """Class used to provide parameters for optional fine grained control of SSL socket wrapping. 
- :param string keyfile: The key file to pass to SSLContext.load_cert_chain - :param string key_password: The key password to passed to - SSLContext.load_cert_chain - :param string certfile: The certificate file to passed to - SSLContext.load_cert_chain - :param bool server_side: Passed to SSLContext.wrap_socket - :param verify_mode: Passed to SSLContext.wrap_socket - :param ssl_version: Passed to SSLContext init, defines the ssl - version to use - :param string cafile: The CA file passed to - SSLContext.load_verify_locations - :param string capath: The CA path passed to - SSLContext.load_verify_locations - :param string cadata: The CA data passed to - SSLContext.load_verify_locations - :param do_handshake_on_connect: Passed to SSLContext.wrap_socket - :param suppress_ragged_eofs: Passed to SSLContext.wrap_socket - :param ciphers: Passed to SSLContext.set_ciphers - :param server_hostname: SSLContext.wrap_socket, used to enable SNI """ - def __init__(self, - keyfile=None, - key_password=None, - certfile=None, - server_side=False, - verify_mode=ssl.CERT_NONE, - ssl_version=ssl.PROTOCOL_SSLv23, - cafile=None, - capath=None, - cadata=None, - do_handshake_on_connect=True, - suppress_ragged_eofs=True, - ciphers=None, - server_hostname=None): - self.keyfile = keyfile - self.key_password = key_password - self.certfile = certfile - self.server_side = server_side - self.verify_mode = verify_mode - self.ssl_version = ssl_version - self.cafile = cafile - self.capath = capath - self.cadata = cadata - self.do_handshake_on_connect = do_handshake_on_connect - self.suppress_ragged_eofs = suppress_ragged_eofs - self.ciphers = ciphers + # Protect against accidental assignment of an invalid attribute + __slots__ = ('context', 'server_hostname') + + def __init__(self, context, server_hostname=None): + """ + :param ssl.SSLContext context: SSLContext instance + :param str|None server_hostname: SSLContext.wrap_socket, used to enable + SNI + """ + if not isinstance(context, ssl.SSLContext): + raise TypeError( + 'context must be of ssl.SSLContext type, but got {!r}'.format( + context)) + + self.context = context self.server_hostname = server_hostname + class Connection(object): """This is the core class that implements communication with RabbitMQ. This class should not be invoked directly but rather through the use of an @@ -1076,8 +1025,18 @@ class Connection(object): self.heartbeat = None # Set our configuration options - self.params = (copy.deepcopy(parameters) if parameters is not None else - ConnectionParameters()) + if parameters is not None: + # NOTE: Work around inability to copy ssl.SSLContext contained in + # our SSLOptions; ssl.SSLContext fails to implement __getnewargs__ + saved_ssl_options = parameters.ssl_options + parameters.ssl_options = None + try: + self.params = copy.deepcopy(parameters) + self.params.ssl_options = saved_ssl_options + finally: + parameters.ssl_options = saved_ssl_options + else: + self.params = ConnectionParameters() # Define our callback dictionary self.callbacks = pika_callback.CallbackManager()
Change pika to accept ssl.SSLContext instead of dict or homegrown SSLOptions attributes for 1.0.0 Pre 1.0.0, pika accepts either a dict or a homegrown `pika.connection.SSLOptions` instance and then [converts it to `ssl.SSLContext` (in case of `SSLOptions`) or passes the dict as kwargs to `ssl.wrap_socket()`](https://github.com/pika/pika/blob/09270712bf5912566df8af10ae68a1ddaea4f4e1/pika/adapters/base_connection.py#L512-L539). I can't think of a good reason to march on to 1.0.0 with this status quo. It's unnecessary and limiting. User's should just be able to pass an `ssl.SSLContext` instance which gives them most flexibility, including the ability to cache ssl contexts. For example, `asyncio` in Python 3 accepts only `ssl.SSLContext` instances. Proposal for pika 1.0.0: 1. Modify `pika.connection.SSLOptions` constructor to accept only a non-None `ssl.SSLContext` instance and an optional `server_hostname` arg 2. `server_hostname` defaults to None and is used for CN validation - see discussion in issue #464.
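(For context only: the documentation changes in the patch recorded above illustrate the proposed API. A minimal connection sketch adapted from those docs edits — the certificate path and hostname are placeholders, and server_hostname is the optional SNI argument named in the proposal:

import ssl
import pika

# Build a standard ssl.SSLContext and hand it to pika via the slimmed-down
# SSLOptions wrapper, which per the proposal accepts only a context and an
# optional server_hostname.
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations('/path/to/ca_certificate.pem')  # placeholder path

params = pika.ConnectionParameters(
    ssl_options=pika.SSLOptions(context, server_hostname='broker.example.com'))

connection = pika.BlockingConnection(params)
channel = connection.channel()
)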
pika/pika
diff --git a/tests/acceptance/async_test_base.py b/tests/acceptance/async_test_base.py index 1df0eb0..caf237d 100644 --- a/tests/acceptance/async_test_base.py +++ b/tests/acceptance/async_test_base.py @@ -37,13 +37,12 @@ class AsyncTestCase(unittest.TestCase): if self.should_test_tls(): self.logger.info('testing using TLS/SSL connection to port 5671') self.parameters.port = 5671 - self.parameters.ssl = True - self.parameters.ssl_options = dict( - ssl_version=ssl.PROTOCOL_TLSv1, - ca_certs="testdata/certs/ca_certificate.pem", - keyfile="testdata/certs/client_key.pem", - certfile="testdata/certs/client_certificate.pem", - cert_reqs=ssl.CERT_REQUIRED) + context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations('testdata/certs/ca_certificate.pem') + context.load_cert_chain('testdata/certs/client_certificate.pem', + 'testdata/certs/client_key.pem') + self.parameters.ssl_options = pika.SSLOptions(context) self._timed_out = False super(AsyncTestCase, self).setUp() diff --git a/tests/unit/base_connection_tests.py b/tests/unit/base_connection_tests.py index d35b3a2..6c23500 100644 --- a/tests/unit/base_connection_tests.py +++ b/tests/unit/base_connection_tests.py @@ -4,13 +4,13 @@ Tests for pika.base_connection.BaseConnection """ import socket +import ssl +import sys import unittest import mock import pika -import sys -import ssl import pika.tcp_socket_opts from pika.adapters import base_connection @@ -46,9 +46,8 @@ class BaseConnectionTests(unittest.TestCase): params = pika.ConnectionParameters(tcp_options=tcp_options) self.assertEqual(params.tcp_options, tcp_options) - with mock.patch.dict('pika.tcp_socket_opts._SUPPORTED_TCP_OPTIONS', { - 'TCP_KEEPIDLE': socket.TCP_KEEPIDLE - }): + with mock.patch.dict('pika.tcp_socket_opts._SUPPORTED_TCP_OPTIONS', + {'TCP_KEEPIDLE': socket.TCP_KEEPIDLE}): sock_mock = mock.Mock() pika.tcp_socket_opts.set_sock_opts(params.tcp_options, sock_mock) @@ -84,56 +83,13 @@ class BaseConnectionTests(unittest.TestCase): socket.SO_KEEPALIVE, 1) self.assertNotIn(keepalive_call, sock_mock.method_calls) - def test_ssl_wrap_socket_with_none_ssl_options(self): - - params = pika.ConnectionParameters(ssl_options=None) - self.assertIsNone(params.ssl_options) - - with mock.patch('pika.connection.Connection.connect'): - conn = base_connection.BaseConnection(parameters=params) - - with mock.patch('pika.adapters.base_connection' - '.ssl.wrap_socket') as wrap_socket_mock: - sock_mock = mock.Mock() - conn._wrap_socket(sock_mock) - - wrap_socket_mock.assert_called_once_with( - sock_mock, do_handshake_on_connect=conn.DO_HANDSHAKE) - - def test_ssl_wrap_socket_with_dict_ssl_options(self): - - ssl_options = dict(ssl='options', handshake=False) - params = pika.ConnectionParameters(ssl_options=ssl_options) - self.assertEqual(params.ssl_options, ssl_options) - - with mock.patch('pika.connection.Connection.connect'): - conn = base_connection.BaseConnection(parameters=params) - - with mock.patch('pika.adapters.base_connection' - '.ssl.wrap_socket') as wrap_socket_mock: - sock_mock = mock.Mock() - conn._wrap_socket(sock_mock) - - wrap_socket_mock.assert_called_once_with( - sock_mock, - do_handshake_on_connect=conn.DO_HANDSHAKE, - ssl='options', - handshake=False) - - @mock.patch('ssl.SSLContext.load_cert_chain') - @mock.patch('ssl.SSLContext.load_verify_locations') - @mock.patch('ssl.SSLContext.set_ciphers') @mock.patch('ssl.SSLContext.wrap_socket') - @unittest.skipIf(sys.version_info < (2,7,0), 'Unavailable ssl features') + 
@unittest.skipIf(sys.version_info < (2, 7, 0), 'Unavailable ssl features') def test_ssl_wrap_socket_with_default_ssl_options_obj(self, - wrap_socket_mock, - set_ciphers_mock, - load_verify_mock, - load_certs_mock): - ssl_options = pika.SSLOptions() + wrap_socket_mock): + ssl_options = pika.SSLOptions(context=ssl.create_default_context()) params = pika.ConnectionParameters(ssl_options=ssl_options) - #self.assertEqual(params.ssl_options, ssl_options) - + self.assertIs(params.ssl_options, ssl_options) with mock.patch('pika.connection.Connection.connect'): conn = base_connection.BaseConnection(parameters=params) @@ -141,40 +97,18 @@ class BaseConnectionTests(unittest.TestCase): sock_mock = mock.Mock() conn._wrap_socket(sock_mock) - load_certs_mock.assert_not_called() - load_verify_mock.assert_not_called() - # the __init__ of SSLContext calls set_ciphers, - # hence the 'called once' - set_ciphers_mock.assert_called_once() wrap_socket_mock.assert_called_once_with( sock_mock, server_side=False, do_handshake_on_connect=conn.DO_HANDSHAKE, suppress_ragged_eofs=True, - server_hostname=None - ) + server_hostname=None) - @mock.patch('ssl.SSLContext.load_cert_chain') - @mock.patch('ssl.SSLContext.load_verify_locations') - @mock.patch('ssl.SSLContext.set_ciphers') @mock.patch('ssl.SSLContext.wrap_socket') - @unittest.skipIf(sys.version_info < (2,7,0), 'Unavailable ssl features') - def test_ssl_wrap_socket_with_ssl_options_obj(self, - wrap_socket_mock, - set_ciphers_mock, - load_verify_mock, - load_certs_mock): - ssl_options = pika.SSLOptions(certfile='/some/cert/file.crt', - keyfile='/some/key/file.crt', - key_password='pa55w0rd', - cafile='/some/ca/file.crt', - capath='/some/ca/path', - cadata='/some/data/or/something', - ciphers='ciphers', - server_hostname='some.virtual.host', - do_handshake_on_connect=False, - suppress_ragged_eofs=False, - server_side=True) + @unittest.skipIf(sys.version_info < (2, 7, 0), 'Unavailable ssl features') + def test_ssl_wrap_socket_with_ssl_options_obj(self, wrap_socket_mock): + ssl_options = pika.SSLOptions(context=ssl.create_default_context(), + server_hostname='some.virtual.host') params = pika.ConnectionParameters(ssl_options=ssl_options) #self.assertEqual(params.ssl_options, ssl_options) @@ -185,18 +119,9 @@ class BaseConnectionTests(unittest.TestCase): sock_mock = mock.Mock() conn._wrap_socket(sock_mock) - load_certs_mock.assert_called_once_with(certfile='/some/cert/file.crt', - keyfile='/some/key/file.crt', - password='pa55w0rd') - load_verify_mock.assert_called_once_with(cafile='/some/ca/file.crt', - capath='/some/ca/path', - cadata='/some/data/or/something') - # the constructor of SSLContext calls set_ciphers as well - set_ciphers_mock.assert_called_with('ciphers') wrap_socket_mock.assert_called_once_with( sock_mock, - server_side=True, - do_handshake_on_connect=False, - suppress_ragged_eofs=False, - server_hostname='some.virtual.host' - ) + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname='some.virtual.host') diff --git a/tests/unit/connection_parameters_tests.py b/tests/unit/connection_parameters_tests.py index b71c8c3..159a54a 100644 --- a/tests/unit/connection_parameters_tests.py +++ b/tests/unit/connection_parameters_tests.py @@ -4,6 +4,7 @@ Test `pika.connection.Parameters`, `pika.connection.ConnectionParameters`, and """ import copy +import ssl import unittest import warnings @@ -65,8 +66,6 @@ class _ParametersTestsBase(unittest.TestCase): kls.DEFAULT_RETRY_DELAY, 'socket_timeout': kls.DEFAULT_SOCKET_TIMEOUT, 
- 'ssl': - kls.DEFAULT_SSL, 'ssl_options': kls.DEFAULT_SSL_OPTIONS, 'virtual_host': @@ -345,36 +344,18 @@ class ParametersTests(_ParametersTestsBase): with self.assertRaises(ValueError): params.socket_timeout = 0 - def test_ssl(self): - params = connection.Parameters() - - params.ssl = False - self.assertEqual(params.ssl, False) - - params.ssl = True - self.assertEqual(params.ssl, True) - - with self.assertRaises(TypeError): - params.backpressure_detection = 1 - - with self.assertRaises(TypeError): - params.ssl = 'True' - - with self.assertRaises(TypeError): - params.ssl = 'f' - def test_ssl_options(self): params = connection.Parameters() - opt = dict(key='value', key2=2, key3=dict(a=1)) - params.ssl_options = copy.deepcopy(opt) - self.assertEqual(params.ssl_options, opt) + ssl_options = connection.SSLOptions(ssl.create_default_context()) + params.ssl_options = ssl_options + self.assertIs(params.ssl_options, ssl_options) params.ssl_options = None self.assertIsNone(params.ssl_options) with self.assertRaises(TypeError): - params.ssl_options = str(opt) + params.ssl_options = dict() def test_virtual_host(self): params = connection.Parameters() @@ -414,27 +395,30 @@ class ConnectionParametersTests(_ParametersTestsBase): self.assert_default_parameter_values(connection.ConnectionParameters()) def test_explicit_ssl_with_default_port(self): - params = connection.ConnectionParameters(ssl=True) + params = connection.ConnectionParameters( + ssl_options=connection.SSLOptions(ssl.create_default_context())) - self.assertEqual(params.ssl, True) + self.assertIsNotNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_SSL_PORT) def test_explicit_ssl_with_explict_port(self): - params = connection.ConnectionParameters(ssl=True, port=99) + params = connection.ConnectionParameters( + ssl_options=connection.SSLOptions(ssl.create_default_context()), + port=99) - self.assertEqual(params.ssl, True) + self.assertIsNotNone(params.ssl_options) self.assertEqual(params.port, 99) def test_explicit_non_ssl_with_default_port(self): - params = connection.ConnectionParameters(ssl=False) + params = connection.ConnectionParameters(ssl_options=None) - self.assertEqual(params.ssl, False) + self.assertIsNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_PORT) def test_explicit_non_ssl_with_explict_port(self): - params = connection.ConnectionParameters(ssl=False, port=100) + params = connection.ConnectionParameters(ssl_options=None, port=100) - self.assertEqual(params.ssl, False) + self.assertIsNone(params.ssl_options) self.assertEqual(params.port, 100) def test_good_connection_parameters(self): @@ -455,10 +439,7 @@ class ConnectionParametersTests(_ParametersTestsBase): 'port': 5678, 'retry_delay': 3, 'socket_timeout': 100.5, - 'ssl': True, - 'ssl_options': { - 'ssl': 'options' - }, + 'ssl_options': None, 'virtual_host': u'vvhost', 'tcp_options': { 'TCP_USER_TIMEOUT': 1000 @@ -560,38 +541,38 @@ class URLParametersTests(_ParametersTestsBase): params = connection.URLParameters('') self.assert_default_parameter_values(params) - self.assertEqual(params.ssl, False) + self.assertIsNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_PORT) def test_no_ssl(self): params = connection.URLParameters('http://') - self.assertEqual(params.ssl, False) + self.assertIsNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_PORT) self.assert_default_parameter_values(params) params = connection.URLParameters('amqp://') - self.assertEqual(params.ssl, False) + 
self.assertIsNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_PORT) self.assert_default_parameter_values(params) def test_ssl(self): params = connection.URLParameters('https://') - self.assertEqual(params.ssl, True) + self.assertIsNotNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_SSL_PORT) params = connection.URLParameters('amqps://') - self.assertEqual(params.ssl, True) + self.assertIsNotNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_SSL_PORT) # Make sure the other parameters unrelated to SSL have default values params = connection.URLParameters('amqps://') - params.ssl = False + params.ssl_options = None params.port = params.DEFAULT_PORT self.assert_default_parameter_values(params) def test_no_url_scheme_defaults_to_plaintext(self): params = connection.URLParameters('//') - self.assertEqual(params.ssl, False) + self.assertIsNone(params.ssl_options) self.assertEqual(params.port, params.DEFAULT_PORT) def test_good_parameters(self): @@ -605,8 +586,19 @@ class URLParametersTests(_ParametersTestsBase): 'locale': 'en_UK', 'retry_delay': 3, 'socket_timeout': 100.5, + # NOTE: just straight ssl.CERT_OPTIONAL breaks on python 3.6 and 3.7 + # during ast.literal_eval() of the urlencoded dict as invalid syntax + # on <VerifyMode.CERT_NONE: 1>: + # {'cert_reqs': <VerifyMode.CERT_NONE: 1>, 'server_hostname': 'blah.blah.com'} 'ssl_options': { - 'ssl': 'options' + 'keyfile': None, + 'certfile': None, + 'ssl_version': int(ssl.PROTOCOL_SSLv23), + 'ca_certs': None, + 'cert_reqs': int(ssl.CERT_NONE), + 'npn_protocols': None, + 'ciphers': None, + 'server_hostname': 'blah.blah.com' }, 'tcp_options': { 'TCP_USER_TIMEOUT': 1000, @@ -615,7 +607,7 @@ class URLParametersTests(_ParametersTestsBase): } for backpressure in ('t', 'f'): - test_params = copy.deepcopy(query_args) + test_params = dict(query_args) test_params['backpressure_detection'] = backpressure virtual_host = '/' query_string = urlencode(test_params) @@ -632,17 +624,23 @@ class URLParametersTests(_ParametersTestsBase): expected_value = query_args[t_param] actual_value = getattr(params, t_param) - self.assertEqual( - actual_value, - expected_value, - msg='Expected %s=%r, but got %r' % - (t_param, expected_value, actual_value)) + if t_param == 'ssl_options': + self.assertEqual(actual_value.context.verify_mode, + expected_value['cert_reqs']) + self.assertEqual(actual_value.server_hostname, + expected_value['server_hostname']) + else: + self.assertEqual( + actual_value, + expected_value, + msg='Expected %s=%r, but got %r' % + (t_param, expected_value, actual_value)) self.assertEqual(params.backpressure_detection, backpressure == 't') # check all values from base URL - self.assertEqual(params.ssl, True) + self.assertIsNotNone(params.ssl_options) self.assertEqual(params.credentials.username, 'myuser') self.assertEqual(params.credentials.password, 'mypass') self.assertEqual(params.host, 'www.test.com')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 7 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 nose==1.3.7 packaging==21.3 -e git+https://github.com/pika/pika.git@f00d871f95dce7fb57b77dabc5232ec5c26ab835#egg=pika pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 tornado==6.1 Twisted==15.3.0 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0 zope.interface==5.5.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - tornado==6.1 - twisted==15.3.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/pika
[ "tests/unit/base_connection_tests.py::BaseConnectionTests::test_ssl_wrap_socket_with_default_ssl_options_obj", "tests/unit/base_connection_tests.py::BaseConnectionTests::test_ssl_wrap_socket_with_ssl_options_obj", "tests/unit/connection_parameters_tests.py::ParametersTests::test_default_property_values", "tests/unit/connection_parameters_tests.py::ParametersTests::test_ssl_options", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_default_property_values", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_explicit_ssl_with_default_port", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_explicit_ssl_with_explict_port", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_good_connection_parameters", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_default_property_values", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_good_parameters", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_no_ssl", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_ssl" ]
[]
[ "tests/unit/base_connection_tests.py::BaseConnectionTests::test_repr", "tests/unit/base_connection_tests.py::BaseConnectionTests::test_should_raise_value_exception_with_no_params_func_instead", "tests/unit/base_connection_tests.py::BaseConnectionTests::test_tcp_options_with_dict_tcp_options", "tests/unit/base_connection_tests.py::BaseConnectionTests::test_tcp_options_with_invalid_tcp_options", "tests/unit/base_connection_tests.py::BaseConnectionTests::test_tcp_options_with_none_tcp_options", "tests/unit/connection_parameters_tests.py::ParametersTests::test_backpressure_detection", "tests/unit/connection_parameters_tests.py::ParametersTests::test_blocked_connection_timeout", "tests/unit/connection_parameters_tests.py::ParametersTests::test_channel_max", "tests/unit/connection_parameters_tests.py::ParametersTests::test_connection_attempts", "tests/unit/connection_parameters_tests.py::ParametersTests::test_credentials", "tests/unit/connection_parameters_tests.py::ParametersTests::test_frame_max", "tests/unit/connection_parameters_tests.py::ParametersTests::test_heartbeat", "tests/unit/connection_parameters_tests.py::ParametersTests::test_host", "tests/unit/connection_parameters_tests.py::ParametersTests::test_locale", "tests/unit/connection_parameters_tests.py::ParametersTests::test_port", "tests/unit/connection_parameters_tests.py::ParametersTests::test_retry_delay", "tests/unit/connection_parameters_tests.py::ParametersTests::test_socket_timeout", "tests/unit/connection_parameters_tests.py::ParametersTests::test_tcp_options", "tests/unit/connection_parameters_tests.py::ParametersTests::test_virtual_host", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_bad_type_connection_parameters", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_callable_heartbeat", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_deprecated_heartbeat_interval", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_explicit_non_ssl_with_default_port", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_explicit_non_ssl_with_explict_port", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_parameters_accept_plain_string_locale", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_parameters_accept_unicode_locale", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_parameters_accepts_plain_string_virtualhost", "tests/unit/connection_parameters_tests.py::ConnectionParametersTests::test_parameters_accepts_unicode_string_virtualhost", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_accepts_blank_username_and_password", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_accepts_plain_string", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_accepts_unicode_string", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_deprecated_heartbeat_interval", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_no_url_scheme_defaults_to_plaintext", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_url_decodes_username_and_password", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_uses_default_port_if_not_specified", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_uses_default_username_and_password_if_not_specified", 
"tests/unit/connection_parameters_tests.py::URLParametersTests::test_uses_default_virtual_host_if_not_specified", "tests/unit/connection_parameters_tests.py::URLParametersTests::test_uses_default_virtual_host_if_only_slash_is_specified" ]
[]
BSD 3-Clause "New" or "Revised" License
2,249
[ "docs/examples/tls_server_uathentication.rst", "pika/compat.py", ".travis.yml", "docs/examples/tls_mutual_authentication.rst", "pika/connection.py", "pika/adapters/base_connection.py", "examples/twisted_service.py" ]
[ "pika/compat.py", ".travis.yml", "docs/examples/tls_mutual_authentication.rst", "docs/examples/tls_server_authentication.rst", "pika/connection.py", "pika/adapters/base_connection.py", "examples/twisted_service.py" ]
epochblue__nanogen-5
263ca7f785530adfafc3ce5afa496b6c3f465816
2018-03-04 20:06:50
dfa6393e9c03290146be9d9e0676a40f84e517a5
diff --git a/.gitignore b/.gitignore index 119f9d6..ae20bd2 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ *.egg-info build dist +.pytest_cache # nanogen files example/_site diff --git a/nanogen/cli.py b/nanogen/cli.py index b8cac8f..b78f549 100644 --- a/nanogen/cli.py +++ b/nanogen/cli.py @@ -7,7 +7,7 @@ from nanogen import version from nanogen import models -blog = models.Blog() +blog = models.Blog(os.getcwd()) @click.group() @@ -42,7 +42,7 @@ def new(title): try: blog.new_post(title) except ValueError as ve: - click.ClickException(ve.message) + click.ClickException(str(ve)) @cli.command() @click.argument('title') @@ -51,7 +51,7 @@ def draft(title): try: blog.new_post(title, draft=True) except ValueError as ve: - click.ClickException(ve.message) + click.ClickException(str(ve)) @cli.command() diff --git a/nanogen/models.py b/nanogen/models.py index 481ebae..8acb8bc 100644 --- a/nanogen/models.py +++ b/nanogen/models.py @@ -35,12 +35,16 @@ class Post(object): self.raw_content = p.read() lines = self.raw_content.strip().splitlines() - self.title = lines[0].lstrip('#') + self.title = lines[0].lstrip('#').strip() self.markdown_content = '\n'.join(lines[2:]).strip() self.html_content = renderer.markdown(self.markdown_content) def __repr__(self): - return u'{}(path={})'.format(self.__class__.__name__, self.path) + return u'{}(base_path={}, path_to_file={})'.format( + self.__class__.__name__, + self.base_path, + self.path + ) @property def pub_date(self): @@ -64,19 +68,19 @@ class Post(object): @property def permalink(self): dt = self.pub_date - return '/{}/{:02d}/{}'.format(dt.year, dt.month, self.html_filename) + return os.path.join(str(dt.year), '{:02d}'.format(dt.month), self.html_filename) class Blog(object): - PATHS = { - 'cwd': os.getcwd(), - 'site': os.path.join(os.getcwd(), '_site'), - 'posts': os.path.join(os.getcwd(), '_posts'), - 'drafts': os.path.join(os.getcwd(), '_drafts'), - 'layout': os.path.join(os.getcwd(), '_layout') - } - - def __init__(self): + def __init__(self, base_dir): + self.PATHS = { + 'cwd': base_dir, + 'site': os.path.join(base_dir, '_site'), + 'posts': os.path.join(base_dir, '_posts'), + 'drafts': os.path.join(base_dir, '_drafts'), + 'layout': os.path.join(base_dir, '_layout') + } + self.config = self.parse_config() self.posts = self.collect_posts() @@ -198,7 +202,7 @@ class Blog(object): subprocess.call(['mkdir', d]) # Generate template blog configuration file - config_path = os.path.join(self.PATHS['layout'], 'blog.cfg') + config_path = os.path.join(self.PATHS['cwd'], 'blog.cfg') if not os.path.exists(config_path): with open(config_path, 'w') as f: text = textwrap.dedent("""\ diff --git a/nanogen/utils.py b/nanogen/utils.py index cd96f41..82bc975 100644 --- a/nanogen/utils.py +++ b/nanogen/utils.py @@ -22,6 +22,9 @@ def is_valid_post_file(basename): post_pattern = r'^\d{4}-\d{2}-\d{2}-.*' markdown_extensions = ['md', 'markdown', 'mdown'] + if '.' 
not in basename: + return False + filename, ext = os.path.basename(basename).rsplit('.', 1) ignorable = filename.startswith('_') diff --git a/setup.py b/setup.py index 56de568..ab1ec73 100644 --- a/setup.py +++ b/setup.py @@ -3,6 +3,17 @@ from setuptools import setup from nanogen.version import version +install_requires = [ + 'click==6.7', + 'mistune==0.8.3', + 'Jinja2==2.10', + 'Pygments==2.2.0' +] + +dev_requires = [ + 'pytest==3.4.1' +] + entry_points = { 'console_scripts': ['nanogen = nanogen.cli:cli'] } @@ -23,12 +34,10 @@ setup(name='nanogen', license='MIT', url='https://github.com/epochblue/nanogen', py_modules=['nanogen'], - install_requires=[ - 'click==6.7', - 'mistune==0.8.3', - 'Jinja2==2.10', - 'Pygments==2.2.0' - ], + install_requires=install_requires, + extras_require={ + 'dev': dev_requires, + }, entry_points=entry_points, keywords=['command line', 'static generator', 'blog'], classifiers=[
Add tests `nanogen` needs some manner of tests to ensure correctness, but right now it has zero. That's just not OK, y'all.
epochblue/nanogen
diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..604c2ac --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,280 @@ +import datetime +import os +from unittest import mock + +from nanogen import models + +example_post = """\ +# Test Post + +And this is my _markdown_ **content**. + +Look, it also has: + +* an +* unordered +* list +""" + +example_config = """\ +[site] +author = Example user +email = [email protected] +description = A test description +url = http://www.example.com +title = Test Example +""" + + +def test_post(tmpdir): + f = tmpdir.mkdir('blog').join('2018-01-01-test-post.md') + f.write(example_post) + + file_path = os.path.join(tmpdir, 'blog', '2018-01-01-test-post.md') + p = models.Post(tmpdir, file_path) + + assert p.filename == '2018-01-01-test-post.md' + assert p.title == 'Test Post' + assert p.raw_content == example_post + expected_markdown = example_post.strip().splitlines() + assert p.markdown_content == '\n'.join(expected_markdown[2:]) + assert p.pub_date == datetime.datetime(2018, 1, 1, 0, 0, 0) + assert p.slug == 'test-post' + assert p.html_filename == 'test-post.html' + assert p.permapath == os.path.join(tmpdir, '2018', '01', 'test-post.html') + assert p.permalink == os.path.join('2018', '01', 'test-post.html') + + +def test_blog_create(tmpdir): + path = tmpdir.mkdir('blog') + config_file = path.join('blog.cfg') + config_file.write(example_config) + blog = models.Blog(path) + assert len(blog.posts) == 0 + assert blog.config['site']['author'] == 'Example user' + assert blog.config['site']['email'] == '[email protected]' + assert blog.config['site']['description'] == 'A test description' + assert blog.config['site']['url'] == 'http://www.example.com' + assert blog.config['site']['title'] == 'Test Example' + + +def test_blog_init(tmpdir): + path = tmpdir.mkdir('blog') + blog = models.Blog(path) + blog.init() + + listing = [os.path.basename(file) for file in path.listdir()] + assert len(listing) == 4 + assert 'blog.cfg' in listing + assert '_layout' in listing + assert '_posts' in listing + assert '_drafts' in listing + + +def test_blog_new_post(tmpdir): + path = tmpdir.mkdir('blog') + blog = models.Blog(path) + blog.init() + + before_posts = blog.collect_posts() + assert len(before_posts) == 0 + + with mock.patch('subprocess.call'): + blog.new_post('Test title', draft=False) + + after_posts = blog.collect_posts() + assert len(after_posts) == 1 + today = datetime.date.today() + expected_filename = f'{today.year}-{today.month:02d}-{today.day:02d}-test-title.md' + assert after_posts[0].filename == expected_filename + + +def test_blog_new_draft(tmpdir): + path = tmpdir.mkdir('blog') + blog = models.Blog(path) + blog.init() + + before_posts = blog.collect_posts() + assert len(before_posts) == 0 + + with mock.patch('subprocess.call'): + blog.new_post('Test title', draft=True) + + after_posts = blog.collect_posts() + assert len(after_posts) == 0 + + +def test_blog_copy_static_files(tmpdir): + path = tmpdir.mkdir('blog') + site_path = path.mkdir('_site') + + # Add a static file to the projet + blog = models.Blog(path) + blog.init() + css_file = path.join('_layout').mkdir('static').join('example.css') + css_file.write('# CSS goes here') + blog.copy_static_files() + + site_static_path = site_path.join('static') + static_files = [os.path.basename(file) for file in site_static_path.listdir()] + assert 'example.css' in static_files + + +def test_blog_generate_posts(tmpdir): + path = tmpdir.mkdir('blog') + site_path = path.mkdir('_site') + 
+ # Set up a nanogen blog for posts + blog = models.Blog(path) + blog.init() + + with mock.patch('subprocess.call'): + blog.new_post('Test title 1', draft=False) + + post_template = path.join('_layout').join('post.html') + post_template.write("""\ + <!doctype html> + <html> + <body>Single post template would go here.</body> + </html> + """) + + blog_config = path.join('_layout').join('blog.cfg') + blog_config.write(example_config) + + # Refresh the blog instance to better emulate real-world usage + blog = models.Blog(path) + blog.generate_posts() + + today = datetime.date.today() + expected_post_dir = site_path.join(f'{today.year}').join(f'{today.month:02d}') + generated_posts = [os.path.basename(file) for file in expected_post_dir.listdir()] + assert len(generated_posts) == 1 + assert 'test-title-1.html' in generated_posts + + +def test_blog_generate_index_page(tmpdir): + path = tmpdir.mkdir('blog') + site_path = path.mkdir('_site') + + # Set up a nanogen blog for posts + blog = models.Blog(path) + blog.init() + + with mock.patch('subprocess.call'): + blog.new_post('Test title 1', draft=False) + + index_template = path.join('_layout').join('index.html') + index_template.write("""\ + <!doctype html> + <html> + <body>Index template would go here.</body> + </html> + """) + + blog_config = path.join('_layout').join('blog.cfg') + blog_config.write('[site]') + + # Refresh the blog instance to better emulate real-world usage + blog = models.Blog(path) + blog.generate_index_page() + + site_dir = [os.path.basename(file) for file in site_path.listdir()] + assert 'index.html' in site_dir + + +def test_blog_generate_feeds_no_feed_files(tmpdir): + path = tmpdir.mkdir('blog') + site_path = path.mkdir('_site') + + # Set up a nanogen blog for posts + blog = models.Blog(path) + blog.init() + + blog_config = path.join('_layout').join('blog.cfg') + blog_config.write(example_config) + + # Refresh the blog instance to better emulate real-world usage + blog = models.Blog(path) + blog.generate_feeds() + + site_dir = [os.path.basename(file) for file in site_path.listdir()] + assert 'rss.xml' not in site_dir + assert 'feed.json' not in site_dir + + +def test_blog_feeds(tmpdir): + path = tmpdir.mkdir('blog') + site_path = path.mkdir('_site') + + # Set up a nanogen blog for posts + blog = models.Blog(path) + blog.init() + + with mock.patch('subprocess.call'): + blog.new_post('Test title 1', draft=False) + + rss_template = path.join('_layout').join('rss.xml') + rss_template.write("""RSS Feed template would go here.""") + + json_template = path.join('_layout').join('feed.json') + json_template.write("""JSON Feed template would go here.""") + + blog_config = path.join('_layout').join('blog.cfg') + blog_config.write(example_config) + + # Refresh the blog instance to better emulate real-world usage + blog = models.Blog(path) + blog.generate_feeds() + + site_dir = [os.path.basename(file) for file in site_path.listdir()] + assert 'rss.xml' in site_dir + assert 'feed.json' in site_dir + + +def test_blog_build_and_clean(tmpdir): + def test_blog_generate_index_page(tmpdir): + path = tmpdir.mkdir('blog') + site_path = path.mkdir('_site') + + # Set up a nanogen blog for posts + blog = models.Blog(path) + blog.init() + + with mock.patch('subprocess.call'): + blog.new_post('Test title 1', draft=False) + + post_template = path.join('_layout').join('post.html') + post_template.write("""\ + <!doctype html> + <html> + <body>Post template would go here.</body> + </html> + """) + + index_template = 
path.join('_layout').join('index.html') + index_template.write("""\ + <!doctype html> + <html> + <body>Index template would go here.</body> + </html> + """) + + blog_config = path.join('_layout').join('blog.cfg') + blog_config.write(example_config) + + # Refresh the blog instance to better emulate real-world usage + blog = models.Blog(path) + blog.build() + + site_dir = [os.path.basename(file) for file in site_path.listdir()] + assert 'index.html' in site_dir + + today = datetime.date.today() + expected_post_dir = site_path.join(f'{today.year}').join(f'{today.month:02d}') + generated_posts = [os.path.basename(file) for file in expected_post_dir.listdir()] + assert len(generated_posts) == 1 + assert 'test-title-1.html' in generated_posts + + blog.clean() + assert not os.path.isdir(site_path) diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..88c64a2 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,16 @@ +from nanogen import utils + + +def test_slugify(): + assert utils.slugify('This is a test') == 'this-is-a-test' + assert utils.slugify('this is ^&* a test') == 'this-is-----a-test' + + +def test_is_valid_post_file(): + assert not utils.is_valid_post_file('asldkjf') + assert not utils.is_valid_post_file('asldkjf.md') + assert not utils.is_valid_post_file('12-34-56-example.md') + assert not utils.is_valid_post_file('_2018-01-01-example-file.md') + assert not utils.is_valid_post_file('2018-01-01-example-file.html') + assert utils.is_valid_post_file('2018-01-01-example-file.md') +
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 5 }
2.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 click==6.7 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==2.10 MarkupSafe==2.0.1 mistune==0.8.3 -e git+https://github.com/epochblue/nanogen.git@263ca7f785530adfafc3ce5afa496b6c3f465816#egg=nanogen packaging==21.3 pluggy==1.0.0 py==1.11.0 Pygments==2.2.0 pyparsing==3.1.4 pytest==7.0.1 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: nanogen channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==6.7 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==2.10 - markupsafe==2.0.1 - mistune==0.8.3 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pygments==2.2.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/nanogen
[ "tests/test_models.py::test_post", "tests/test_models.py::test_blog_create", "tests/test_models.py::test_blog_init", "tests/test_models.py::test_blog_new_post", "tests/test_models.py::test_blog_new_draft", "tests/test_models.py::test_blog_copy_static_files", "tests/test_models.py::test_blog_generate_posts", "tests/test_models.py::test_blog_generate_index_page", "tests/test_models.py::test_blog_generate_feeds_no_feed_files", "tests/test_models.py::test_blog_feeds", "tests/test_utils.py::test_is_valid_post_file" ]
[]
[ "tests/test_models.py::test_blog_build_and_clean", "tests/test_utils.py::test_slugify" ]
[]
MIT License
2,250
[ "nanogen/models.py", "setup.py", ".gitignore", "nanogen/cli.py", "nanogen/utils.py" ]
[ "nanogen/models.py", "setup.py", ".gitignore", "nanogen/cli.py", "nanogen/utils.py" ]
wolever__parameterized-53
4520ef06da539a9780cef4e6d59a3966c0c6de86
2018-03-05 13:44:35
4520ef06da539a9780cef4e6d59a3966c0c6de86
diff --git a/parameterized/parameterized.py b/parameterized/parameterized.py index 1d83404..96de811 100644 --- a/parameterized/parameterized.py +++ b/parameterized/parameterized.py @@ -40,6 +40,29 @@ else: _param = namedtuple("param", "args kwargs") + +def reapply_patches_if_need(func): + + def dummy_wrapper(orgfunc): + @wraps(orgfunc) + def dummy_func(*args, **kwargs): + return orgfunc(*args, **kwargs) + return dummy_func + + if hasattr(func, 'patchings'): + func = dummy_wrapper(func) + tmp_patchings = func.patchings + delattr(func, 'patchings') + for patch_obj in tmp_patchings: + func = patch_obj.decorate_callable(func) + return func + + +def delete_patches_if_need(func): + if hasattr(func, 'patchings'): + func.patchings[:] = [] + + class param(_param): """ Represents a single parameter to a test case. @@ -426,8 +449,16 @@ class parameterized(object): paramters = cls.input_as_callable(input)() for num, p in enumerate(paramters): name = name_func(f, num, p) - frame_locals[name] = cls.param_as_standalone_func(p, f, name) + # If the original function has patches applied by 'mock.patch', + # re-construct all patches on the just former decoration layer + # of param_as_standalone_func so as not to share + # patch objects between new functions + nf = reapply_patches_if_need(f) + frame_locals[name] = cls.param_as_standalone_func(p, nf, name) frame_locals[name].__doc__ = doc_func(f, num, p) + # Delete original patches to prevent new function from evaluating + # original patching object as well as re-constructed patches. + delete_patches_if_need(f) f.__test__ = False return parameterized_expand_wrapper diff --git a/tox.ini b/tox.ini index c75a0ab..c58219f 100644 --- a/tox.ini +++ b/tox.ini @@ -4,6 +4,7 @@ envlist=py{27,35,36,py}-{nose,nose2,pytest,unit,unit2} [testenv] deps= nose + mock nose2: nose2 pytest: pytest>=2 unit2: unittest2
parameterized.expand makes the mock.patch class decorator get evaluated against the same function multiple times If we use parameterized.expand together with a mock.patch class decorator, the mock.patch decorator is evaluated against the same function object multiple times and messes up the arguments. Suppose I have the following code ``` import os from mock import patch from parameterized import parameterized @patch("os.listdir") class Test(): @parameterized.expand([[1,2], [3,4], [5,6]]) @patch("sys.path") def test1(self, t1, t2, mock_syspath, mock_listdir): pass ``` I expected the above test code to succeed, but it actually raises the following error ``` @wraps(func) def patched(*args, **keywargs): extra_args = [] entered_patchers = [] exc_info = tuple() try: for patching in patched.patchings: arg = patching.__enter__() entered_patchers.append(patching) if patching.attribute_name is not None: keywargs.update(arg) elif patching.new is DEFAULT: extra_args.append(arg) args += tuple(extra_args) > return func(*args, **keywargs) E TypeError: test1() takes exactly 5 arguments (7 given) ``` The root cause of the failure is that the mock.patch class decorator adds a mock.patch function decorator to every method in the class, and the functions created by the parameterized.expand decorator are decorated as well; however, the function object wrapped by standalone_func is the same for every generated function, so the mock.patch function decorator is unintentionally executed against that single function object and adds one extra argument for each parameterized case. That's why the argument-count mismatch exception is raised in the above test code. The mock module is widely used in tests and Python 3 ships it under the unittest module, so I think it's better to support the above case.
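To make the failure mode described above concrete, here is a minimal sketch of the re-wrapping approach the patch above takes: give every generated test its own wrapper object before re-applying the mock patches, so the `patchings` list is no longer shared between the expanded copies. The helper names (`_fresh_wrapper`, `reapply_patches`) and the standalone usage at the bottom are illustrative assumptions for this sketch, not the library's actual API.

```python
from functools import wraps

import mock


def _fresh_wrapper(func):
    # Give 'func' a brand-new function object so decorators applied to one
    # expanded copy cannot leak onto the others.
    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


def reapply_patches(func):
    # Re-apply existing mock patches on a fresh wrapper (simplified from the fix above).
    if hasattr(func, 'patchings'):
        patches = func.patchings
        func = _fresh_wrapper(func)
        # functools.wraps copies __dict__, so the wrapper inherits a reference
        # to the shared 'patchings' list; drop it before re-decorating,
        # otherwise mock.patch would keep appending to that shared list.
        func.__dict__.pop('patchings', None)
        for patch_obj in patches:
            func = patch_obj(func)
    return func


@mock.patch("os.umask")
def original(foo, mock_umask):
    return foo, mock_umask


copy_a = reapply_patches(original)
copy_b = reapply_patches(original)
# Mirror the fix above: empty the original's patch list so its own wrapper
# does not apply the same patches a second time when the copies call it.
original.patchings[:] = []

print(copy_a(42))  # (42, <MagicMock name='umask' ...>) - patched exactly once
print(copy_b(43))  # (43, <MagicMock name='umask' ...>)
```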
wolever/parameterized
diff --git a/parameterized/test.py b/parameterized/test.py index d5bcd8f..2cfded3 100644 --- a/parameterized/test.py +++ b/parameterized/test.py @@ -1,6 +1,7 @@ # coding=utf-8 import inspect +import mock from unittest import TestCase from nose.tools import assert_equal from nose.plugins.skip import SkipTest @@ -101,6 +102,74 @@ def custom_naming_func(custom_tag): return custom_naming_func [email protected]("os.getpid") +class TestParameterizedExpandWithMockPatchForClass(TestCase): + expect([ + "test_one_function_patch_decorator('foo1', 'umask', 'getpid')", + "test_one_function_patch_decorator('foo0', 'umask', 'getpid')", + "test_one_function_patch_decorator(42, 'umask', 'getpid')", + ]) + + @parameterized.expand([(42, ), "foo0", param("foo1")]) + @mock.patch("os.umask") + def test_one_function_patch_decorator(self, foo, mock_umask, mock_getpid): + missing_tests.remove("test_one_function_patch_decorator(%r, %r, %r)" % + (foo, mock_umask._mock_name, + mock_getpid._mock_name)) + + expect([ + "test_multiple_function_patch_decorator" + "(42, 51, 'umask', 'fdopen', 'getpid')", + "test_multiple_function_patch_decorator" + "('foo0', 'bar0', 'umask', 'fdopen', 'getpid')", + "test_multiple_function_patch_decorator" + "('foo1', 'bar1', 'umask', 'fdopen', 'getpid')", + ]) + + @parameterized.expand([(42, 51), ("foo0", "bar0"), param("foo1", "bar1")]) + @mock.patch("os.fdopen") + @mock.patch("os.umask") + def test_multiple_function_patch_decorator(self, foo, bar, mock_umask, + mock_fdopen, mock_getpid): + missing_tests.remove("test_multiple_function_patch_decorator" + "(%r, %r, %r, %r, %r)" % + (foo, bar, mock_umask._mock_name, + mock_fdopen._mock_name, mock_getpid._mock_name)) + + +class TestParameterizedExpandWithNoMockPatchForClass(TestCase): + expect([ + "test_one_function_patch_decorator('foo1', 'umask')", + "test_one_function_patch_decorator('foo0', 'umask')", + "test_one_function_patch_decorator(42, 'umask')", + ]) + + @parameterized.expand([(42, ), "foo0", param("foo1")]) + @mock.patch("os.umask") + def test_one_function_patch_decorator(self, foo, mock_umask): + missing_tests.remove("test_one_function_patch_decorator(%r, %r)" % + (foo, mock_umask._mock_name)) + + expect([ + "test_multiple_function_patch_decorator" + "(42, 51, 'umask', 'fdopen')", + "test_multiple_function_patch_decorator" + "('foo0', 'bar0', 'umask', 'fdopen')", + "test_multiple_function_patch_decorator" + "('foo1', 'bar1', 'umask', 'fdopen')", + ]) + + @parameterized.expand([(42, 51), ("foo0", "bar0"), param("foo1", "bar1")]) + @mock.patch("os.fdopen") + @mock.patch("os.umask") + def test_multiple_function_patch_decorator(self, foo, bar, mock_umask, + mock_fdopen): + missing_tests.remove("test_multiple_function_patch_decorator" + "(%r, %r, %r, %r)" % + (foo, bar, mock_umask._mock_name, + mock_fdopen._mock_name)) + + class TestParamerizedOnTestCase(TestCase): expect([ "test_on_TestCase('foo0', bar=None)",
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "mock", "nose2", "pytest", "unittest2" ], "pre_install": [], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 iniconfig==2.1.0 linecache2==1.0.0 mock==5.2.0 nose==1.3.7 nose2==0.15.1 packaging==24.2 -e git+https://github.com/wolever/parameterized.git@4520ef06da539a9780cef4e6d59a3966c0c6de86#egg=parameterized pluggy==1.5.0 pytest==8.3.5 six==1.17.0 tomli==2.2.1 traceback2==1.4.0 unittest2==1.1.0
name: parameterized channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - linecache2==1.0.0 - mock==5.2.0 - nose==1.3.7 - nose2==0.15.1 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - six==1.17.0 - tomli==2.2.1 - traceback2==1.4.0 - unittest2==1.1.0 prefix: /opt/conda/envs/parameterized
[ "parameterized/test.py::TestParameterizedExpandWithMockPatchForClass::test_multiple_function_patch_decorator_0", "parameterized/test.py::TestParameterizedExpandWithMockPatchForClass::test_multiple_function_patch_decorator_1_foo0", "parameterized/test.py::TestParameterizedExpandWithMockPatchForClass::test_multiple_function_patch_decorator_2_foo1", "parameterized/test.py::TestParameterizedExpandWithMockPatchForClass::test_one_function_patch_decorator_0", "parameterized/test.py::TestParameterizedExpandWithMockPatchForClass::test_one_function_patch_decorator_1_foo0", "parameterized/test.py::TestParameterizedExpandWithMockPatchForClass::test_one_function_patch_decorator_2_foo1" ]
[ "parameterized/test.py::TestParameterizedExpandDocstring::test_custom_doc_func_0_foo", "parameterized/test.py::TestParameterizedExpandDocstring::test_default_values_get_correct_value_0_foo", "parameterized/test.py::TestParameterizedExpandDocstring::test_empty_docstring_0_foo", "parameterized/test.py::TestParameterizedExpandDocstring::test_multiline_documentation_0_foo", "parameterized/test.py::TestParameterizedExpandDocstring::test_single_line_docstring_0_foo", "parameterized/test.py::TestParameterizedExpandDocstring::test_unicode_docstring_0_foo", "parameterized/test.py::TestParameterizedExpandDocstring::test_with_leading_newline_0_foo" ]
[ "parameterized/test.py::TestParameterizedExpandWithNoMockPatchForClass::test_multiple_function_patch_decorator_0", "parameterized/test.py::TestParameterizedExpandWithNoMockPatchForClass::test_multiple_function_patch_decorator_1_foo0", "parameterized/test.py::TestParameterizedExpandWithNoMockPatchForClass::test_multiple_function_patch_decorator_2_foo1", "parameterized/test.py::TestParameterizedExpandWithNoMockPatchForClass::test_one_function_patch_decorator_0", "parameterized/test.py::TestParameterizedExpandWithNoMockPatchForClass::test_one_function_patch_decorator_1_foo0", "parameterized/test.py::TestParameterizedExpandWithNoMockPatchForClass::test_one_function_patch_decorator_2_foo1", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase2_custom_name_42", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase2_custom_name_foo0", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase2_custom_name_foo1", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase2_custom_name_foo2", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase_0", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase_1_foo0", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase_2_foo1", "parameterized/test.py::TestParamerizedOnTestCase::test_on_TestCase_3_foo2", "parameterized/test.py::test_warns_when_using_parameterized_with_TestCase", "parameterized/test.py::test_helpful_error_on_invalid_parameters", "parameterized/test.py::test_helpful_error_on_non_iterable_input", "parameterized/test.py::TestOldStyleClass::test_old_style_classes_0_foo", "parameterized/test.py::TestOldStyleClass::test_old_style_classes_1_bar" ]
[]
BSD-2-Clause
2,253
[ "parameterized/parameterized.py", "tox.ini" ]
[ "parameterized/parameterized.py", "tox.ini" ]
melexis__warnings-plugin-56
1a913d2e8b2d770ed4ec98eeb4238893d69ef66d
2018-03-05 16:11:19
f0767be9f616ff41982ee3858ffe99554882cee7
diff --git a/.travis.yml b/.travis.yml index ecc0bf8..7df4c41 100644 --- a/.travis.yml +++ b/.travis.yml @@ -55,6 +55,19 @@ notifications: on_failure: always deploy: + # test pypi + - provider: pypi + distributions: sdist bdist_wheel + server: https://test.pypi.org/legacy/ + user: bavo.van.achte + password: + secure: cKCBgEUOSUnlPbOxHCrXENlVgdMGnjNC+7nnutp/1xF8VEDF3aj9Br4u5LKrAYs0sm0AvnCyjhPvfKGPwyRDdfGBjoG06G+L+1hcfpgBlItmdSBqB8RxMm2B76si1ZlVI9gC58hlk/agFr2vik/mLXsH23rafB/2UwfB3ItTTx2J14xC5jlaqYR/srMJUi8YO5z6mGGLokfcz0KhYUHegOna38UcARM8rkAC2Je0xrPKZMlCoTI84dqwnFPW4zn3g/B5s3s18gmZu4fE4+J1g0PNMvxhbDP1TIBzPSWXLBv+YPSKrIT6+Q4R/kfDJFzLn3SmDDnNOpD/OC8ssqVJOcQL3HhKQ7EAcxX9W+/Rt7mIpdJdDXohiPrBl9EdRYbhB+KiPeo/dekAV6loUP/8cHuEgjcW/gE8t+HIqWsa5SO9yK7Sz8Ym+0ENdzS1df0iPOj2ebR3kb1iwINdFi7zIG6Utvlf7w1A2Qtx1xfI2+woPU+GOgQrpwdw64Wl1uo4l0kqTpFkytIG7BEVWC+zPPzqddi+3Ulf9AkWSjNDTqYafxZ9oqBJ5q7WPH8zyPQHotcHbnziTAnv7qRa+CTFLeME/KXNT8egToLK75G367lANTFIhMm8eSDS7wAxFWHacq8j68wNb38Yj1Rv1WMHQh14sxOkzQ4hVEV0xYY7Bj8= + before_deploy: + - "env SETUP_TOOLS_SCM_PRETEND_VERSION='{next_version}.dev{distance}'" + on: + branch: master + python: 3.6 + tags: false # production pypi - provider: pypi distributions: sdist bdist_wheel diff --git a/src/mlx/warnings.py b/src/mlx/warnings.py index 7f23c10..894f799 100644 --- a/src/mlx/warnings.py +++ b/src/mlx/warnings.py @@ -213,6 +213,7 @@ class WarningsPlugin: self.warn_min = 0 self.warn_max = 0 self.count = 0 + self.printout = False def activate_checker(self, checker): ''' @@ -244,6 +245,8 @@ class WarningsPlugin: if len(self.checkerList) == 0: print("No checkers activated. Please use activate_checker function") else: + if self.printout: + print(content) for name, checker in self.checkerList.items(): checker.check(content) @@ -309,6 +312,16 @@ class WarningsPlugin: return 0 + def toggle_printout(self, printout): + ''' Toggle printout of all the parsed content + + Useful for command input where we want to print content as well + + Args: + printout: True enables the printout, False provides more silent mode + ''' + self.printout = printout + def warnings_wrapper(args): parser = argparse.ArgumentParser(prog='mlx-warnings') @@ -319,6 +332,8 @@ def warnings_wrapper(args): parser.add_argument('-v', '--verbose', dest='verbose', action='store_true') parser.add_argument('--command', dest='command', action='store_true', help='Treat program arguments as command to execute to obtain data') + parser.add_argument('--ignore-retval', dest='ignore', action='store_true', + help='Ignore return value of the executed command') parser.add_argument('-m', '--maxwarnings', type=int, required=False, default=0, help='Maximum amount of warnings accepted') parser.add_argument('--minwarnings', type=int, required=False, default=0, @@ -338,7 +353,11 @@ def warnings_wrapper(args): cmd = args.logfile if args.flags: cmd.extend(args.flags) - warnings_command(warnings, cmd) + warnings.toggle_printout(True) + retval = warnings_command(warnings, cmd) + + if (not args.ignore) and (retval != 0): + return retval else: warnings_logfile(warnings, args.logfile) @@ -347,6 +366,24 @@ def warnings_wrapper(args): def warnings_command(warnings, cmd): + ''' Execute command to obtain input for parsing for warnings + + Usually log files are output of the commands. To avoid this additional step + this function runs a command instead and parses the stderr and stdout of the + command for warnings. 
+ + Args: + warnings (WarningsPlugin): Object for warnings where errors should be logged + cmd: Command list, which should be executed to obtain input for parsing + ignore: Flag to ignore return value of the command + + Return: + retval: Return value of executed command + + Raises: + OSError: When program is not installed. + ''' + try: print("Executing: ", end='') print(cmd) @@ -356,19 +393,16 @@ def warnings_command(warnings, cmd): # Check stdout if out: try: - print(out.decode(encoding="utf-8")) warnings.check(out.decode(encoding="utf-8")) except AttributeError as e: warnings.check(out) - print(out) # Check stderr if err: try: warnings.check(err.decode(encoding="utf-8")) - print(err.decode(encoding="utf-8"), file=sys.stderr) except AttributeError as e: warnings.check(err) - print(err, file=sys.stderr) + return proc.returncode except OSError as e: if e.errno == os.errno.ENOENT: print("It seems like program " + str(cmd) + " is not installed.")
Report command return value by default If the command exits with a non-zero value, we just check whether there are no warnings, and we might mark a field as passed. Confirm the return value of the command and use that as the default return value. Provide a flag to disregard that.
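As a rough illustration of the behaviour requested above (the function name, signature, and the `check_output` callback are assumptions for this sketch, not the plugin's real interface), a command runner can feed stdout/stderr to the warning checks while still propagating the command's own exit status unless an ignore flag is set:

```python
import subprocess


def run_and_check(cmd, check_output, ignore_retval=False):
    # Execute the command, hand its output to the warning checker, and by
    # default report the command's own exit status instead of swallowing it.
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if out:
        check_output(out.decode('utf-8'))
    if err:
        check_output(err.decode('utf-8'))
    if not ignore_retval and proc.returncode != 0:
        return proc.returncode
    return 0


# 'false' produces no warnings but exits non-zero, so the failure is reported;
# passing ignore_retval=True restores the old "warnings only" behaviour.
assert run_and_check(['false'], check_output=print) == 1
assert run_and_check(['false'], check_output=print, ignore_retval=True) == 0
```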
melexis/warnings-plugin
diff --git a/tests/test_integration.py b/tests/test_integration.py index 4500dfb..9bc771a 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -50,6 +50,14 @@ class TestIntegration(TestCase): with self.assertRaises(OSError): warnings_wrapper(['--sphinx', '--command', 'blahahahaha', 'tests/sphinx_single_warning.txt']) + def test_command_revtal_err(self): + retval = warnings_wrapper(['--sphinx', '--command', 'false']) + self.assertEqual(1, retval) + + def test_command_revtal_err_supress(self): + retval = warnings_wrapper(['--sphinx', '--ignore-retval', '--command', 'false']) + self.assertEqual(0, retval) + def test_wildcarded_arguments(self): # note: no shell expansion simulation (e.g. as in windows) retval = warnings_wrapper(['--junit', 'tests/junit*.xml'])
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 2 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-travis-fold" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 coverage==6.2 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work junitparser==3.2.0 -e git+https://github.com/melexis/warnings-plugin.git@1a913d2e8b2d770ed4ec98eeb4238893d69ef66d#egg=mlx.warnings more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-cov==4.0.0 pytest-travis-fold==1.3.0 setuptools-scm==6.4.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: warnings-plugin channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==6.2 - junitparser==3.2.0 - pytest-cov==4.0.0 - pytest-travis-fold==1.3.0 - setuptools-scm==6.4.2 - tomli==1.2.3 prefix: /opt/conda/envs/warnings-plugin
[ "tests/test_integration.py::TestIntegration::test_command_revtal_err", "tests/test_integration.py::TestIntegration::test_command_revtal_err_supress" ]
[]
[ "tests/test_integration.py::TestIntegration::test_command_to_stderr", "tests/test_integration.py::TestIntegration::test_command_with_its_own_arguments", "tests/test_integration.py::TestIntegration::test_faulty_command", "tests/test_integration.py::TestIntegration::test_help", "tests/test_integration.py::TestIntegration::test_max", "tests/test_integration.py::TestIntegration::test_max_but_still_ok", "tests/test_integration.py::TestIntegration::test_min", "tests/test_integration.py::TestIntegration::test_min_but_still_ok", "tests/test_integration.py::TestIntegration::test_no_parser_selection", "tests/test_integration.py::TestIntegration::test_single_argument", "tests/test_integration.py::TestIntegration::test_single_command_argument", "tests/test_integration.py::TestIntegration::test_two_arguments", "tests/test_integration.py::TestIntegration::test_two_command_arguments", "tests/test_integration.py::TestIntegration::test_version", "tests/test_integration.py::TestIntegration::test_wildcarded_arguments" ]
[]
Apache License 2.0
2,254
[ "src/mlx/warnings.py", ".travis.yml" ]
[ "src/mlx/warnings.py", ".travis.yml" ]
grabbles__grabbit-54
546ea88e8c04f263c2fecad2bc7314d59a5c0587
2018-03-06 01:19:44
5a588731d1a4a42a6b67f09ede110d7770845ed0
diff --git a/.coveragerc b/.coveragerc index a72e893..777a95e 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,5 +1,5 @@ [run] -source = - grabbit/ +include = grabbit/ omit = - */tests/* + */tests/* + */external/* diff --git a/.travis.yml b/.travis.yml index 57590d2..96455a0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,7 @@ install: - pip install -e '.' script: - - PYTHONPATH=$PWD coverage run `which py.test` grabbit - - py.test --cov-report term-missing --cov=grabbit - - cd examples; PYTHONPATH=.. runipy *.ipynb # test example notebooks to run and not fail + - py.test --pyargs grabbit --cov-report term-missing --cov=grabbit after_success: - coveralls diff --git a/grabbit/core.py b/grabbit/core.py index 9cb60dc..6f5ba4e 100644 --- a/grabbit/core.py +++ b/grabbit/core.py @@ -122,29 +122,50 @@ class File(object): class Domain(object): def __init__(self, name, config, root): + """ + A set of rules that applies to one or more directories + within a Layout. + + Args: + name (str): The name of the Domain. + config (dict): The configuration dictionary that defines the + entities and paths for the current domain. + root (str, list): The root directory or directories to which the + Domain's rules applies. Can be either a single path, or a list. + """ self.name = name self.config = config self.root = root self.entities = {} self.files = [] - self.filtering_regex = {} self.path_patterns = [] - if 'index' in config: - self.filtering_regex = config['index'] - if self.filtering_regex.get('include') and \ - self.filtering_regex.get('exclude'): - raise ValueError("You can only define either include or " - "exclude regex, not both.") + self.include = listify(self.config.get('include', [])) + self.exclude = listify(self.config.get('exclude', [])) + + if self.include and self.exclude: + raise ValueError("The 'include' and 'exclude' arguments cannot " + "both be set. Please pass at most one of these " + "for domain '%s'." % self.name) if 'default_path_patterns' in config: self.path_patterns += listify(config['default_path_patterns']) def add_entity(self, ent): + ''' Add an Entity. + + Args: + ent (Entity): The Entity to add. + ''' self.entities[ent.name] = ent def add_file(self, file): + ''' Add a file to tracking. + + Args: + file (File): The File to add to tracking. + ''' self.files.append(file) @@ -281,16 +302,20 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): def __init__(self, path, config=None, index=None, dynamic_getters=False, absolute_paths=True, regex_search=False, entity_mapper=None, - path_patterns=None, config_filename='layout.json'): + path_patterns=None, config_filename='layout.json', + include=None, exclude=None): """ A container for all the files and metadata found at the specified path. Args: path (str): The root path of the layout. - config (str, list): The path to the JSON config file that defines - the entities and paths for the current layout. If a list is - provided, treat as several paths to config files, creating - one master config with all of them merged (in order). + config (str, list, dict): A specification of the configuration + file(s) defining domains to use in the Layout. Must be one of: + + - A dictionary containing config information + - A string giving the path to a JSON file containing the config + - A list, where each element is one of the above + index (str): Optional path to a saved index file. 
If a valid value is passed, this index is used to populate Files and Entities, and the normal indexing process (which requires scanning all @@ -327,8 +352,20 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): Every directory will be scanned for this file, and if found, the config file will be read in and added to the list of configs. + include (str, list): A string or list specifying regexes used to + globally filter files when indexing. A file or directory + *must* match at least of the passed values in order to be + retained in the index. Cannot be used together with 'exclude'. + exclude (str, list): A string or list specifying regexes used to + globally filter files when indexing. If a file or directory + *must* matches any of the passed values, it will be dropped + from indexing. Cannot be used together with 'include'. """ + if include is not None and exclude is not None: + raise ValueError("You cannot specify both the include and exclude" + " arguments. Please pass at most one of these.") + self.root = abspath(path) if absolute_paths else path self.entities = OrderedDict() self.files = {} @@ -339,6 +376,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): self.path_patterns = path_patterns if path_patterns else [] self.config_filename = config_filename self.domains = OrderedDict() + self.include = listify(include or []) + self.exclude = listify(exclude or []) if config is not None: for c in listify(config): @@ -413,7 +452,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): filename = f if isinstance(f, six.string_types) else f.path - if os.path.isabs(filename) and filename.startswith(self.root + os.path.sep): + if os.path.isabs(filename) and filename.startswith( + self.root + os.path.sep): # for filenames under the root - analyze relative path to avoid # bringing injustice to the grandkids of some unfortunately named # root directories. 
@@ -422,19 +462,20 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): if domains is None: domains = list(self.domains.keys()) + domains = [self.domains[dom] for dom in domains] + + # Inject the Layout at the first position for global include/exclude + domains.insert(0, self) for dom in domains: - dom = self.domains[dom] # If file matches any include regex, then True - include_regex = dom.filtering_regex.get('include', []) - if include_regex: - for regex in include_regex: + if dom.include: + for regex in dom.include: if re.match(regex, filename): - break - else: - return False + return True + return False else: - # If file matches any excldue regex, then false - for regex in dom.filtering_regex.get('exclude', []): + # If file matches any exclude regex, then False + for regex in dom.exclude: if re.match(regex, filename, flags=re.UNICODE): return False @@ -473,7 +514,13 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): def _get_domains_for_file(self, f): if isinstance(f, File): return f.domains - return [d.name for d in self.domains.values() if f.startswith(d.root)] + domains = [] + for d in self.domains.values(): + for path in listify(d.root): + if f.startswith(path): + domains.append(d.name) + break + return domains def _index_file(self, root, f, domains=None, update_layout=True): @@ -502,8 +549,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): # Only keep Files that match at least one Entity, and all # mandatory Entities - if update_layout and file_ents and not (self.mandatory - - set(file_ents)): + if update_layout and file_ents and not (self.mandatory - + set(file_ents)): self.files[f.path] = f # Bind the File to all of the matching entities for name, tag in f.tags.items(): @@ -552,12 +599,13 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): if self.config_filename in filenames: config_path = os.path.join(root, self.config_filename) config = json.load(open(config_path, 'r')) - self._load_domain(config) + root = config.get('root', root) + self._load_domain(config, root=root) # Filter Domains if current dir's config file has an # include directive - if 'include' in config: - missing = set(config['include']) - set(domains) + if 'domains' in config: + missing = set(config['domains']) - set(domains) if missing: msg = ("Missing configs '%s' specified in include " "directive of config '%s'. Please make sure " @@ -565,7 +613,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)): "directory %s.") % (missing, config['name'], root) raise ValueError(msg) - domains = config['include'] + domains = config['domains'] domains.append(config['name']) filenames.remove(self.config_filename)
Enable coveralls
grabbles/grabbit
diff --git a/grabbit/tests/specs/test.json b/grabbit/tests/specs/test.json index e0c6630..39ed11a 100644 --- a/grabbit/tests/specs/test.json +++ b/grabbit/tests/specs/test.json @@ -1,8 +1,6 @@ { "name": "test", - "index" : { - "exclude" : [".*derivatives.*"] - }, + "exclude" : [".*derivatives.*"], "entities": [ { "name": "subject", diff --git a/grabbit/tests/specs/test_include.json b/grabbit/tests/specs/test_include.json index f3a87ef..8741d83 100644 --- a/grabbit/tests/specs/test_include.json +++ b/grabbit/tests/specs/test_include.json @@ -1,8 +1,6 @@ { "name": "test_with_includes", - "index" : { - "include" : ["sub-(\\d+)", "ses-.*", "func", "fmap", ".*\\..*"] - }, + "include" : ["sub-(\\d+)", "ses-.*", "func", "fmap", ".*\\..*"], "entities": [ { "name": "subject", diff --git a/grabbit/tests/specs/test_with_mapper.json b/grabbit/tests/specs/test_with_mapper.json index 84c3d28..20fca11 100644 --- a/grabbit/tests/specs/test_with_mapper.json +++ b/grabbit/tests/specs/test_with_mapper.json @@ -1,8 +1,6 @@ { "name": "test_with_mapper", - "index" : { - "exclude" : [".*derivatives.*"] - }, + "exclude" : [".*derivatives.*"], "entities": [ { "name": "subject", diff --git a/grabbit/tests/test_core.py b/grabbit/tests/test_core.py index c0087ee..e85cbe8 100644 --- a/grabbit/tests/test_core.py +++ b/grabbit/tests/test_core.py @@ -1,6 +1,7 @@ import pytest from grabbit import File, Entity, Layout, Tag, merge_layouts import os +from os.path import join import posixpath as psp import tempfile import json @@ -14,17 +15,17 @@ def file(tmpdir): testfile = 'sub-03_ses-2_task-rest_acq-fullbrain_run-2_bold.nii.gz' fn = tmpdir.mkdir("tmp").join(testfile) fn.write('###') - return File(os.path.join(str(fn))) + return File(join(str(fn))) @pytest.fixture(scope='module', params=['local', 'hdfs']) def bids_layout(request): if request.param == 'local': - root = os.path.join(DIRNAME, 'data', '7t_trt') + root = join(DIRNAME, 'data', '7t_trt') # note about test.json: # in this test.json 'subject' regex was left to contain possible # leading 0; the other fields (run, session) has leading 0 stripped - config = os.path.join(DIRNAME, 'specs', 'test.json') + config = join(DIRNAME, 'specs', 'test.json') return Layout(root, config, regex_search=True) else: hdfs = pytest.importorskip("hdfs") @@ -39,15 +40,15 @@ def bids_layout(request): @pytest.fixture(scope='module') def stamp_layout(): - root = os.path.join(DIRNAME, 'data', 'valuable_stamps') - config = os.path.join(DIRNAME, 'specs', 'stamps.json') + root = join(DIRNAME, 'data', 'valuable_stamps') + config = join(DIRNAME, 'specs', 'stamps.json') return Layout(root, config, config_filename='dir_config.json') @pytest.fixture(scope='module') def layout_include(request): - root = os.path.join(DIRNAME, 'data', '7t_trt') - config = os.path.join(DIRNAME, 'specs', 'test_include.json') + root = join(DIRNAME, 'data', '7t_trt') + config = join(DIRNAME, 'specs', 'test_include.json') return Layout(root, config, regex_search=True) @@ -99,7 +100,7 @@ class TestEntity: def test_matches(self, tmpdir): filename = "aardvark-4-reporting-for-duty.txt" tmpdir.mkdir("tmp").join(filename).write("###") - f = File(os.path.join(str(tmpdir), filename)) + f = File(join(str(tmpdir), filename)) e = Entity('avaricious', 'aardvark-(\d+)') e.matches(f, update_file=True) assert 'avaricious' in f.entities @@ -134,15 +135,37 @@ class TestLayout: assert isinstance(bids_layout.mandatory, set) assert not bids_layout.dynamic_getters + def test_init_with_include_arg(self, bids_layout): + root = join(DIRNAME, 'data', 
'7t_trt') + config = join(DIRNAME, 'specs', 'test.json') + layout = Layout(root, config, regex_search=True, include='sub-\d*') + target = join(root, "dataset_description.json") + assert target in bids_layout.files + assert target not in layout.files + assert join(root, "sub-01", "sub-01_sessions.tsv") in layout.files + with pytest.raises(ValueError): + layout = Layout(root, config, include='sub-\d*', exclude="meh") + + def test_init_with_exclude_arg(self, bids_layout): + root = join(DIRNAME, 'data', '7t_trt') + config = join(DIRNAME, 'specs', 'test.json') + layout = Layout(root, config, regex_search=True, exclude='sub-\d*') + target = join(root, "dataset_description.json") + assert target in bids_layout.files + assert target in layout.files + sub_file = join(root, "sub-01", "sub-01_sessions.tsv") + assert sub_file in bids_layout.files + assert sub_file not in layout.files + def test_absolute_paths(self, bids_layout): result = bids_layout.get(subject=1, run=1, session=1) assert result # that we got some entries assert all([os.path.isabs(f.filename) for f in result]) if not hasattr(bids_layout, '_hdfs_client'): - root = os.path.join(DIRNAME, 'data', '7t_trt') + root = join(DIRNAME, 'data', '7t_trt') root = os.path.relpath(root) - config = os.path.join(DIRNAME, 'specs', 'test.json') + config = join(DIRNAME, 'specs', 'test.json') layout = Layout(root, config, absolute_paths=False) @@ -174,8 +197,8 @@ class TestLayout: assert all([os.path.isabs(f.filename) for f in result]) @pytest.mark.parametrize('data_dir, config', - [(os.path.join(DIRNAME, 'data', '7t_trt'), - os.path.join(DIRNAME, 'specs', 'test.json')), + [(join(DIRNAME, 'data', '7t_trt'), + join(DIRNAME, 'specs', 'test.json')), (psp.join('hdfs://localhost:9000/grabbit/test/', 'data', '7t_trt'), psp.join('hdfs://localhost:9000/grabbit/test/', @@ -244,7 +267,7 @@ class TestLayout: nearest = bids_layout.get_nearest( result, type='sessions', extensions='tsv', ignore_strict_entities=['type']) - target = os.path.join('7t_trt', 'sub-01', 'sub-01_sessions.tsv') + target = join('7t_trt', 'sub-01', 'sub-01_sessions.tsv') assert target in nearest nearest = bids_layout.get_nearest( result, extensions='tsv', all_=True, @@ -257,9 +280,9 @@ class TestLayout: assert nearest[0].subject == '01' def test_index_regex(self, bids_layout, layout_include): - targ = os.path.join(bids_layout.root, 'derivatives', 'excluded.json') + targ = join(bids_layout.root, 'derivatives', 'excluded.json') assert targ not in bids_layout.files - targ = os.path.join(layout_include.root, 'models', + targ = join(layout_include.root, 'models', 'excluded_model.json') assert targ not in layout_include.files @@ -284,13 +307,13 @@ class TestLayout: os.unlink(tmp) def test_load_index(self, bids_layout): - f = os.path.join(DIRNAME, 'misc', 'index.json') + f = join(DIRNAME, 'misc', 'index.json') bids_layout.load_index(f) assert bids_layout.unique('subject') == ['01'] assert len(bids_layout.files) == 24 # Test with reindexing - f = os.path.join(DIRNAME, 'misc', 'index.json') + f = join(DIRNAME, 'misc', 'index.json') bids_layout.load_index(f, reindex=True) assert bids_layout.unique('subject') == ['01'] assert len(bids_layout.files) == 24 @@ -305,8 +328,8 @@ class TestLayout: def hash_file(self, file): return str(hash(file.path)) + '.hsh' - root = os.path.join(DIRNAME, 'data', '7t_trt') - config = os.path.join(DIRNAME, 'specs', + root = join(DIRNAME, 'data', '7t_trt') + config = join(DIRNAME, 'specs', 'test_with_mapper.json') # Test with external mapper @@ -337,10 +360,11 @@ class TestLayout: 
def test_excludes(self, tmpdir): root = tmpdir.mkdir("ohmyderivatives").mkdir("ds") - config = os.path.join(DIRNAME, 'specs', 'test.json') + config = join(DIRNAME, 'specs', 'test.json') layout = Layout(str(root), config, regex_search=True) assert layout._check_inclusions(str(root.join("ohmyimportantfile"))) - assert not layout._check_inclusions(str(root.join("badbadderivatives"))) + assert not layout._check_inclusions( + str(root.join("badbadderivatives"))) def test_multiple_domains(self, stamp_layout): layout = stamp_layout.clone()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 3 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "runipy" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirement.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
async-generator==1.10 attrs==22.2.0 backcall==0.2.0 bleach==4.1.0 certifi==2021.5.30 decorator==5.1.1 defusedxml==0.7.1 entrypoints==0.4 -e git+https://github.com/grabbles/grabbit.git@546ea88e8c04f263c2fecad2bc7314d59a5c0587#egg=grabbit importlib-metadata==4.8.3 iniconfig==1.1.1 ipykernel==5.5.6 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nest-asyncio==1.6.0 packaging==21.3 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 python-dateutil==2.9.0.post0 pyzmq==25.1.2 runipy==0.1.5 six==1.17.0 testpath==0.6.0 tomli==1.2.3 tornado==6.1 traitlets==4.3.3 typing_extensions==4.1.1 wcwidth==0.2.13 webencodings==0.5.1 zipp==3.6.0
name: grabbit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - async-generator==1.10 - attrs==22.2.0 - backcall==0.2.0 - bleach==4.1.0 - decorator==5.1.1 - defusedxml==0.7.1 - entrypoints==0.4 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - ipykernel==5.5.6 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nest-asyncio==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pyzmq==25.1.2 - runipy==0.1.5 - six==1.17.0 - testpath==0.6.0 - tomli==1.2.3 - tornado==6.1 - traitlets==4.3.3 - typing-extensions==4.1.1 - wcwidth==0.2.13 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/grabbit
[ "grabbit/tests/test_core.py::TestLayout::test_init_with_include_arg[local]", "grabbit/tests/test_core.py::TestLayout::test_init_with_exclude_arg[local]", "grabbit/tests/test_core.py::TestLayout::test_index_regex[local]", "grabbit/tests/test_core.py::TestLayout::test_excludes" ]
[]
[ "grabbit/tests/test_core.py::TestFile::test_init", "grabbit/tests/test_core.py::TestFile::test_matches", "grabbit/tests/test_core.py::TestFile::test_named_tuple", "grabbit/tests/test_core.py::TestEntity::test_init", "grabbit/tests/test_core.py::TestEntity::test_matches", "grabbit/tests/test_core.py::TestEntity::test_unique_and_count", "grabbit/tests/test_core.py::TestEntity::test_add_file", "grabbit/tests/test_core.py::TestLayout::test_init[local]", "grabbit/tests/test_core.py::TestLayout::test_absolute_paths[local]", "grabbit/tests/test_core.py::TestLayout::test_querying[local]", "grabbit/tests/test_core.py::TestLayout::test_natsort[local]", "grabbit/tests/test_core.py::TestLayout::test_unique_and_count[local]", "grabbit/tests/test_core.py::TestLayout::test_get_nearest[local]", "grabbit/tests/test_core.py::TestLayout::test_save_index[local]", "grabbit/tests/test_core.py::TestLayout::test_load_index[local]", "grabbit/tests/test_core.py::TestLayout::test_clone[local]", "grabbit/tests/test_core.py::TestLayout::test_parse_file_entities[local]", "grabbit/tests/test_core.py::test_merge_layouts[local]", "grabbit/tests/test_core.py::TestLayout::test_dynamic_getters[/grabbit/grabbit/tests/data/7t_trt-/grabbit/grabbit/tests/specs/test.json]", "grabbit/tests/test_core.py::TestLayout::test_entity_mapper", "grabbit/tests/test_core.py::TestLayout::test_multiple_domains", "grabbit/tests/test_core.py::TestLayout::test_get_by_domain" ]
[]
MIT License
2,255
[ ".travis.yml", "grabbit/core.py", ".coveragerc" ]
[ ".travis.yml", "grabbit/core.py", ".coveragerc" ]
melexis__warnings-plugin-57
f0767be9f616ff41982ee3858ffe99554882cee7
2018-03-06 09:23:42
f0767be9f616ff41982ee3858ffe99554882cee7
diff --git a/src/mlx/warnings.py b/src/mlx/warnings.py index 894f799..165c447 100644 --- a/src/mlx/warnings.py +++ b/src/mlx/warnings.py @@ -359,7 +359,9 @@ def warnings_wrapper(args): if (not args.ignore) and (retval != 0): return retval else: - warnings_logfile(warnings, args.logfile) + retval = warnings_logfile(warnings, args.logfile) + if retval != 0: + return retval warnings.return_count() return warnings.return_check_limits() @@ -383,7 +385,6 @@ def warnings_command(warnings, cmd): Raises: OSError: When program is not installed. ''' - try: print("Executing: ", end='') print(cmd) @@ -410,6 +411,16 @@ def warnings_command(warnings, cmd): def warnings_logfile(warnings, log): + ''' Parse logfile for warnings + + Args: + warnings (WarningsPlugin): Object for warnings where errors should be logged + log: Logfile for parsing + + Return: + 0: Log files existed and are parsed successfully + 1: Log files did not exist + ''' # args.logfile doesn't necessarily contain wildcards, but just to be safe, we # assume it does, and try to expand them. # This mechanism is put in place to allow wildcards to be passed on even when @@ -417,9 +428,15 @@ def warnings_logfile(warnings, log): # so that the script can be used in the exact same way even when moving from one # OS to another. for file_wildcard in log: - for logfile in glob.glob(file_wildcard): - with open(logfile, 'r') as loghandle: - warnings.check(loghandle.read()) + if glob.glob(file_wildcard): + for logfile in glob.glob(file_wildcard): + with open(logfile, 'r') as loghandle: + warnings.check(loghandle.read()) + else: + print("FILE: %s does not exist" % file_wildcard) + return 1 + + return 0 def main():
Check if we could open a file Now that we have a command option there is possibility that we missed the `--command` flag. In that moment plugin should tell us that files X, X, X do not exist and return non-0 value. Example that passes as it would be all ok (I assume also simpler stuff with empty and non existent log file passes as well): ``` mlx-warnings --sphinx --maxwarnings 0 --minwarnings 0 make -C example html ``` FYI: @SteinHeselmans @bavovanachte
melexis/warnings-plugin
diff --git a/tests/test_integration.py b/tests/test_integration.py index 9bc771a..e9556a3 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -30,6 +30,10 @@ class TestIntegration(TestCase): retval = warnings_wrapper(['--junit', 'tests/junit_single_fail.xml', 'tests/junit_double_fail.xml']) self.assertEqual(1 + 2, retval) + def test_non_existing_logfile(self): + retval = warnings_wrapper(['--sphinx', 'not-exist.log']) + self.assertEqual(1, retval) + def test_single_command_argument(self): retval = warnings_wrapper(['--junit', '--command', 'cat', 'tests/junit_single_fail.xml']) self.assertEqual(1, retval)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-travis-fold", "pytest-cov", "mock", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 importlib-metadata==4.8.3 iniconfig==1.1.1 junitparser==3.2.0 -e git+https://github.com/melexis/warnings-plugin.git@f0767be9f616ff41982ee3858ffe99554882cee7#egg=mlx.warnings mock==5.2.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-travis-fold==1.3.0 setuptools-scm==6.4.2 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: warnings-plugin channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - junitparser==3.2.0 - mock==5.2.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-travis-fold==1.3.0 - setuptools-scm==6.4.2 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/warnings-plugin
[ "tests/test_integration.py::TestIntegration::test_non_existing_logfile" ]
[]
[ "tests/test_integration.py::TestIntegration::test_command_revtal_err", "tests/test_integration.py::TestIntegration::test_command_revtal_err_supress", "tests/test_integration.py::TestIntegration::test_command_to_stderr", "tests/test_integration.py::TestIntegration::test_command_with_its_own_arguments", "tests/test_integration.py::TestIntegration::test_faulty_command", "tests/test_integration.py::TestIntegration::test_help", "tests/test_integration.py::TestIntegration::test_max", "tests/test_integration.py::TestIntegration::test_max_but_still_ok", "tests/test_integration.py::TestIntegration::test_min", "tests/test_integration.py::TestIntegration::test_min_but_still_ok", "tests/test_integration.py::TestIntegration::test_no_parser_selection", "tests/test_integration.py::TestIntegration::test_single_argument", "tests/test_integration.py::TestIntegration::test_single_command_argument", "tests/test_integration.py::TestIntegration::test_two_arguments", "tests/test_integration.py::TestIntegration::test_two_command_arguments", "tests/test_integration.py::TestIntegration::test_version", "tests/test_integration.py::TestIntegration::test_wildcarded_arguments" ]
[]
Apache License 2.0
2,257
[ "src/mlx/warnings.py" ]
[ "src/mlx/warnings.py" ]
pytorch__ignite-92
0b4aec7629390ed797782fede8e3f11fe7c549f7
2018-03-06 16:49:31
0b4aec7629390ed797782fede8e3f11fe7c549f7
jasonkriss: @alykhantejani I just added MeanPairwiseDistance to this PR.
diff --git a/ignite/metrics/__init__.py b/ignite/metrics/__init__.py index 3902c5de..2cc4e1ff 100644 --- a/ignite/metrics/__init__.py +++ b/ignite/metrics/__init__.py @@ -1,3 +1,8 @@ +from .binary_accuracy import BinaryAccuracy from .categorical_accuracy import CategoricalAccuracy +from .mean_absolute_error import MeanAbsoluteError +from .mean_pairwise_distance import MeanPairwiseDistance from .mean_squared_error import MeanSquaredError from .metric import Metric +from .root_mean_squared_error import RootMeanSquaredError +from .top_k_categorical_accuracy import TopKCategoricalAccuracy diff --git a/ignite/metrics/binary_accuracy.py b/ignite/metrics/binary_accuracy.py new file mode 100644 index 00000000..3b33a4db --- /dev/null +++ b/ignite/metrics/binary_accuracy.py @@ -0,0 +1,28 @@ +from __future__ import division + +import torch + +from .metric import Metric +from ignite.exceptions import NotComputableError + + +class BinaryAccuracy(Metric): + """ + Calculates the binary accuracy. + + `update` must receive output of the form (y_pred, y). + """ + def reset(self): + self._num_correct = 0 + self._num_examples = 0 + + def update(self, output): + y_pred, y = output + correct = torch.eq(torch.round(y_pred).type(torch.LongTensor), y) + self._num_correct += torch.sum(correct) + self._num_examples += correct.shape[0] + + def compute(self): + if self._num_examples == 0: + raise NotComputableError('BinaryAccuracy must have at least one example before it can be computed') + return self._num_correct / self._num_examples diff --git a/ignite/metrics/mean_absolute_error.py b/ignite/metrics/mean_absolute_error.py new file mode 100644 index 00000000..88c620de --- /dev/null +++ b/ignite/metrics/mean_absolute_error.py @@ -0,0 +1,28 @@ +from __future__ import division + +import torch + +from .metric import Metric +from ignite.exceptions import NotComputableError + + +class MeanAbsoluteError(Metric): + """ + Calculates the mean absolute error. + + `update` must receive output of the form (y_pred, y). + """ + def reset(self): + self._sum_of_absolute_errors = 0.0 + self._num_examples = 0 + + def update(self, output): + y_pred, y = output + absolute_errors = torch.abs(y_pred - y.view_as(y_pred)) + self._sum_of_absolute_errors += torch.sum(absolute_errors) + self._num_examples += y.shape[0] + + def compute(self): + if self._num_examples == 0: + raise NotComputableError('MeanAbsoluteError must have at least one example before it can be computed') + return self._sum_of_absolute_errors / self._num_examples diff --git a/ignite/metrics/mean_pairwise_distance.py b/ignite/metrics/mean_pairwise_distance.py new file mode 100644 index 00000000..b18be661 --- /dev/null +++ b/ignite/metrics/mean_pairwise_distance.py @@ -0,0 +1,34 @@ +from __future__ import division + +import torch +from torch.nn.functional import pairwise_distance + +from .metric import Metric +from ignite.exceptions import NotComputableError + + +class MeanPairwiseDistance(Metric): + """ + Calculates the mean pairwise distance. + + `update` must receive output of the form (y_pred, y). 
+ """ + def __init__(self, p=2, eps=1e-6): + super(MeanPairwiseDistance, self).__init__() + self._p = p + self._eps = eps + + def reset(self): + self._sum_of_distances = 0.0 + self._num_examples = 0 + + def update(self, output): + y_pred, y = output + distances = pairwise_distance(y_pred, y, p=self._p, eps=self._eps) + self._sum_of_distances += torch.sum(distances) + self._num_examples += y.shape[0] + + def compute(self): + if self._num_examples == 0: + raise NotComputableError('MeanAbsoluteError must have at least one example before it can be computed') + return self._sum_of_distances / self._num_examples diff --git a/ignite/metrics/root_mean_squared_error.py b/ignite/metrics/root_mean_squared_error.py new file mode 100644 index 00000000..47e9f396 --- /dev/null +++ b/ignite/metrics/root_mean_squared_error.py @@ -0,0 +1,18 @@ +from __future__ import division +import math + +import torch + +from .mean_squared_error import MeanSquaredError +from ignite.exceptions import NotComputableError + + +class RootMeanSquaredError(MeanSquaredError): + """ + Calculates the root mean squared error. + + `update` must receive output of the form (y_pred, y). + """ + def compute(self): + mse = super(RootMeanSquaredError, self).compute() + return math.sqrt(mse) diff --git a/ignite/metrics/top_k_categorical_accuracy.py b/ignite/metrics/top_k_categorical_accuracy.py new file mode 100644 index 00000000..1ee90353 --- /dev/null +++ b/ignite/metrics/top_k_categorical_accuracy.py @@ -0,0 +1,34 @@ +from __future__ import division + +import torch + +from .metric import Metric +from ignite.exceptions import NotComputableError + + +class TopKCategoricalAccuracy(Metric): + """ + Calculates the top-k categorical accuracy. + + `update` must receive output of the form (y_pred, y). + """ + def __init__(self, k=5): + super(TopKCategoricalAccuracy, self).__init__() + self._k = k + + def reset(self): + self._num_correct = 0 + self._num_examples = 0 + + def update(self, output): + y_pred, y = output + sorted_indices = torch.topk(y_pred, self._k, dim=1)[1] + expanded_y = y.view(-1, 1).expand(-1, self._k) + correct = torch.sum(torch.eq(sorted_indices, expanded_y), dim=1) + self._num_correct += torch.sum(correct) + self._num_examples += correct.shape[0] + + def compute(self): + if self._num_examples == 0: + raise NotComputableError('TopKCategoricalAccuracy must have at least one example before it can be computed') + return self._num_correct / self._num_examples
Add pairwise distance to Metrics I think in evaluation of regression task, pairwise distance, especially norm-2 distance, as in `torch.nn.functional.pairwise_distance` is at least as frequently used as MSE, which is actually mostly used as loss rather than evaluation metrics. Therefore, I was wondering if it is worthy of being added to Metrics package as a commonly used metrics.
pytorch/ignite
diff --git a/tests/ignite/metrics/test_binary_accuracy.py b/tests/ignite/metrics/test_binary_accuracy.py new file mode 100644 index 00000000..4e083ec3 --- /dev/null +++ b/tests/ignite/metrics/test_binary_accuracy.py @@ -0,0 +1,25 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import BinaryAccuracy +import pytest +import torch + + +def test_zero_div(): + acc = BinaryAccuracy() + with pytest.raises(NotComputableError): + acc.compute() + + +def test_compute(): + acc = BinaryAccuracy() + + y_pred = torch.FloatTensor([0.2, 0.4, 0.6, 0.8]) + y = torch.ones(4).type(torch.LongTensor) + acc.update((y_pred, y)) + assert acc.compute() == 0.5 + + acc.reset() + y_pred = torch.FloatTensor([0.2, 0.7, 0.8, 0.9]) + y = torch.ones(4).type(torch.LongTensor) + acc.update((y_pred, y)) + assert acc.compute() == 0.75 diff --git a/tests/ignite/metrics/test_mean_absolute_error.py b/tests/ignite/metrics/test_mean_absolute_error.py new file mode 100644 index 00000000..f2bd60bf --- /dev/null +++ b/tests/ignite/metrics/test_mean_absolute_error.py @@ -0,0 +1,25 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import MeanAbsoluteError +import pytest +import torch + + +def test_zero_div(): + mae = MeanAbsoluteError() + with pytest.raises(NotComputableError): + mae.compute() + + +def test_compute(): + mae = MeanAbsoluteError() + + y_pred = torch.Tensor([[2.0], [-2.0]]) + y = torch.zeros(2) + mae.update((y_pred, y)) + assert mae.compute() == 2.0 + + mae.reset() + y_pred = torch.Tensor([[3.0], [-3.0]]) + y = torch.zeros(2) + mae.update((y_pred, y)) + assert mae.compute() == 3.0 diff --git a/tests/ignite/metrics/test_mean_pairwise_distance.py b/tests/ignite/metrics/test_mean_pairwise_distance.py new file mode 100644 index 00000000..45c69fdc --- /dev/null +++ b/tests/ignite/metrics/test_mean_pairwise_distance.py @@ -0,0 +1,26 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import MeanPairwiseDistance +import pytest +from pytest import approx +import torch + + +def test_zero_div(): + mpd = MeanPairwiseDistance() + with pytest.raises(NotComputableError): + mpd.compute() + + +def test_compute(): + mpd = MeanPairwiseDistance() + + y_pred = torch.Tensor([[3.0, 4.0], [-3.0, -4.0]]) + y = torch.zeros(2, 2) + mpd.update((y_pred, y)) + assert mpd.compute() == approx(5.0) + + mpd.reset() + y_pred = torch.Tensor([[4.0, 4.0, 4.0, 4.0], [-4.0, -4.0, -4.0, -4.0]]) + y = torch.zeros(2, 4) + mpd.update((y_pred, y)) + assert mpd.compute() == approx(8.0) diff --git a/tests/ignite/metrics/test_root_mean_squared_error.py b/tests/ignite/metrics/test_root_mean_squared_error.py new file mode 100644 index 00000000..69408ced --- /dev/null +++ b/tests/ignite/metrics/test_root_mean_squared_error.py @@ -0,0 +1,25 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import RootMeanSquaredError +import pytest +import torch + + +def test_zero_div(): + rmse = RootMeanSquaredError() + with pytest.raises(NotComputableError): + rmse.compute() + + +def test_compute(): + rmse = RootMeanSquaredError() + + y_pred = torch.Tensor([[2.0], [-2.0]]) + y = torch.zeros(2) + rmse.update((y_pred, y)) + assert rmse.compute() == 2.0 + + rmse.reset() + y_pred = torch.Tensor([[3.0], [-3.0]]) + y = torch.zeros(2) + rmse.update((y_pred, y)) + assert rmse.compute() == 3.0 diff --git a/tests/ignite/metrics/test_top_k_categorical_accuracy.py b/tests/ignite/metrics/test_top_k_categorical_accuracy.py new file mode 100644 index 00000000..d9164f9f --- /dev/null +++ 
b/tests/ignite/metrics/test_top_k_categorical_accuracy.py @@ -0,0 +1,25 @@ +from ignite.exceptions import NotComputableError +from ignite.metrics import TopKCategoricalAccuracy +import pytest +import torch + + +def test_zero_div(): + acc = TopKCategoricalAccuracy(2) + with pytest.raises(NotComputableError): + acc.compute() + + +def test_compute(): + acc = TopKCategoricalAccuracy(2) + + y_pred = torch.FloatTensor([[0.2, 0.4, 0.6, 0.8], [0.8, 0.6, 0.4, 0.2]]) + y = torch.ones(2).type(torch.LongTensor) + acc.update((y_pred, y)) + assert acc.compute() == 0.5 + + acc.reset() + y_pred = torch.FloatTensor([[0.4, 0.8, 0.2, 0.6], [0.8, 0.6, 0.4, 0.2]]) + y = torch.ones(2).type(torch.LongTensor) + acc.update((y_pred, y)) + assert acc.compute() == 1.0
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_added_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "numpy", "mock", "pytest", "codecov", "pytest-cov", "tqdm", "scikit-learn", "visdom", "torchvision", "tensorboardX", "gym" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 charset-normalizer==2.0.12 cloudpickle==2.2.1 codecov==2.1.13 coverage==6.2 dataclasses==0.8 decorator==4.4.2 enum34==1.1.10 gym==0.26.2 gym-notices==0.0.8 idna==3.10 -e git+https://github.com/pytorch/ignite.git@0b4aec7629390ed797782fede8e3f11fe7c549f7#egg=ignite importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work importlib-resources==5.4.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work joblib==1.1.1 jsonpatch==1.32 jsonpointer==2.3 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work networkx==2.5.1 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work Pillow==8.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work protobuf==4.21.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-cov==4.0.0 requests==2.27.1 scikit-learn==0.24.2 scipy==1.5.4 six==1.17.0 tensorboardX==2.6.2.2 threadpoolctl==3.1.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 torch==1.10.1 torchvision==0.11.2 tornado==6.1 tqdm==4.64.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 visdom==0.2.4 websocket-client==1.3.1 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ignite channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - cloudpickle==2.2.1 - codecov==2.1.13 - coverage==6.2 - dataclasses==0.8 - decorator==4.4.2 - enum34==1.1.10 - gym==0.26.2 - gym-notices==0.0.8 - idna==3.10 - importlib-resources==5.4.0 - joblib==1.1.1 - jsonpatch==1.32 - jsonpointer==2.3 - mock==5.2.0 - networkx==2.5.1 - numpy==1.19.5 - pillow==8.4.0 - protobuf==4.21.0 - pytest-cov==4.0.0 - requests==2.27.1 - scikit-learn==0.24.2 - scipy==1.5.4 - six==1.17.0 - tensorboardx==2.6.2.2 - threadpoolctl==3.1.0 - tomli==1.2.3 - torch==1.10.1 - torchvision==0.11.2 - tornado==6.1 - tqdm==4.64.1 - urllib3==1.26.20 - visdom==0.2.4 - websocket-client==1.3.1 prefix: /opt/conda/envs/ignite
[ "tests/ignite/metrics/test_binary_accuracy.py::test_zero_div", "tests/ignite/metrics/test_binary_accuracy.py::test_compute", "tests/ignite/metrics/test_mean_absolute_error.py::test_zero_div", "tests/ignite/metrics/test_mean_absolute_error.py::test_compute", "tests/ignite/metrics/test_mean_pairwise_distance.py::test_zero_div", "tests/ignite/metrics/test_mean_pairwise_distance.py::test_compute", "tests/ignite/metrics/test_root_mean_squared_error.py::test_zero_div", "tests/ignite/metrics/test_root_mean_squared_error.py::test_compute", "tests/ignite/metrics/test_top_k_categorical_accuracy.py::test_zero_div", "tests/ignite/metrics/test_top_k_categorical_accuracy.py::test_compute" ]
[]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,260
[ "ignite/metrics/binary_accuracy.py", "ignite/metrics/mean_absolute_error.py", "ignite/metrics/__init__.py", "ignite/metrics/top_k_categorical_accuracy.py", "ignite/metrics/root_mean_squared_error.py", "ignite/metrics/mean_pairwise_distance.py" ]
[ "ignite/metrics/binary_accuracy.py", "ignite/metrics/mean_absolute_error.py", "ignite/metrics/__init__.py", "ignite/metrics/top_k_categorical_accuracy.py", "ignite/metrics/root_mean_squared_error.py", "ignite/metrics/mean_pairwise_distance.py" ]
marshmallow-code__marshmallow-750
a867533d53ddbe8cb0ff63c1dc3ca53337ba525c
2018-03-06 18:29:59
8e217c8d6fefb7049ab3389f31a8d35824fa2d96
diff --git a/marshmallow/decorators.py b/marshmallow/decorators.py index 8b6df0df..cd850fc0 100644 --- a/marshmallow/decorators.py +++ b/marshmallow/decorators.py @@ -107,6 +107,9 @@ def post_dump(fn=None, pass_many=False, pass_original=False): By default, receives a single object at a time, transparently handling the ``many`` argument passed to the Schema. If ``pass_many=True``, the raw data (which may be a collection) and the value for ``many`` is passed. + + If ``pass_original=True``, the original data (before serializing) will be passed as + an additional argument to the method. """ return tag_processor(POST_DUMP, fn, pass_many, pass_original=pass_original) @@ -129,6 +132,9 @@ def post_load(fn=None, pass_many=False, pass_original=False): By default, receives a single datum at a time, transparently handling the ``many`` argument passed to the Schema. If ``pass_many=True``, the raw data (which may be a collection) and the value for ``many`` is passed. + + If ``pass_original=True``, the original data (before deserializing) will be passed as + an additional argument to the method. """ return tag_processor(POST_LOAD, fn, pass_many, pass_original=pass_original) diff --git a/marshmallow/schema.py b/marshmallow/schema.py index 79bb8ee1..904c5322 100644 --- a/marshmallow/schema.py +++ b/marshmallow/schema.py @@ -869,8 +869,8 @@ class BaseSchema(base.SchemaABC): data = utils.if_none(processor(data, many), data) elif many: if pass_original: - data = [utils.if_none(processor(item, original_data), item) - for item in data] + data = [utils.if_none(processor(item, original), item) + for item, original in zip(data, original_data)] else: data = [utils.if_none(processor(item), item) for item in data] else:
post_dump is passing a list of objects as original object Hi, I think post_dump with pass_original=True should pass the original object related to the data serialized and not a list of objects which this object belongs to. ``` python from marshmallow import fields, post_dump, Schema class DeviceSchema(Schema): id = fields.String() @post_dump(pass_original=True) def __post_dump(self, data, obj): print(obj) # <-- this is a list devices = [dict(id=1), dict(id=2)] DeviceSchema().dump(devices, many=True) ``` In the above example, the parameter `obj` is a list of devices rather than the device object itself. What do you think?
marshmallow-code/marshmallow
diff --git a/tests/test_decorators.py b/tests/test_decorators.py index 226e550f..41a2d44a 100644 --- a/tests/test_decorators.py +++ b/tests/test_decorators.py @@ -758,3 +758,80 @@ def test_decorator_error_handling_with_dump(decorator): schema.dump(object()) assert exc.value.messages == {'foo': 'error'} schema.load({}) + + +class Nested(object): + def __init__(self, foo): + self.foo = foo + + +class Example(object): + def __init__(self, nested): + self.nested = nested + + +example = Example(nested=[Nested(x) for x in range(1)]) + + [email protected]( + "data,expected_data,expected_original_data", + ( + [example, {"foo": 0}, example.nested[0]], + ), +) +def test_decorator_post_dump_with_nested_pass_original_and_pass_many( + data, expected_data, expected_original_data): + + class NestedSchema(Schema): + foo = fields.Int(required=True) + + @post_dump(pass_many=False, pass_original=True) + def check_pass_original_when_pass_many_false(self, data, original_data): + assert data == expected_data + assert original_data == expected_original_data + return data + + @post_dump(pass_many=True, pass_original=True) + def check_pass_original_when_pass_many_true(self, data, many, original_data): + assert many is True + assert data == [expected_data] + assert original_data == [expected_original_data] + return data + + class ExampleSchema(Schema): + nested = fields.Nested(NestedSchema, required=True, many=True) + + schema = ExampleSchema() + assert schema.dump(data) == {"nested": [{"foo": 0}]} + + [email protected]( + "data,expected_data,expected_original_data", + ( + [{"nested": [{"foo": 0}]}, {"foo": 0}, {"foo": 0}], + ), +) +def test_decorator_post_load_with_nested_pass_original_and_pass_many( + data, expected_data, expected_original_data): + + class NestedSchema(Schema): + foo = fields.Int(required=True) + + @post_load(pass_many=False, pass_original=True) + def check_pass_original_when_pass_many_false(self, data, original_data): + assert data == expected_data + assert original_data == expected_original_data + return data + + @post_load(pass_many=True, pass_original=True) + def check_pass_original_when_pass_many_true(self, data, many, original_data): + assert many is True + assert data == [expected_data] + assert original_data == [expected_original_data] + return data + + class ExampleSchema(Schema): + nested = fields.Nested(NestedSchema, required=True, many=True) + + schema = ExampleSchema() + assert schema.load(data) == data
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[reco]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": null, "python": "3.9", "reqs_path": [ "dev-requirements.txt", "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 execnet==2.1.1 filelock==3.18.0 flake8==3.5.0 iniconfig==2.1.0 invoke==0.22.0 -e git+https://github.com/marshmallow-code/marshmallow.git@a867533d53ddbe8cb0ff63c1dc3ca53337ba525c#egg=marshmallow mccabe==0.6.1 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 py==1.11.0 pycodestyle==2.3.1 pyflakes==1.6.0 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 python-dateutil==2.6.1 pytz==2017.3 simplejson==3.13.2 six==1.17.0 toml==0.10.2 tomli==2.2.1 tox==3.12.1 typing_extensions==4.13.0 virtualenv==20.29.3
name: marshmallow channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - execnet==2.1.1 - filelock==3.18.0 - flake8==3.5.0 - iniconfig==2.1.0 - invoke==0.22.0 - mccabe==0.6.1 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - py==1.11.0 - pycodestyle==2.3.1 - pyflakes==1.6.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - python-dateutil==2.6.1 - pytz==2017.3 - simplejson==3.13.2 - six==1.17.0 - toml==0.10.2 - tomli==2.2.1 - tox==3.12.1 - typing-extensions==4.13.0 - virtualenv==20.29.3 prefix: /opt/conda/envs/marshmallow
[ "tests/test_decorators.py::test_decorator_post_dump_with_nested_pass_original_and_pass_many[data0-expected_data0-expected_original_data0]", "tests/test_decorators.py::test_decorator_post_load_with_nested_pass_original_and_pass_many[data0-expected_data0-expected_original_data0]" ]
[]
[ "tests/test_decorators.py::test_decorated_processors", "tests/test_decorators.py::TestPassOriginal::test_pass_original_single_no_mutation", "tests/test_decorators.py::TestPassOriginal::test_pass_original_single_with_mutation", "tests/test_decorators.py::TestPassOriginal::test_pass_original_many", "tests/test_decorators.py::test_decorated_processor_inheritance", "tests/test_decorators.py::test_pre_dump_is_invoked_before_implicit_field_generation", "tests/test_decorators.py::TestValidatesDecorator::test_validates", "tests/test_decorators.py::TestValidatesDecorator::test_validates_with_attribute", "tests/test_decorators.py::TestValidatesDecorator::test_validates_decorator", "tests/test_decorators.py::TestValidatesDecorator::test_field_not_present", "tests/test_decorators.py::TestValidatesDecorator::test_precedence", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_validator_nested_many", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_validator_nested_many_pass_original_and_pass_many[True-expected_data0-expected_original_data0-data0]", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_validator_nested_many_pass_original_and_pass_many[False-expected_data1-expected_original_data1-data0]", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_decorated_validators", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_multiple_validators", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_passing_original_data", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_allow_arbitrary_field_names_in_error", "tests/test_decorators.py::TestValidatesSchemaDecorator::test_skip_on_field_errors", "tests/test_decorators.py::test_decorator_error_handling", "tests/test_decorators.py::test_decorator_error_handling_with_load[pre_load]", "tests/test_decorators.py::test_decorator_error_handling_with_load[post_load]", "tests/test_decorators.py::test_decorator_error_handling_with_dump[pre_dump]", "tests/test_decorators.py::test_decorator_error_handling_with_dump[post_dump]" ]
[]
MIT License
2,261
[ "marshmallow/schema.py", "marshmallow/decorators.py" ]
[ "marshmallow/schema.py", "marshmallow/decorators.py" ]
wright-group__WrightTools-534
a11e47d7786f63dcc595c8e9ccf121e73a16407b
2018-03-06 22:01:33
a6ff42f2a36f12a92d186a9532f6ec4cfd58d3c0
pep8speaks: Hello @ksunden! Thanks for submitting the PR. - In the file [`WrightTools/kit/_array.py`](https://github.com/wright-group/WrightTools/blob/405cd2cd8b838ac1bbf3b676aaaeb5f7a6de2a3d/WrightTools/kit/_array.py), following are the PEP8 issues : > [Line 216:35](https://github.com/wright-group/WrightTools/blob/405cd2cd8b838ac1bbf3b676aaaeb5f7a6de2a3d/WrightTools/kit/_array.py#L216): [E712](https://duckduckgo.com/?q=pep8%20E712) comparison to False should be 'if cond is False:' or 'if not cond:' - In the file [`tests/kit/remove_nans_1D.py`](https://github.com/wright-group/WrightTools/blob/405cd2cd8b838ac1bbf3b676aaaeb5f7a6de2a3d/tests/kit/remove_nans_1D.py), following are the PEP8 issues : > [Line 30:1](https://github.com/wright-group/WrightTools/blob/405cd2cd8b838ac1bbf3b676aaaeb5f7a6de2a3d/tests/kit/remove_nans_1D.py#L30): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 ksunden: ``` >>> a = np.array([np.nan, 1, 2, 2]) >>> np.isnan(a) array([ True, False, False, False]) >>> not np.isnan(a) Traceback (most recent call last): File "<input>", line 1, in <module> not np.isnan(a) ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all() >>> np.isnan(a) == False array([False, True, True, True]) >>> np.isnan(a) is False False ``` Hush, pep8speaks, that doesn't work here....
diff --git a/WrightTools/kit/_array.py b/WrightTools/kit/_array.py index 66cfb11..16136f1 100644 --- a/WrightTools/kit/_array.py +++ b/WrightTools/kit/_array.py @@ -210,17 +210,10 @@ def remove_nans_1D(*args): tuple Tuple of 1D arrays in same order as given, with nan indicies removed. """ - # find all indicies to keep - bads = np.array([]) - for arr in args: - bad = np.array(np.where(np.isnan(arr))).flatten() - bads = np.hstack((bad, bads)) - if hasattr(args, 'shape') and len(args.shape) == 1: - goods = [i for i in np.arange(args.shape[0]) if i not in bads] - else: - goods = [i for i in np.arange(len(args[0])) if i not in bads] - # apply - return tuple(a[goods] for a in args) + vals = np.isnan(args[0]) + for a in args: + vals |= np.isnan(a) + return tuple(np.array(a)[vals == False] for a in args) def share_nans(*arrs):
remove_nans_1D fails for list ``` >>> wt.kit.remove_nans_1D([np.nan, 1, 2, 2]) Traceback (most recent call last): File "<input>", line 1, in <module> wt.kit.remove_nans_1D([np.nan, 1, 2, 2]) File "/home/kyle/wright/WrightTools/WrightTools/kit/_array.py", line 223, in rem ove_nans_1D return tuple(a[goods] for a in args) File "/home/kyle/wright/WrightTools/WrightTools/kit/_array.py", line 223, in <ge nexpr> return tuple(a[goods] for a in args) TypeError: list indices must be integers or slices, not list >>> wt.kit.remove_nans_1D(np.array([np.nan, 1, 2, 2])) (array([1., 2., 2.]),) ```
wright-group/WrightTools
diff --git a/tests/kit/remove_nans_1D.py b/tests/kit/remove_nans_1D.py old mode 100644 new mode 100755 index 31d15ab..8c09a16 --- a/tests/kit/remove_nans_1D.py +++ b/tests/kit/remove_nans_1D.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 """Test remove_nans_1D.""" @@ -18,10 +19,20 @@ def test_simple(): assert wt.kit.remove_nans_1D(arr)[0].all() == np.arange(0, 6, dtype=float).all() -def test_list(): +def test_multiple(): arrs = [np.random.random(21) for _ in range(5)] arrs[0][0] = np.nan arrs[1][-1] = np.nan arrs = wt.kit.remove_nans_1D(*arrs) for arr in arrs: assert arr.size == 19 + + +def test_list(): + assert np.all(wt.kit.remove_nans_1D([np.nan, 1, 2, 3])[0] == np.array([1, 2, 3])) + + +if __name__ == "__main__": + test_simple() + test_multiple() + test_list()
{ "commit_name": "merge_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 1 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "flake8", "pydocstyle" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libfreetype6-dev hdf5-tools libhdf5-dev libopenblas-dev" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
appdirs==1.4.4 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 coverage==6.2 cycler==0.11.0 flake8==5.0.4 h5py==3.1.0 imageio==2.15.0 importlib-metadata==4.2.0 iniconfig==1.1.1 kiwisolver==1.3.1 matplotlib==3.3.4 mccabe==0.7.0 numexpr==2.8.1 numpy==1.19.5 packaging==21.3 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pydocstyle==6.3.0 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 scipy==1.5.4 six==1.17.0 snowballstemmer==2.2.0 tidy_headers==1.0.3 tomli==1.2.3 typing_extensions==4.1.1 -e git+https://github.com/wright-group/WrightTools.git@a11e47d7786f63dcc595c8e9ccf121e73a16407b#egg=WrightTools zipp==3.6.0
name: WrightTools channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - appdirs==1.4.4 - attrs==22.2.0 - cached-property==1.5.2 - coverage==6.2 - cycler==0.11.0 - flake8==5.0.4 - h5py==3.1.0 - imageio==2.15.0 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - kiwisolver==1.3.1 - matplotlib==3.3.4 - mccabe==0.7.0 - numexpr==2.8.1 - numpy==1.19.5 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pydocstyle==6.3.0 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - tidy-headers==1.0.3 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/WrightTools
[ "tests/kit/remove_nans_1D.py::test_list" ]
[]
[ "tests/kit/remove_nans_1D.py::test_simple", "tests/kit/remove_nans_1D.py::test_multiple" ]
[]
MIT License
2,262
[ "WrightTools/kit/_array.py" ]
[ "WrightTools/kit/_array.py" ]
Backblaze__B2_Command_Line_Tool-420
15a60ad1c71b75366061e4f742ef52eb9dcc23e7
2018-03-07 02:01:24
ee2339bd21d21d6140936d58597957250a33fc26
diff --git a/b2/sync/scan_policies.py b/b2/sync/scan_policies.py index 198c079..dfb9413 100644 --- a/b2/sync/scan_policies.py +++ b/b2/sync/scan_policies.py @@ -27,10 +27,45 @@ class RegexSet(object): return any(c.match(s) is not None for c in self._compiled_list) +def convert_dir_regex_to_dir_prefix_regex(dir_regex): + """ + The patterns used to match directory names (and file names) are allowed + to match a prefix of the name. This 'feature' was unintentional, but is + being retained for compatibility. + + This means that a regex that matches a directory name can't be used directly + to match against a file name and test whether the file should be excluded + because it matches the directory. + + The pattern 'photos' will match directory names 'photos' and 'photos2', + and should exclude files 'photos/kitten.jpg', and 'photos2/puppy.jpg'. + It should not exclude 'photos.txt', because there is no directory name + that matches. + + On the other hand, the pattern 'photos$' should match 'photos/kitten.jpg', + but not 'photos2/puppy.jpg', nor 'photos.txt' + + If the original regex is valid, there are only two cases to consider: + either the regex ends in '$' or does not. + """ + if dir_regex.endswith('$'): + return dir_regex[:-1] + r'/' + else: + return dir_regex + r'.*?/' + + class ScanPoliciesManager(object): """ Policy object used when scanning folders for syncing, used to decide which files to include in the list of files to be synced. + + Code that scans through files should at least use should_exclude_file() + to decide whether each file should be included; it will check include/exclude + patterns for file names, as well as patterns for excluding directeries. + + Code that scans may optionally use should_exclude_directory() to test whether + it can skip a directory completely and not bother listing the files and + sub-directories in it. """ def __init__( @@ -40,6 +75,9 @@ class ScanPoliciesManager(object): include_file_regexes=tuple(), ): self._exclude_dir_set = RegexSet(exclude_dir_regexes) + self._exclude_file_because_of_dir_set = RegexSet( + map(convert_dir_regex_to_dir_prefix_regex, exclude_dir_regexes) + ) self._exclude_file_set = RegexSet(exclude_file_regexes) self._include_file_set = RegexSet(include_file_regexes) @@ -51,8 +89,12 @@ class ScanPoliciesManager(object): being scanned. :return: True iff excluded. """ - return self._exclude_file_set.matches(file_path) and \ - not self._include_file_set.matches(file_path) + exclude_because_of_dir = self._exclude_file_because_of_dir_set.matches(file_path) + exclude_because_of_file = ( + self._exclude_file_set.matches(file_path) and + not self._include_file_set.matches(file_path) + ) + return exclude_because_of_dir or exclude_because_of_file def should_exclude_directory(self, dir_path): """
--excludeDirRegex does not work when source is B2 The new filtering that lets you exclude an entire directory works in the `LocalFolder` class, but not the `B2Folder` class. I think there are two possible approaches to fixing it: (1) change B2Folder to simulate the existence of directories, and check them for exclusion, or (2) extend `ScanPoliciesManager.should_exclude_file` to also test whether any of the directories in the path are excluded. I like #2, but I think it would need optimization to avoid checking every parent directory of every file.
Backblaze/B2_Command_Line_Tool
diff --git a/test/test_scan_policies.py b/test/test_scan_policies.py index f3bb797..853730d 100644 --- a/test/test_scan_policies.py +++ b/test/test_scan_policies.py @@ -30,8 +30,20 @@ class TestScanPolicies(TestBase): def test_exclude_dir(self): policy = ScanPoliciesManager( - include_file_regexes=['.*[.]txt$'], exclude_dir_regexes=['alfa$'] + include_file_regexes=['.*[.]txt$'], exclude_dir_regexes=['alfa', 'bravo$'] ) self.assertTrue(policy.should_exclude_directory('alfa')) - self.assertFalse(policy.should_exclude_directory('alfa2')) - self.assertFalse(policy.should_exclude_directory('alfa/hello')) + self.assertTrue(policy.should_exclude_directory('alfa2')) + self.assertTrue(policy.should_exclude_directory('alfa/hello')) + + self.assertTrue(policy.should_exclude_directory('bravo')) + self.assertFalse(policy.should_exclude_directory('bravo2')) + self.assertFalse(policy.should_exclude_directory('bravo/hello')) + + self.assertTrue(policy.should_exclude_file('alfa/foo')) + self.assertTrue(policy.should_exclude_file('alfa2/hello/foo')) + self.assertTrue(policy.should_exclude_file('alfa/hello/foo.txt')) + + self.assertTrue(policy.should_exclude_file('bravo/foo')) + self.assertFalse(policy.should_exclude_file('bravo2/hello/foo')) + self.assertTrue(policy.should_exclude_file('bravo/hello/foo.txt'))
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
arrow==0.12.0 attrs==22.2.0 -e git+https://github.com/Backblaze/B2_Command_Line_Tool.git@15a60ad1c71b75366061e4f742ef52eb9dcc23e7#egg=b2 certifi==2021.5.30 charset-normalizer==2.0.12 idna==3.10 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 logfury==1.0.1 nose==1.3.7 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 requests==2.27.1 six==1.17.0 tomli==1.2.3 tqdm==4.64.1 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: B2_Command_Line_Tool channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - arrow==0.12.0 - attrs==22.2.0 - charset-normalizer==2.0.12 - idna==3.10 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - logfury==1.0.1 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - requests==2.27.1 - six==1.17.0 - tomli==1.2.3 - tqdm==4.64.1 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/B2_Command_Line_Tool
[ "test/test_scan_policies.py::TestScanPolicies::test_exclude_dir" ]
[]
[ "test/test_scan_policies.py::TestScanPolicies::test_default", "test/test_scan_policies.py::TestScanPolicies::test_exclude_include" ]
[]
MIT License
2,263
[ "b2/sync/scan_policies.py" ]
[ "b2/sync/scan_policies.py" ]
gurumitts__pylutron-caseta-20
52dfa7000b7cd23bee586671d8ef839c932178b8
2018-03-07 02:34:07
52dfa7000b7cd23bee586671d8ef839c932178b8
diff --git a/pylutron_caseta/smartbridge.py b/pylutron_caseta/smartbridge.py index 05527d7..fd50d34 100755 --- a/pylutron_caseta/smartbridge.py +++ b/pylutron_caseta/smartbridge.py @@ -318,7 +318,7 @@ class Smartbridge: if 'LocalZones' in device: device_zone = device['LocalZones'][0]['href'] device_zone = device_zone[device_zone.rfind('/') + 1:] - device_name = device['Name'] + device_name = '_'.join(device['FullyQualifiedName']) device_type = device['DeviceType'] self.devices[device_id] = {'device_id': device_id, 'name': device_name,
Device name uses incomplete name. https://github.com/gurumitts/pylutron-caseta/blob/52dfa7000b7cd23bee586671d8ef839c932178b8/pylutron_caseta/smartbridge.py#L321 Device name is assigned the `Name` value from the devices dict, however, the `FullyQualifiedName` would be more appropriate as the latest Lutron Caseta app enforces the use of Rooms. The room name is the prefix the `FullyQualifiedName` and left out of `Name`. This leads to multiple switches/dimmers named `Light` or `Lights` vs `Kitchen Lights`, `Kitchen Table Lights`. Changing line 321 to `device_name = " ".join(device["FullyQualifiedName"])` seems more appropriate.
gurumitts/pylutron-caseta
diff --git a/tests/test_smartbridge.py b/tests/test_smartbridge.py index 16adacf..874e593 100644 --- a/tests/test_smartbridge.py +++ b/tests/test_smartbridge.py @@ -188,7 +188,7 @@ def test_device_list(event_loop, bridge): "current_state": -1}, "2": { "device_id": "2", - "name": "Lights", + "name": "Hallway_Lights", "type": "WallDimmer", "zone": "1", "current_state": -1}}
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 coverage==6.2 execnet==1.9.0 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/gurumitts/pylutron-caseta.git@52dfa7000b7cd23bee586671d8ef839c932178b8#egg=pylutron_caseta pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pylutron-caseta channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==6.2 - execnet==1.9.0 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - tomli==1.2.3 prefix: /opt/conda/envs/pylutron-caseta
[ "tests/test_smartbridge.py::test_device_list" ]
[]
[ "tests/test_smartbridge.py::test_notifications", "tests/test_smartbridge.py::test_scene_list", "tests/test_smartbridge.py::test_is_connected", "tests/test_smartbridge.py::test_is_on", "tests/test_smartbridge.py::test_set_value", "tests/test_smartbridge.py::test_activate_scene" ]
[]
Apache License 2.0
2,264
[ "pylutron_caseta/smartbridge.py" ]
[ "pylutron_caseta/smartbridge.py" ]
coecms__ARCCSSive-28
2188bbb4aa2a34f1c3c6e3508bf98c0ee9d2703e
2018-03-07 03:13:47
2188bbb4aa2a34f1c3c6e3508bf98c0ee9d2703e
diff --git a/.travis.yml b/.travis.yml index 9f1999b..18c4269 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,5 @@ language: python python: - - '2.6' - '2.7' - '3.4' install: diff --git a/ARCCSSive/cli/compare_ESGF.py b/ARCCSSive/cli/compare_ESGF.py index 7d343f6..b74610c 100644 --- a/ARCCSSive/cli/compare_ESGF.py +++ b/ARCCSSive/cli/compare_ESGF.py @@ -25,6 +25,16 @@ from ARCCSSive.CMIP5.pyesgf_functions import ESGFSearch from ARCCSSive.CMIP5.other_functions import assign_mips, combine_constraints from ARCCSSive.CMIP5.compare_helpers import * import sys +from six.moves import input +from distutils.util import strtobool + +def query_yes_no(prompt): + answer = input(prompt) + try: + return strtobool(answer) + except ValueError: + print("Please answer Y or N") + return query_yes_no(prompt) def parse_input(): ''' Parse input arguments ''' @@ -148,11 +158,8 @@ def main(): print("Nothing currently available on ESGF nodes and no local version exists for constraints:\n",constraints,"and variables:",variables) else: print(esgf.ds_count(),"instances were found on ESGF and ",outputs.count()," on the local database") - if sys.version_info < ( 3, 0 ): - request=raw_input("Do you want to proceed with comparison (Y) or write current results (N) ? Y/N \n") - else: - request=input("Do you want to proceed with comparison (Y) or write current results (N) ? Y/N \n") - if request == "Y": + request = query_yes_no("Do you want to proceed with comparison (Y) or write current results (N) ? Y/N \n") + if request: esgf_results, db_results=compare_instances(cmip5.session, esgf_results, db_results, orig_args.keys(), admin) # build table to summarise results @@ -184,11 +191,8 @@ def main(): print(s.split("'")[0]) fout.writelines("'" +s + "'\n") fout.close() - if sys.version_info < ( 3, 0 ): - request2=raw_input("submit a request to download these files? Y/N \n") - else: - request2=input("submit a request to download these files? Y/N \n") - if request2 == "Y": os.system ("cp %s %s" % (outfile, outdir+outfile)) + request2 = query_yes_no("submit a request to download these files? Y/N \n") + if request2: os.system ("cp %s %s" % (outfile, outdir+outfile)) for var in variables: remote=[ds for ds in esgf_results if ds['variable']==var] local=[v for v in db_results if v.variable.__dict__['variable']==var] diff --git a/conda/meta.yaml b/conda/meta.yaml index 500d3f2..38bf6ef 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -29,9 +29,11 @@ requirements: test: source_files: - setup.cfg + - conftest.py - tests requires: - pytest + - mock # [py2k] commands: - py.test - search_replica -h
Make compare_ESGF questions case-insensitive Currently compare_ESGF responses must be a capital `Y`. Make this case-insensitive, possibly also so that it works with 'yes'
coecms/ARCCSSive
diff --git a/conftest.py b/conftest.py index 57150ac..9495faf 100644 --- a/conftest.py +++ b/conftest.py @@ -11,3 +11,4 @@ if sys.version_info >= (3,0): collect_ignore.append('ARCCSSive/CMIP5/pyesgf_functions.py') collect_ignore.append('ARCCSSive/CMIP5/compare_helpers.py') collect_ignore.append('ARCCSSive/cli/compare_ESGF.py') + collect_ignore.append('tests/cli/test_compare_ESGF.py') diff --git a/tests/cli/test_compare_ESGF.py b/tests/cli/test_compare_ESGF.py new file mode 100644 index 0000000..fd046c7 --- /dev/null +++ b/tests/cli/test_compare_ESGF.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# Copyright 2018 ARC Centre of Excellence for Climate Systems Science +# author: Scott Wales <[email protected]> +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import print_function +from ARCCSSive.cli.compare_ESGF import * +import mock + +def sample_query_yes_no(answer, value): + with mock.patch('ARCCSSive.cli.compare_ESGF.input', return_value=answer) as inp: + assert query_yes_no("") == value + +def test_query_yes_no(): + sample_query_yes_no("Y", True) + sample_query_yes_no("y", True) + sample_query_yes_no("yes", True) + sample_query_yes_no("yEs", True) + + sample_query_yes_no("N", False) + sample_query_yes_no("n", False) + sample_query_yes_no("no", False) + sample_query_yes_no("nO", False)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 3 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.16 -e git+https://github.com/coecms/ARCCSSive.git@2188bbb4aa2a34f1c3c6e3508bf98c0ee9d2703e#egg=ARCCSSive attrs==25.3.0 babel==2.17.0 cattrs==24.1.3 certifi==2025.1.31 cffi==1.17.1 charset-normalizer==3.4.1 commonmark==0.9.1 cryptography==44.0.2 defusedxml==0.7.1 docutils==0.21.2 esgf-pyclient==0.3.1 exceptiongroup==1.2.2 greenlet==3.1.1 idna==3.10 imagesize==1.4.1 importlib_metadata==8.6.1 iniconfig==2.1.0 Jinja2==3.1.6 MarkupSafe==3.0.2 mock==5.2.0 MyProxyClient==2.1.1 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 pycparser==2.22 Pygments==2.19.1 pyOpenSSL==25.0.0 pytest==8.3.5 recommonmark==0.7.1 requests==2.32.3 requests-cache==1.2.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==7.4.7 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 SQLAlchemy==2.0.40 tomli==2.2.1 typing_extensions==4.13.0 url-normalize==2.2.0 urllib3==2.3.0 WebOb==1.8.9 zipp==3.21.0
name: ARCCSSive channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.16 - attrs==25.3.0 - babel==2.17.0 - cattrs==24.1.3 - certifi==2025.1.31 - cffi==1.17.1 - charset-normalizer==3.4.1 - commonmark==0.9.1 - cryptography==44.0.2 - defusedxml==0.7.1 - docutils==0.21.2 - esgf-pyclient==0.3.1 - exceptiongroup==1.2.2 - greenlet==3.1.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - jinja2==3.1.6 - markupsafe==3.0.2 - mock==5.2.0 - myproxyclient==2.1.1 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - pycparser==2.22 - pygments==2.19.1 - pyopenssl==25.0.0 - pytest==8.3.5 - recommonmark==0.7.1 - requests==2.32.3 - requests-cache==1.2.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==7.4.7 - sphinxcontrib-applehelp==2.0.0 - sphinxcontrib-devhelp==2.0.0 - sphinxcontrib-htmlhelp==2.1.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==2.0.0 - sphinxcontrib-serializinghtml==2.0.0 - sqlalchemy==2.0.40 - tomli==2.2.1 - typing-extensions==4.13.0 - url-normalize==2.2.0 - urllib3==2.3.0 - webob==1.8.9 - zipp==3.21.0 prefix: /opt/conda/envs/ARCCSSive
[ "tests/cli/test_compare_ESGF.py::test_query_yes_no" ]
[]
[]
[]
Apache License 2.0
2,265
[ ".travis.yml", "conda/meta.yaml", "ARCCSSive/cli/compare_ESGF.py" ]
[ ".travis.yml", "conda/meta.yaml", "ARCCSSive/cli/compare_ESGF.py" ]
pika__pika-991
16cdb80b4c0aacc9766abf033fccecb2c1ccb1a3
2018-03-07 18:25:08
4c904dea651caaf2a54b0fca0b9e908dec18a4f8
vitaly-krugl: @lukebakken, I haven't forgotten - will catch up in the next couple of days. lukebakken: @vitaly-krugl no hurry at all! Thanks again. lukebakken: @vitaly-krugl the `queue.declare` method never makes it to RabbitMQ. An `AssertionError` is thrown [here](https://github.com/pika/pika/blob/master/pika/spec.py#L1003-L1004) which gums up the works when the `with` clause tries to exit. vitaly-krugl: Would this alone punch at the heart of the problem [here](https://github.com/pika/pika/blob/b7f27983cfbcbaf34a06b6fc9259a7fd50b8838d/pika/channel.py#L1382)? ``` try: self._send_method(method) except Exception: self._blocking = None raise ``` lukebakken: I'll try that out. vitaly-krugl: Don't try it out just yet, I missed something there (and the fix in the PR did, too, I think) vitaly-krugl: The [if acceptable_replies:](https://github.com/pika/pika/blob/b7f27983cfbcbaf34a06b6fc9259a7fd50b8838d/pika/channel.py#L1361) block sets `self._blocking` and also registers a number of callbacks. If `self._send_method(method)` (as in this case), we really don't want any of those registered callbacks to remain there either. Since a successful `self._send_method(method)` call will ultimately just enqueue some data on the output write buffer, it should be possible to move the (https://github.com/pika/pika/blob/b7f27983cfbcbaf34a06b6fc9259a7fd50b8838d/pika/channel.py#L1361) block after [self._send_method(method)](https://github.com/pika/pika/blob/b7f27983cfbcbaf34a06b6fc9259a7fd50b8838d/pika/channel.py#L1382). Furthermore, to ensure that an incomplete message doesn't get placed in the output buffer (due to marshaling failure of one of its subframes), [Connection._send_message()](https://github.com/pika/pika/blob/b7f27983cfbcbaf34a06b6fc9259a7fd50b8838d/pika/connection.py#L2273) needs to be modified to pre-marshal all of its parts and then append them to the output frame buffer only after all marshaling is done, updating the stats and finally calling `self._flush_outbound() ` and `self._detect_backpressure()` like [Connection._send_frame()](https://github.com/pika/pika/blob/b7f27983cfbcbaf34a06b6fc9259a7fd50b8838d/pika/connection.py#L2251-L2257). To this end, `Connection._send_message()` and `Connection._send_frame()` should share a method (e.g., `Connection._output_marshaled_frame()` that updates `self.bytes_sent` and `self.frames_sent` and appends the marshaled frame data to `self.outbound_buffer` . lukebakken: @vitaly-krugl - ready for re-review. Thanks! lukebakken: @vitaly-krugl - I have merged in the tests you provided and this is ready for another review. Thanks! vitaly-krugl: @lukebakken, I renamed this PR "Request marshaling error should not corrupt a channel", which reflects issues #990 and #912 more accurately. vitaly-krugl: On broker's Channel.Close, the draining is necessary because ANQP says to ignore all incoming requests after channel is closed except Channel.Close. So, draining in that case helps break the gridlock. However, in the case the client is closing the channel with some blocking requests still pending normally, we have a perfectly healthy channel and nothing special is needed. The normal course of events will see it through. On Tue, Apr 10, 2018, 5:00 AM Luke Bakken <[email protected]> wrote: > *@lukebakken* commented on this pull request. 
> ------------------------------ > > In pika/channel.py > <https://github.com/pika/pika/pull/991#discussion_r180392933>: > > > @@ -1327,9 +1327,10 @@ def _on_synchronous_complete(self, _method_frame_unused): > while self._blocked and self._blocking is None: > self._rpc(*self._blocked.popleft()) > > - def _drain_blocked_methods_on_remote_close(self): > > If we think we need an "emergency channel-close" method that purges > queued-up requests (I don't think we do > > Draining blocked methods on a broker-initiated close was introduced in > #957 <https://github.com/pika/pika/pull/957> - please check that PR out > again. I still think it's necessary. vitaly-krugl: @lukebakken, is this PR ready for re-review? vitaly-krugl: I think I might not have ended that review ?? On Mon, Apr 16, 2018, 6:22 AM Luke Bakken <[email protected]> wrote: > @vitaly-krugl <https://github.com/vitaly-krugl> if I re-select your name > in the "Reviewers" dropdown, the status icon changes back to an orange disk > ... do you not get a new email saying I re-requested a review? I assumed > that you did. If you don't get an email, I can @-mention you in a comment. > Thanks for the re-re-reviews 😄 lukebakken: @vitaly-krugl thanks! Sorry I missed the previous comment about that test. lukebakken: @vitaly-krugl - changes made. I'll merge this once builds complete. Thanks!
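The fix strategy settled on in the discussion above has two parts: send (and therefore marshal) the request before any blocking state or callbacks are registered in `Channel._rpc`, and pre-marshal every frame of a message in `Connection._send_message` so a marshaling failure cannot leave a partial message in the outbound buffer. Below is a rough, stand-alone illustration of the second idea; `send_message`, `FakeFrame`, and `output_buffer` are placeholder names for this sketch, not pika's actual API.

```python
class FakeFrame:
    """Stand-in for an AMQP frame whose marshal() can fail on bad payloads."""

    def __init__(self, payload):
        self.payload = payload

    def marshal(self):
        if not isinstance(self.payload, bytes):
            raise TypeError("frame body must be bytes")
        return self.payload


def send_message(output_buffer, frames):
    """Marshal every frame first; only then touch the output buffer.

    If any frame fails to marshal, the exception propagates and nothing is
    enqueued, so no partially written message can corrupt the wire stream.
    """
    marshaled = [frame.marshal() for frame in frames]  # may raise before any output
    for chunk in marshaled:
        output_buffer.append(chunk)


if __name__ == "__main__":
    buf = []
    try:
        send_message(buf, [FakeFrame(b"method"), FakeFrame([1, 2, 3])])
    except TypeError:
        pass
    assert buf == []  # nothing was enqueued even though the first frame was valid
```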
diff --git a/pika/channel.py b/pika/channel.py index fb67a0d..282f53c 100644 --- a/pika/channel.py +++ b/pika/channel.py @@ -1347,7 +1347,7 @@ class Channel(object): sent, and thus its completion callback would never be called. """ - LOGGER.debug('Draining %i blocked frames due to remote Channel.Close', + LOGGER.debug('Draining %i blocked frames due to broker-requested Channel.Close', len(self._blocked)) while self._blocked: method = self._blocked.popleft()[0] @@ -1408,6 +1408,12 @@ class Channel(object): self._blocked.append([method, callback, acceptable_replies]) return + # Note: _send_method can throw exceptions if there are framing errors + # or invalid data passed in. Call it here to prevent self._blocking + # from being set if an exception is thrown. This also prevents + # acceptable_replies registering callbacks when exceptions are thrown + self._send_method(method) + # If acceptable replies are set, add callbacks if acceptable_replies: # Block until a response frame is received for synchronous frames @@ -1430,8 +1436,6 @@ class Channel(object): self.callbacks.add(self.channel_number, reply, callback, arguments=arguments) - self._send_method(method) - def _raise_if_not_open(self): """If channel is not in the OPEN state, raises ChannelClosed with `reply_code` and `reply_text` corresponding to current state. If channel diff --git a/pika/connection.py b/pika/connection.py index be2b1bc..271b198 100644 --- a/pika/connection.py +++ b/pika/connection.py @@ -2306,11 +2306,7 @@ class Connection(pika.compat.AbstractBase): 'Attempted to send a frame on closed connection.') marshaled_frame = frame_value.marshal() - self.bytes_sent += len(marshaled_frame) - self.frames_sent += 1 - self._adapter_emit_data(marshaled_frame) - if self.params.backpressure_detection: - self._detect_backpressure() + self._output_marshaled_frames([marshaled_frame]) def _send_method(self, channel_number, method, content=None): """Constructs a RPC method frame and then sends it to the broker. @@ -2336,8 +2332,14 @@ class Connection(pika.compat.AbstractBase): """ length = len(content[1]) - self._send_frame(frame.Method(channel_number, method_frame)) - self._send_frame(frame.Header(channel_number, length, content[0])) + marshaled_body_frames = [] + + # Note: we construct the Method, Header and Content objects, marshal them + # *then* output in case the marshaling operation throws an exception + frame_method = frame.Method(channel_number, method_frame) + frame_header = frame.Header(channel_number, length, content[0]) + marshaled_body_frames.append(frame_method.marshal()) + marshaled_body_frames.append(frame_header.marshal()) if content[1]: chunks = int(math.ceil(float(length) / self._body_max_length)) @@ -2346,7 +2348,10 @@ class Connection(pika.compat.AbstractBase): end = start + self._body_max_length if end > length: end = length - self._send_frame(frame.Body(channel_number, content[1][start:end])) + frame_body = frame.Body(channel_number, content[1][start:end]) + marshaled_body_frames.append(frame_body.marshal()) + + self._output_marshaled_frames(marshaled_body_frames) def _set_connection_state(self, connection_state): """Set the connection state. 
@@ -2382,3 +2387,16 @@ class Connection(pika.compat.AbstractBase): """ self._frame_buffer = self._frame_buffer[byte_count:] self.bytes_received += byte_count + + def _output_marshaled_frames(self, marshaled_frames): + """Output list of marshaled frames to buffer and update stats + + :param list marshaled_frames: A list of frames marshaled to bytes + + """ + for marshaled_frame in marshaled_frames: + self.bytes_sent += len(marshaled_frame) + self.frames_sent += 1 + self._adapter_emit_data(marshaled_frame) + if self.params.backpressure_detection: + self._detect_backpressure()
BlockingChannel.queue_declare hanging on non-string queue parameters Under Python 3.6.4 and Pika 0.11.2, the `BlockingChannel.queue_declare` method hangs when setting its `queue` parameter to a value that is not of `str` type (e.g., `int`, `bool`, `list`, `dict`, `tuple`, `NoneType`). Input: ``` $ python3 <<EOF import pika with pika.BlockingConnection() as connection: channel = connection.channel() channel.queue_declare(queue=[1, 2, 3]) EOF ```
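For reference, the acceptance test added in this PR (`TestUsingInvalidQueueArgument`, in the test patch below) pins down the intended behavior: the spec-level `AssertionError` should surface to the caller instead of wedging the blocking channel. The snippet below is a hedged variant of the repro showing that expectation; it assumes a RabbitMQ broker on localhost with default credentials, and later pika releases may raise a different exception type for this misuse.

```python
import pika

# Assumes a local RabbitMQ broker, as in the project's acceptance tests.
with pika.BlockingConnection() as connection:
    channel = connection.channel()
    try:
        channel.queue_declare(queue=[1, 2, 3])  # non-str queue name
    except AssertionError:
        # With the fix, the marshaling/spec assertion propagates immediately
        # rather than hanging the channel and the `with` block's exit.
        print("queue_declare rejected the non-string queue name")
```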
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index 2a51ae1..967e109 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -625,8 +625,9 @@ class TestExchangeRedeclareWithDifferentValues(AsyncTestCase, AsyncAdapters): raise AssertionError("Should not have received an Exchange.DeclareOk") -class TestPassiveExchangeDeclareWithConcurrentClose(AsyncTestCase, AsyncAdapters): - DESCRIPTION = "should close channel: declare passive exchange with close" +class TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker( + AsyncTestCase, AsyncAdapters): + DESCRIPTION = "No deadlock when closing a channel with pending blocked requests and concurrent Channel.Close from broker." # To observe the behavior that this is testing, comment out this line # in pika/channel.py - _on_close: @@ -636,10 +637,12 @@ class TestPassiveExchangeDeclareWithConcurrentClose(AsyncTestCase, AsyncAdapters # With the above line commented out, this test will hang def begin(self, channel): - self.name = self.__class__.__name__ + ':' + uuid.uuid1().hex + base_exch_name = self.__class__.__name__ + ':' + uuid.uuid1().hex self.channel.add_on_close_callback(self.on_channel_closed) for i in range(0, 99): - exch_name = self.name + ':' + str(i) + # Passively declare a non-existent exchange to force Channel.Close + # from broker + exch_name = base_exch_name + ':' + str(i) cb = functools.partial(self.on_bad_result, exch_name) channel.exchange_declare(exch_name, exchange_type='direct', @@ -648,15 +651,49 @@ class TestPassiveExchangeDeclareWithConcurrentClose(AsyncTestCase, AsyncAdapters channel.close() def on_channel_closed(self, channel, reply_code, reply_text): + # The close is expected because the requested exchange doesn't exist self.stop() def on_bad_result(self, exch_name, frame): - self.channel.exchange_delete(exch_name) - raise AssertionError("Should not have received an Exchange.DeclareOk") + self.fail("Should not have received an Exchange.DeclareOk") -class TestQueueDeclareAndDelete(AsyncTestCase, AsyncAdapters): - DESCRIPTION = "Create and delete a queue" +class TestClosingAChannelPermitsBlockedRequestToComplete(AsyncTestCase, + AsyncAdapters): + DESCRIPTION = "Closing a channel permits blocked requests to complete." 
+ + def begin(self, channel): + self._queue_deleted = False + + channel.add_on_close_callback(self.on_channel_closed) + + q_name = self.__class__.__name__ + ':' + uuid.uuid1().hex + # NOTE we pass callback to make it a blocking request + channel.queue_declare(q_name, + exclusive=True, + callback=lambda _frame: None) + + self.assertIsNotNone(channel._blocking) + + # The Queue.Delete should block on completion of Queue.Declare + channel.queue_delete(q_name, callback=self.on_queue_deleted) + self.assertTrue(channel._blocked) + + # This Channel.Close should allow the blocked Queue.Delete to complete + # Before closing the channel + channel.close() + + def on_queue_deleted(self, _frame): + # Getting this callback shows that the blocked request was processed + self._queue_deleted = True + + def on_channel_closed(self, _channel, _reply_code, _reply_text): + self.assertTrue(self._queue_deleted) + self.stop() + + +class TestQueueUnnamedDeclareAndDelete(AsyncTestCase, AsyncAdapters): + DESCRIPTION = "Create and delete an unnamed queue" def begin(self, channel): channel.queue_declare(queue='', @@ -673,11 +710,11 @@ class TestQueueDeclareAndDelete(AsyncTestCase, AsyncAdapters): def on_queue_delete(self, frame): self.assertIsInstance(frame.method, spec.Queue.DeleteOk) + # NOTE: with event loops that suppress exceptions from callbacks self.stop() - -class TestQueueNameDeclareAndDelete(AsyncTestCase, AsyncAdapters): +class TestQueueNamedDeclareAndDelete(AsyncTestCase, AsyncAdapters): DESCRIPTION = "Create and delete a named queue" def begin(self, channel): @@ -701,7 +738,6 @@ class TestQueueNameDeclareAndDelete(AsyncTestCase, AsyncAdapters): self.stop() - class TestQueueRedeclareWithDifferentValues(AsyncTestCase, AsyncAdapters): DESCRIPTION = "Should close chan: re-declared queue w/ diff params" @@ -745,7 +781,6 @@ class TestTX1_Select(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103 self.stop() - class TestTX2_Commit(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103 DESCRIPTION = "Start a transaction, and commit it" diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index d79ded2..d0ed48e 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -50,7 +50,6 @@ def setUpModule(): logging.basicConfig(level=logging.DEBUG) -#@unittest.skip('SKIPPING WHILE DEBUGGING SOME CHANGES. 
DO NOT MERGE LIKE THIS') class BlockingTestCaseBase(unittest.TestCase): TIMEOUT = DEFAULT_TIMEOUT @@ -355,6 +354,16 @@ class TestCreateAndCloseConnectionWithChannelAndConsumer(BlockingTestCaseBase): self.assertFalse(ch._impl._consumers) +class TestUsingInvalidQueueArgument(BlockingTestCaseBase): + def test(self): + """BlockingConnection raises expected exception when invalid queue parameter is used + """ + connection = self._connect() + ch = connection.channel() + with self.assertRaises(AssertionError): + ch.queue_declare(queue=[1, 2, 3]) + + class TestSuddenBrokerDisconnectBeforeChannel(BlockingTestCaseBase): def test(self): diff --git a/tests/unit/channel_tests.py b/tests/unit/channel_tests.py index 10e594e..dc353ef 100644 --- a/tests/unit/channel_tests.py +++ b/tests/unit/channel_tests.py @@ -1587,3 +1587,18 @@ class ChannelTests(unittest.TestCase): self.assertRaises(TypeError, self.obj._validate_rpc_completion_callback, 'foo') + + def test_no_side_effects_from_send_method_error(self): + self.obj._set_state(self.obj.OPEN) + + self.assertIsNone(self.obj._blocking) + + with mock.patch.object(self.obj.callbacks, 'add') as cb_add_mock: + with mock.patch.object(self.obj, '_send_method', + side_effect=TypeError) as send_method_mock: + with self.assertRaises(TypeError): + self.obj.queue_delete('', callback=lambda _frame: None) + + self.assertEqual(send_method_mock.call_count, 1) + self.assertIsNone(self.obj._blocking) + self.assertEqual(cb_add_mock.call_count, 0) diff --git a/tests/unit/connection_tests.py b/tests/unit/connection_tests.py index 19df873..04fd543 100644 --- a/tests/unit/connection_tests.py +++ b/tests/unit/connection_tests.py @@ -983,3 +983,31 @@ class ConnectionTests(unittest.TestCase): # pylint: disable=R0904 # Make sure _detect_backpressure doesn't throw self.connection._detect_backpressure() + + + def test_no_side_effects_from_message_marshal_error(self): + # Verify that frame buffer is empty on entry + self.assertEqual(b'', self.connection._frame_buffer) + + # Use Basic.Public with invalid body to trigger marshalling error + method = spec.Basic.Publish() + properties = spec.BasicProperties() + # Verify that marshalling of method and header won't trigger error + frame.Method(1, method).marshal() + frame.Header(1, body_size=10, props=properties).marshal() + # Create bogus body that should trigger an error during marshalling + body = [1,2,3,4] + # Verify that frame body can be created using the bogus body, but + # that marshalling will fail + frame.Body(1, body) + with self.assertRaises(TypeError): + frame.Body(1, body).marshal() + + # Now, attempt to send the method with the bogus body + with self.assertRaises(TypeError): + self.connection._send_method(channel_number=1, + method=method, + content=(properties, body)) + + # Now make sure that nothing is enqueued on frame buffer + self.assertEqual(b'', self.connection._frame_buffer)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.12
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-asyncio", "coverage", "codecov", "mock", "tornado", "twisted" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 Automat==24.8.1 certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 constantly==23.10.4 coverage==7.8.0 exceptiongroup==1.2.2 hyperlink==21.0.0 idna==3.10 incremental==24.7.2 iniconfig==2.1.0 mock==5.2.0 nose==1.3.7 packaging==24.2 -e git+https://github.com/pika/pika.git@16cdb80b4c0aacc9766abf033fccecb2c1ccb1a3#egg=pika pluggy==1.5.0 pytest==8.3.5 pytest-asyncio==0.26.0 requests==2.32.3 tomli==2.2.1 tornado==6.4.2 Twisted==24.11.0 typing_extensions==4.13.0 urllib3==2.3.0 zope.interface==7.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - automat==24.8.1 - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - constantly==23.10.4 - coverage==7.8.0 - exceptiongroup==1.2.2 - hyperlink==21.0.0 - idna==3.10 - incremental==24.7.2 - iniconfig==2.1.0 - mock==5.2.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - requests==2.32.3 - tomli==2.2.1 - tornado==6.4.2 - twisted==24.11.0 - typing-extensions==4.13.0 - urllib3==2.3.0 - zope-interface==7.2 prefix: /opt/conda/envs/pika
[ "tests/unit/channel_tests.py::ChannelTests::test_no_side_effects_from_send_method_error", "tests/unit/connection_tests.py::ConnectionTests::test_no_side_effects_from_message_marshal_error" ]
[ "tests/acceptance/async_adapter_tests.py::TestA_Connect::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestA_Connect::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestA_Connect::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestA_Connect::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestA_Connect::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestA_Connect::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestConstructAndImmediatelyCloseConnection::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestConstructAndImmediatelyCloseConnection::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestConstructAndImmediatelyCloseConnection::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestConstructAndImmediatelyCloseConnection::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestConstructAndImmediatelyCloseConnection::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestConstructAndImmediatelyCloseConnection::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCloseConnectionDuringAMQPHandshake::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCloseConnectionDuringAMQPHandshake::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCloseConnectionDuringAMQPHandshake::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCloseConnectionDuringAMQPHandshake::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCloseConnectionDuringAMQPHandshake::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCloseConnectionDuringAMQPHandshake::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestSocketConnectTimeoutWithTinySocketTimeout::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestSocketConnectTimeoutWithTinySocketTimeout::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestSocketConnectTimeoutWithTinySocketTimeout::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestSocketConnectTimeoutWithTinySocketTimeout::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestSocketConnectTimeoutWithTinySocketTimeout::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestSocketConnectTimeoutWithTinySocketTimeout::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestStackConnectionTimeoutWithTinyStackTimeout::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestStackConnectionTimeoutWithTinyStackTimeout::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestStackConnectionTimeoutWithTinyStackTimeout::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestStackConnectionTimeoutWithTinyStackTimeout::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestStackConnectionTimeoutWithTinyStackTimeout::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestStackConnectionTimeoutWithTinyStackTimeout::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaDefaultConnectionWorkflow::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaDefaultConnectionWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaDefaultConnectionWorkflow::test_with_select_epoll", 
"tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaDefaultConnectionWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaDefaultConnectionWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaDefaultConnectionWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaCustomConnectionWorkflow::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaCustomConnectionWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaCustomConnectionWorkflow::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaCustomConnectionWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaCustomConnectionWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionViaCustomConnectionWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionMultipleConfigsDefaultConnectionWorkflow::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionMultipleConfigsDefaultConnectionWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionMultipleConfigsDefaultConnectionWorkflow::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionMultipleConfigsDefaultConnectionWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionMultipleConfigsDefaultConnectionWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionMultipleConfigsDefaultConnectionWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionRetriesWithDefaultConnectionWorkflow::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionRetriesWithDefaultConnectionWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionRetriesWithDefaultConnectionWorkflow::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionRetriesWithDefaultConnectionWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionRetriesWithDefaultConnectionWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionRetriesWithDefaultConnectionWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionConnectionWorkflowSocketConnectionFailure::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionConnectionWorkflowSocketConnectionFailure::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionConnectionWorkflowSocketConnectionFailure::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionConnectionWorkflowSocketConnectionFailure::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionConnectionWorkflowSocketConnectionFailure::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionConnectionWorkflowSocketConnectionFailure::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAMQPHandshakeTimesOutDefaultWorkflow::test_with_asyncio", 
"tests/acceptance/async_adapter_tests.py::TestCreateConnectionAMQPHandshakeTimesOutDefaultWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAMQPHandshakeTimesOutDefaultWorkflow::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAMQPHandshakeTimesOutDefaultWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAMQPHandshakeTimesOutDefaultWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAMQPHandshakeTimesOutDefaultWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndImmediatelyAbortDefaultConnectionWorkflow::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndImmediatelyAbortDefaultConnectionWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndImmediatelyAbortDefaultConnectionWorkflow::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndImmediatelyAbortDefaultConnectionWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndImmediatelyAbortDefaultConnectionWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndImmediatelyAbortDefaultConnectionWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndAsynchronouslyAbortDefaultConnectionWorkflow::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndAsynchronouslyAbortDefaultConnectionWorkflow::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndAsynchronouslyAbortDefaultConnectionWorkflow::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndAsynchronouslyAbortDefaultConnectionWorkflow::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndAsynchronouslyAbortDefaultConnectionWorkflow::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestCreateConnectionAndAsynchronouslyAbortDefaultConnectionWorkflow::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestConfirmSelect::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestConfirmSelect::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestConfirmSelect::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestConfirmSelect::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestConfirmSelect::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestConfirmSelect::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestBlockingNonBlockingBlockingRPCWontStall::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestBlockingNonBlockingBlockingRPCWontStall::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestBlockingNonBlockingBlockingRPCWontStall::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestBlockingNonBlockingBlockingRPCWontStall::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestBlockingNonBlockingBlockingRPCWontStall::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestBlockingNonBlockingBlockingRPCWontStall::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestConsumeCancel::test_with_asyncio", 
"tests/acceptance/async_adapter_tests.py::TestConsumeCancel::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestConsumeCancel::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestConsumeCancel::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestConsumeCancel::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestConsumeCancel::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestExchangeDeclareAndDelete::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestExchangeDeclareAndDelete::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestExchangeDeclareAndDelete::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestExchangeDeclareAndDelete::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestExchangeDeclareAndDelete::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestExchangeDeclareAndDelete::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestExchangeRedeclareWithDifferentValues::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestExchangeRedeclareWithDifferentValues::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestExchangeRedeclareWithDifferentValues::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestExchangeRedeclareWithDifferentValues::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestExchangeRedeclareWithDifferentValues::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestExchangeRedeclareWithDifferentValues::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestNoDeadlockWhenClosingChannelWithPendingBlockedRequestsAndConcurrentChannelCloseFromBroker::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestClosingAChannelPermitsBlockedRequestToComplete::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestClosingAChannelPermitsBlockedRequestToComplete::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestClosingAChannelPermitsBlockedRequestToComplete::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestClosingAChannelPermitsBlockedRequestToComplete::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestClosingAChannelPermitsBlockedRequestToComplete::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestClosingAChannelPermitsBlockedRequestToComplete::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestQueueUnnamedDeclareAndDelete::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestQueueUnnamedDeclareAndDelete::test_with_select_default", 
"tests/acceptance/async_adapter_tests.py::TestQueueUnnamedDeclareAndDelete::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestQueueUnnamedDeclareAndDelete::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestQueueUnnamedDeclareAndDelete::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestQueueUnnamedDeclareAndDelete::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestQueueNamedDeclareAndDelete::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestQueueNamedDeclareAndDelete::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestQueueNamedDeclareAndDelete::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestQueueNamedDeclareAndDelete::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestQueueNamedDeclareAndDelete::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestQueueNamedDeclareAndDelete::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestQueueRedeclareWithDifferentValues::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestQueueRedeclareWithDifferentValues::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestQueueRedeclareWithDifferentValues::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestQueueRedeclareWithDifferentValues::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestQueueRedeclareWithDifferentValues::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestQueueRedeclareWithDifferentValues::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestTX1_Select::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestTX1_Select::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestTX1_Select::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestTX1_Select::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestTX1_Select::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestTX1_Select::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestTX2_Commit::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestTX2_Commit::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestTX2_Commit::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestTX2_Commit::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestTX2_Commit::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestTX2_Commit::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestTX2_CommitFailure::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestTX2_CommitFailure::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestTX2_CommitFailure::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestTX2_CommitFailure::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestTX2_CommitFailure::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestTX2_CommitFailure::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestTX3_Rollback::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestTX3_Rollback::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestTX3_Rollback::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestTX3_Rollback::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestTX3_Rollback::test_with_select_select", 
"tests/acceptance/async_adapter_tests.py::TestTX3_Rollback::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestTX3_RollbackFailure::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestTX3_RollbackFailure::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestTX3_RollbackFailure::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestTX3_RollbackFailure::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestTX3_RollbackFailure::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestTX3_RollbackFailure::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsume::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsume::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsume::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsume::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsume::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsume::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsumeBig::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsumeBig::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsumeBig::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsumeBig::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsumeBig::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndConsumeBig::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndGet::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndGet::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndGet::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndGet::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndGet::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestZ_PublishAndGet::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestZ_AccessDenied::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestZ_AccessDenied::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestZ_AccessDenied::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestZ_AccessDenied::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestZ_AccessDenied::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestZ_AccessDenied::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionTimesOut::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionTimesOut::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionTimesOut::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionTimesOut::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionTimesOut::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionTimesOut::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionUnblocks::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionUnblocks::test_with_select_default", 
"tests/acceptance/async_adapter_tests.py::TestBlockedConnectionUnblocks::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionUnblocks::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionUnblocks::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestBlockedConnectionUnblocks::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeRequestBeforeIOLoopStarts::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeRequestBeforeIOLoopStarts::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeRequestBeforeIOLoopStarts::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeRequestBeforeIOLoopStarts::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeRequestBeforeIOLoopStarts::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeRequestBeforeIOLoopStarts::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromIOLoopThread::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromIOLoopThread::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromIOLoopThread::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromIOLoopThread::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromIOLoopThread::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromIOLoopThread::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromAnotherThread::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromAnotherThread::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromAnotherThread::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromAnotherThread::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromAnotherThread::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestAddCallbackThreadsafeFromAnotherThread::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestIOLoopStopBeforeIOLoopStarts::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestIOLoopStopBeforeIOLoopStarts::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestIOLoopStopBeforeIOLoopStarts::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestIOLoopStopBeforeIOLoopStarts::test_with_select_poll", "tests/acceptance/async_adapter_tests.py::TestIOLoopStopBeforeIOLoopStarts::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestIOLoopStopBeforeIOLoopStarts::test_with_tornado", "tests/acceptance/async_adapter_tests.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test_with_asyncio", "tests/acceptance/async_adapter_tests.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test_with_select_default", "tests/acceptance/async_adapter_tests.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test_with_select_epoll", "tests/acceptance/async_adapter_tests.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test_with_select_poll", 
"tests/acceptance/async_adapter_tests.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test_with_select_select", "tests/acceptance/async_adapter_tests.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test_with_tornado", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestCreateConnectionWithNoneSocketAndStackTimeouts::test", "tests/acceptance/blocking_adapter_test.py::TestCreateConnectionFromTwoConfigsFirstUnreachable::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnectionRaisesWrongState::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerExitSurvivesClosedConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestLostConnectionResultsInIsClosedConnectionAndChannel::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUsingInvalidQueueArgument::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddCallbackThreadsafeFromSameThread::test", "tests/acceptance/blocking_adapter_test.py::TestAddCallbackThreadsafeFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", 
"tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeWithAckFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorWithAckFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStartConsumingRaisesChannelClosedOnSameChannelFailure::test", "tests/acceptance/blocking_adapter_test.py::TestStartConsumingReturnsAfterCancelFromBroker::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorInactivityTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorInterruptedByCancelFromBroker::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorCancelEncountersCancelFromBroker::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorPassesChannelClosedOnSameChannelFailure::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateConnectionFromTwoUnreachableConfigs::test", "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/channel_tests.py::ChannelTests::test_add_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_callback_multiple_replies", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_cancel_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_get_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_close_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_flow_added", "tests/unit/channel_tests.py::ChannelTests::test_add_on_cancel_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_close_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_return_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_calls_send_method", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch_with_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_calls_raise_if_not_open", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch_no_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_then_close", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_unknown_consumer_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_calls_raise_if_not_open", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_cancelled_full", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_in_consumers", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_callback_value", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_duplicate_consumer_tag_raises", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_calls_require_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called_auto_ack", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_invalid_prefetch_count_raises_error", 
"tests/unit/channel_tests.py::ChannelTests::test_basic_qos_invalid_prefetch_size_raises_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_channel_open_add_callbacks_called", "tests/unit/channel_tests.py::ChannelTests::test_cleanup", "tests/unit/channel_tests.py::ChannelTests::test_close_basic_cancel_called", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closed_state_raises_channel_error_and_stays_closed", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closing_state_raises_already_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_open_state_transitions_to_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_opening_state", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_async", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_ack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_without_nowait_selectok", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_ack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_nack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_callback_call_count", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_confirms", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_with_bad_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_consumer_tags", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_value_error_on_invalid_callback", 
"tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_flow_off_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_deliver_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_get_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_return_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_method_returns_none", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_header_frame", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_method_frame", "tests/unit/channel_tests.py::ChannelTests::test_has_content_false", "tests/unit/channel_tests.py::ChannelTests::test_has_content_true", "tests/unit/channel_tests.py::ChannelTests::test_immediate_called_logger_warning", "tests/unit/channel_tests.py::ChannelTests::test_init_blocked", "tests/unit/channel_tests.py::ChannelTests::test_init_blocking", "tests/unit/channel_tests.py::ChannelTests::test_init_callbacks", "tests/unit/channel_tests.py::ChannelTests::test_init_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_init_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_connection", "tests/unit/channel_tests.py::ChannelTests::test_init_consumers", "tests/unit/channel_tests.py::ChannelTests::test_init_content_frame_assembler", "tests/unit/channel_tests.py::ChannelTests::test_init_flow", "tests/unit/channel_tests.py::ChannelTests::test_init_has_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_invalid_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_getok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_openok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_state", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_true", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_true", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_not_appended_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_close_from_broker_in_closing_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_from_broker_in_open_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_from_broker_warning", 
"tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closed_state_is_suppressed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closing_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_open_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_opening_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok_following_close_from_broker", "tests/unit/channel_tests.py::ChannelTests::test_on_confirm_selectok", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_eventok", "tests/unit/channel_tests.py::ChannelTests::test_on_flow", "tests/unit/channel_tests.py::ChannelTests::test_on_flow_with_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_calls_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_getempty", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_onreturn", "tests/unit/channel_tests.py::ChannelTests::test_onreturn_warning", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_raise_if_not_open_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_repr", 
"tests/unit/channel_tests.py::ChannelTests::test_rpc_adds_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_enters_blocking_and_adds_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_rpc_not_blocking_and_no_on_synchronous_complete_when_no_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_type_error_with_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_value_error_with_unacceptable_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_while_blocking_appends_blocked_collection", "tests/unit/channel_tests.py::ChannelTests::test_send_method", "tests/unit/channel_tests.py::ChannelTests::test_set_state", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_rollback_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_select_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_validate_callback_raises_value_error_not_callable", "tests/unit/connection_tests.py::ConnectionTests::test_add_callbacks", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_close_callback", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_connection_blocked_callback", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_connection_unblocked_callback", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_open_error_callback", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_multiple_blocked_in_a_row_sets_timer_once", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_multiple_unblocked_in_a_row_removes_timer_once", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_on_stream_terminated_removes_timer", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_timeout_terminates_connection", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_unblocked_removes_timer", "tests/unit/connection_tests.py::ConnectionTests::test_channel", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_closed_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_closing_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_init_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_protocol_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_start_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_channel_on_tune_connection_raises_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties_default", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties_override", "tests/unit/connection_tests.py::ConnectionTests::test_close_calls_on_close_ready_when_no_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_closes_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_closes_opening_channels", 
"tests/unit/connection_tests.py::ConnectionTests::test_close_does_not_close_closing_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_raises_wrong_state_when_already_closed_or_closing", "tests/unit/connection_tests.py::ConnectionTests::test_connect_no_adapter_connect_from_constructor_with_external_workflow", "tests/unit/connection_tests.py::ConnectionTests::test_connection_blocked_sets_timer", "tests/unit/connection_tests.py::ConnectionTests::test_create_with_blocked_connection_timeout_config", "tests/unit/connection_tests.py::ConnectionTests::test_deliver_frame_to_channel_with_frame_for_unknown_channel", "tests/unit/connection_tests.py::ConnectionTests::test_new_conn_should_use_first_channel", "tests/unit/connection_tests.py::ConnectionTests::test_next_channel_number_returns_lowest_unused", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_closing_state_last_channel_calls_on_close_ready", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_closing_state_more_channels_no_on_close_ready", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_non_closing_state", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_with_closing_channels", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_close_from_broker_passes_correct_exception", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_close_ok", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_start", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_tune", "tests/unit/connection_tests.py::ConnectionTests::test_on_data_available", "tests/unit/connection_tests.py::ConnectionTests::test_on_stream_connected", "tests/unit/connection_tests.py::ConnectionTests::test_on_stream_terminated_cleans_up", "tests/unit/connection_tests.py::ConnectionTests::test_on_stream_terminated_invokes_access_denied_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_stream_terminated_invokes_auth_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_stream_terminated_invokes_connection_closed_callback", "tests/unit/connection_tests.py::ConnectionTests::test_on_stream_terminated_invokes_protocol_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_send_message_updates_frames_sent_and_bytes_sent", "tests/unit/connection_tests.py::ConnectionTests::test_set_backpressure_multiplier" ]
[]
BSD 3-Clause "New" or "Revised" License
2266
[ "pika/channel.py", "pika/connection.py" ]
[ "pika/channel.py", "pika/connection.py" ]
oasis-open__cti-pattern-validator-46
5f50cd15f293440a4447d01795222eb97c8f495e
2018-03-07 19:51:01
801c2364013d3cc5529f5e0b967def7f505a91e4
diff --git a/stix2patterns/grammars/STIXPatternLexer.py b/stix2patterns/grammars/STIXPatternLexer.py index dd44915..dc01a53 100644 --- a/stix2patterns/grammars/STIXPatternLexer.py +++ b/stix2patterns/grammars/STIXPatternLexer.py @@ -1,4 +1,4 @@ -# Generated from STIXPattern.g4 by ANTLR 4.7 +# Generated from STIXPattern.g4 by ANTLR 4.7.1 # encoding: utf-8 from __future__ import print_function @@ -11,7 +11,7 @@ from antlr4 import * def serializedATN(): with StringIO() as buf: buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2") - buf.write(u"\64\u020e\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6") + buf.write(u"\67\u0257\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6") buf.write(u"\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4") buf.write(u"\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t") buf.write(u"\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27") @@ -22,233 +22,275 @@ def serializedATN(): buf.write(u"\4\63\t\63\4\64\t\64\4\65\t\65\4\66\t\66\4\67\t\67\4") buf.write(u"8\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@") buf.write(u"\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\t") - buf.write(u"I\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\3\2\5\2\u00a3") - buf.write(u"\n\2\3\2\3\2\3\2\7\2\u00a8\n\2\f\2\16\2\u00ab\13\2\5") - buf.write(u"\2\u00ad\n\2\3\3\5\3\u00b0\n\3\3\3\7\3\u00b3\n\3\f\3") - buf.write(u"\16\3\u00b6\13\3\3\3\3\3\6\3\u00ba\n\3\r\3\16\3\u00bb") - buf.write(u"\3\4\3\4\3\4\7\4\u00c1\n\4\f\4\16\4\u00c4\13\4\3\4\3") - buf.write(u"\4\3\5\3\5\3\5\7\5\u00cb\n\5\f\5\16\5\u00ce\13\5\3\5") - buf.write(u"\3\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00d8\n\6\f\6\16\6\u00db") - buf.write(u"\13\6\3\6\3\6\3\7\3\7\5\7\u00e1\n\7\3\b\3\b\3\b\3\b\3") - buf.write(u"\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3") - buf.write(u"\b\3\b\3\b\3\b\3\b\3\b\6\b\u00fa\n\b\r\b\16\b\u00fb\5") - buf.write(u"\b\u00fe\n\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n") - buf.write(u"\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f") - buf.write(u"\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16") - buf.write(u"\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3") - buf.write(u"\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20") - buf.write(u"\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3") - buf.write(u"\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24") - buf.write(u"\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3") - buf.write(u"\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\30") - buf.write(u"\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3") - buf.write(u"\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33") - buf.write(u"\3\33\7\33\u0177\n\33\f\33\16\33\u017a\13\33\3\34\3\34") - buf.write(u"\7\34\u017e\n\34\f\34\16\34\u0181\13\34\3\35\3\35\3\35") - buf.write(u"\5\35\u0186\n\35\3\36\3\36\3\36\3\36\5\36\u018c\n\36") - buf.write(u"\3\37\3\37\3 \3 \3 \3!\3!\3\"\3\"\3\"\3#\3#\3$\3$\3%") - buf.write(u"\3%\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3,\3,\3-\3") - buf.write(u"-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\63\3\63") - buf.write(u"\3\64\3\64\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39") - buf.write(u"\3:\3:\3;\3;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B\3") - buf.write(u"B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3K") - buf.write(u"\3K\3L\3L\3L\3M\3M\3N\6N\u01f0\nN\rN\16N\u01f1\3N\3N") - buf.write(u"\3O\3O\3O\3O\7O\u01fa\nO\fO\16O\u01fd\13O\3O\3O\3O\3") - buf.write(u"O\3O\3P\3P\3P\3P\7P\u0208\nP\fP\16P\u020b\13P\3P\3P\3") - buf.write(u"\u01fb\2Q\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25") - 
buf.write(u"\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26") - buf.write(u"+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C") - buf.write(u"#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\2c\2e\2g\2i\2k\2") - buf.write(u"m\2o\2q\2s\2u\2w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085") - buf.write(u"\2\u0087\2\u0089\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093") - buf.write(u"\2\u0095\2\u0097\2\u0099\2\u009b\62\u009d\63\u009f\64") - buf.write(u"\3\2\'\4\2--//\3\2\63;\3\2\62;\4\2))^^\5\2C\\aac|\6\2") - buf.write(u"\62;C\\aac|\7\2//\62;C\\aac|\4\2CCcc\4\2DDdd\4\2EEee") - buf.write(u"\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4\2") - buf.write(u"LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRrr") - buf.write(u"\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2WWww\4\2XXxx\4\2") - buf.write(u"YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\5\2\62;CHch\7\2--\61;") - buf.write(u"??C\\c|\f\2\13\17\"\"\u0087\u0087\u00a2\u00a2\u1682\u1682") - buf.write(u"\u2002\u200c\u202a\u202b\u2031\u2031\u2061\u2061\u3002") - buf.write(u"\u3002\4\2\f\f\17\17\2\u0205\2\3\3\2\2\2\2\5\3\2\2\2") - buf.write(u"\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17") - buf.write(u"\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27") - buf.write(u"\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37") - buf.write(u"\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2") - buf.write(u"\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2") - buf.write(u"\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2") - buf.write(u"\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2") - buf.write(u"\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3") - buf.write(u"\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2") - buf.write(u"W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2") - buf.write(u"\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f\3\2\2\2\3\u00a2") - buf.write(u"\3\2\2\2\5\u00af\3\2\2\2\7\u00bd\3\2\2\2\t\u00c7\3\2") - buf.write(u"\2\2\13\u00d1\3\2\2\2\r\u00e0\3\2\2\2\17\u00e2\3\2\2") - buf.write(u"\2\21\u0102\3\2\2\2\23\u0106\3\2\2\2\25\u0109\3\2\2\2") - buf.write(u"\27\u010d\3\2\2\2\31\u0118\3\2\2\2\33\u011d\3\2\2\2\35") - buf.write(u"\u0125\3\2\2\2\37\u0130\3\2\2\2!\u0139\3\2\2\2#\u013e") - buf.write(u"\3\2\2\2%\u0141\3\2\2\2\'\u0147\3\2\2\2)\u014c\3\2\2") - buf.write(u"\2+\u0154\3\2\2\2-\u0159\3\2\2\2/\u015f\3\2\2\2\61\u0166") - buf.write(u"\3\2\2\2\63\u016e\3\2\2\2\65\u0174\3\2\2\2\67\u017b\3") - buf.write(u"\2\2\29\u0185\3\2\2\2;\u018b\3\2\2\2=\u018d\3\2\2\2?") - buf.write(u"\u018f\3\2\2\2A\u0192\3\2\2\2C\u0194\3\2\2\2E\u0197\3") - buf.write(u"\2\2\2G\u0199\3\2\2\2I\u019b\3\2\2\2K\u019d\3\2\2\2M") - buf.write(u"\u019f\3\2\2\2O\u01a1\3\2\2\2Q\u01a3\3\2\2\2S\u01a5\3") - buf.write(u"\2\2\2U\u01a7\3\2\2\2W\u01a9\3\2\2\2Y\u01ab\3\2\2\2[") - buf.write(u"\u01ad\3\2\2\2]\u01af\3\2\2\2_\u01b1\3\2\2\2a\u01b3\3") - buf.write(u"\2\2\2c\u01b5\3\2\2\2e\u01b7\3\2\2\2g\u01b9\3\2\2\2i") - buf.write(u"\u01bb\3\2\2\2k\u01bd\3\2\2\2m\u01bf\3\2\2\2o\u01c1\3") - buf.write(u"\2\2\2q\u01c3\3\2\2\2s\u01c5\3\2\2\2u\u01c7\3\2\2\2w") - buf.write(u"\u01c9\3\2\2\2y\u01cb\3\2\2\2{\u01cd\3\2\2\2}\u01cf\3") - buf.write(u"\2\2\2\177\u01d1\3\2\2\2\u0081\u01d3\3\2\2\2\u0083\u01d5") - buf.write(u"\3\2\2\2\u0085\u01d7\3\2\2\2\u0087\u01d9\3\2\2\2\u0089") - buf.write(u"\u01db\3\2\2\2\u008b\u01dd\3\2\2\2\u008d\u01df\3\2\2") - buf.write(u"\2\u008f\u01e1\3\2\2\2\u0091\u01e3\3\2\2\2\u0093\u01e5") - buf.write(u"\3\2\2\2\u0095\u01e7\3\2\2\2\u0097\u01e9\3\2\2\2\u0099") - buf.write(u"\u01ec\3\2\2\2\u009b\u01ef\3\2\2\2\u009d\u01f5\3\2\2") - 
buf.write(u"\2\u009f\u0203\3\2\2\2\u00a1\u00a3\t\2\2\2\u00a2\u00a1") - buf.write(u"\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3\u00ac\3\2\2\2\u00a4") - buf.write(u"\u00ad\7\62\2\2\u00a5\u00a9\t\3\2\2\u00a6\u00a8\t\4\2") - buf.write(u"\2\u00a7\u00a6\3\2\2\2\u00a8\u00ab\3\2\2\2\u00a9\u00a7") - buf.write(u"\3\2\2\2\u00a9\u00aa\3\2\2\2\u00aa\u00ad\3\2\2\2\u00ab") - buf.write(u"\u00a9\3\2\2\2\u00ac\u00a4\3\2\2\2\u00ac\u00a5\3\2\2") - buf.write(u"\2\u00ad\4\3\2\2\2\u00ae\u00b0\t\2\2\2\u00af\u00ae\3") - buf.write(u"\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b4\3\2\2\2\u00b1") - buf.write(u"\u00b3\t\4\2\2\u00b2\u00b1\3\2\2\2\u00b3\u00b6\3\2\2") - buf.write(u"\2\u00b4\u00b2\3\2\2\2\u00b4\u00b5\3\2\2\2\u00b5\u00b7") - buf.write(u"\3\2\2\2\u00b6\u00b4\3\2\2\2\u00b7\u00b9\7\60\2\2\u00b8") - buf.write(u"\u00ba\t\4\2\2\u00b9\u00b8\3\2\2\2\u00ba\u00bb\3\2\2") - buf.write(u"\2\u00bb\u00b9\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\6\3") - buf.write(u"\2\2\2\u00bd\u00be\7j\2\2\u00be\u00c2\5E#\2\u00bf\u00c1") - buf.write(u"\5\u0097L\2\u00c0\u00bf\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2") - buf.write(u"\u00c0\3\2\2\2\u00c2\u00c3\3\2\2\2\u00c3\u00c5\3\2\2") - buf.write(u"\2\u00c4\u00c2\3\2\2\2\u00c5\u00c6\5E#\2\u00c6\b\3\2") - buf.write(u"\2\2\u00c7\u00c8\7d\2\2\u00c8\u00cc\5E#\2\u00c9\u00cb") - buf.write(u"\5\u0099M\2\u00ca\u00c9\3\2\2\2\u00cb\u00ce\3\2\2\2\u00cc") - buf.write(u"\u00ca\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00cf\3\2\2") - buf.write(u"\2\u00ce\u00cc\3\2\2\2\u00cf\u00d0\5E#\2\u00d0\n\3\2") - buf.write(u"\2\2\u00d1\u00d9\5E#\2\u00d2\u00d8\n\5\2\2\u00d3\u00d4") - buf.write(u"\7^\2\2\u00d4\u00d8\7)\2\2\u00d5\u00d6\7^\2\2\u00d6\u00d8") - buf.write(u"\7^\2\2\u00d7\u00d2\3\2\2\2\u00d7\u00d3\3\2\2\2\u00d7") - buf.write(u"\u00d5\3\2\2\2\u00d8\u00db\3\2\2\2\u00d9\u00d7\3\2\2") - buf.write(u"\2\u00d9\u00da\3\2\2\2\u00da\u00dc\3\2\2\2\u00db\u00d9") - buf.write(u"\3\2\2\2\u00dc\u00dd\5E#\2\u00dd\f\3\2\2\2\u00de\u00e1") - buf.write(u"\5+\26\2\u00df\u00e1\5-\27\2\u00e0\u00de\3\2\2\2\u00e0") - buf.write(u"\u00df\3\2\2\2\u00e1\16\3\2\2\2\u00e2\u00e3\7v\2\2\u00e3") - buf.write(u"\u00e4\5E#\2\u00e4\u00e5\t\4\2\2\u00e5\u00e6\t\4\2\2") - buf.write(u"\u00e6\u00e7\t\4\2\2\u00e7\u00e8\t\4\2\2\u00e8\u00e9") - buf.write(u"\5W,\2\u00e9\u00ea\t\4\2\2\u00ea\u00eb\t\4\2\2\u00eb") - buf.write(u"\u00ec\5W,\2\u00ec\u00ed\t\4\2\2\u00ed\u00ee\t\4\2\2") - buf.write(u"\u00ee\u00ef\7V\2\2\u00ef\u00f0\t\4\2\2\u00f0\u00f1\t") - buf.write(u"\4\2\2\u00f1\u00f2\5G$\2\u00f2\u00f3\t\4\2\2\u00f3\u00f4") - buf.write(u"\t\4\2\2\u00f4\u00f5\5G$\2\u00f5\u00f6\t\4\2\2\u00f6") - buf.write(u"\u00fd\t\4\2\2\u00f7\u00f9\5I%\2\u00f8\u00fa\t\4\2\2") - buf.write(u"\u00f9\u00f8\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fb\u00f9") - buf.write(u"\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00fe\3\2\2\2\u00fd") - buf.write(u"\u00f7\3\2\2\2\u00fd\u00fe\3\2\2\2\u00fe\u00ff\3\2\2") - buf.write(u"\2\u00ff\u0100\7\\\2\2\u0100\u0101\5E#\2\u0101\20\3\2") - buf.write(u"\2\2\u0102\u0103\5a\61\2\u0103\u0104\5{>\2\u0104\u0105") - buf.write(u"\5g\64\2\u0105\22\3\2\2\2\u0106\u0107\5}?\2\u0107\u0108") - buf.write(u"\5\u0083B\2\u0108\24\3\2\2\2\u0109\u010a\5{>\2\u010a") - buf.write(u"\u010b\5}?\2\u010b\u010c\5\u0087D\2\u010c\26\3\2\2\2") - buf.write(u"\u010d\u010e\5k\66\2\u010e\u010f\5}?\2\u010f\u0110\5") - buf.write(u"w<\2\u0110\u0111\5w<\2\u0111\u0112\5}?\2\u0112\u0113") - buf.write(u"\5\u008dG\2\u0113\u0114\5i\65\2\u0114\u0115\5g\64\2\u0115") - buf.write(u"\u0116\5c\62\2\u0116\u0117\5\u0091I\2\u0117\30\3\2\2") - buf.write(u"\2\u0118\u0119\5w<\2\u0119\u011a\5q9\2\u011a\u011b\5") - 
buf.write(u"u;\2\u011b\u011c\5i\65\2\u011c\32\3\2\2\2\u011d\u011e") - buf.write(u"\5y=\2\u011e\u011f\5a\61\2\u011f\u0120\5\u0087D\2\u0120") - buf.write(u"\u0121\5e\63\2\u0121\u0122\5o8\2\u0122\u0123\5i\65\2") - buf.write(u"\u0123\u0124\5\u0085C\2\u0124\34\3\2\2\2\u0125\u0126") - buf.write(u"\5q9\2\u0126\u0127\5\u0085C\2\u0127\u0128\5\u0085C\2") - buf.write(u"\u0128\u0129\5\u0089E\2\u0129\u012a\5\177@\2\u012a\u012b") - buf.write(u"\5i\65\2\u012b\u012c\5\u0083B\2\u012c\u012d\5\u0085C") - buf.write(u"\2\u012d\u012e\5i\65\2\u012e\u012f\5\u0087D\2\u012f\36") - buf.write(u"\3\2\2\2\u0130\u0131\5q9\2\u0131\u0132\5\u0085C\2\u0132") - buf.write(u"\u0133\5\u0085C\2\u0133\u0134\5\u0089E\2\u0134\u0135") - buf.write(u"\5c\62\2\u0135\u0136\5\u0085C\2\u0136\u0137\5i\65\2\u0137") - buf.write(u"\u0138\5\u0087D\2\u0138 \3\2\2\2\u0139\u013a\5w<\2\u013a") - buf.write(u"\u013b\5a\61\2\u013b\u013c\5\u0085C\2\u013c\u013d\5\u0087") - buf.write(u"D\2\u013d\"\3\2\2\2\u013e\u013f\5q9\2\u013f\u0140\5{") - buf.write(u">\2\u0140$\3\2\2\2\u0141\u0142\5\u0085C\2\u0142\u0143") - buf.write(u"\5\u0087D\2\u0143\u0144\5a\61\2\u0144\u0145\5\u0083B") - buf.write(u"\2\u0145\u0146\5\u0087D\2\u0146&\3\2\2\2\u0147\u0148") - buf.write(u"\5\u0085C\2\u0148\u0149\5\u0087D\2\u0149\u014a\5}?\2") - buf.write(u"\u014a\u014b\5\177@\2\u014b(\3\2\2\2\u014c\u014d\5\u0085") - buf.write(u"C\2\u014d\u014e\5i\65\2\u014e\u014f\5e\63\2\u014f\u0150") - buf.write(u"\5}?\2\u0150\u0151\5{>\2\u0151\u0152\5g\64\2\u0152\u0153") - buf.write(u"\5\u0085C\2\u0153*\3\2\2\2\u0154\u0155\5\u0087D\2\u0155") - buf.write(u"\u0156\5\u0083B\2\u0156\u0157\5\u0089E\2\u0157\u0158") - buf.write(u"\5i\65\2\u0158,\3\2\2\2\u0159\u015a\5k\66\2\u015a\u015b") - buf.write(u"\5a\61\2\u015b\u015c\5w<\2\u015c\u015d\5\u0085C\2\u015d") - buf.write(u"\u015e\5i\65\2\u015e.\3\2\2\2\u015f\u0160\5\u008dG\2") - buf.write(u"\u0160\u0161\5q9\2\u0161\u0162\5\u0087D\2\u0162\u0163") - buf.write(u"\5o8\2\u0163\u0164\5q9\2\u0164\u0165\5{>\2\u0165\60\3") - buf.write(u"\2\2\2\u0166\u0167\5\u0083B\2\u0167\u0168\5i\65\2\u0168") - buf.write(u"\u0169\5\177@\2\u0169\u016a\5i\65\2\u016a\u016b\5a\61") - buf.write(u"\2\u016b\u016c\5\u0087D\2\u016c\u016d\5\u0085C\2\u016d") - buf.write(u"\62\3\2\2\2\u016e\u016f\5\u0087D\2\u016f\u0170\5q9\2") - buf.write(u"\u0170\u0171\5y=\2\u0171\u0172\5i\65\2\u0172\u0173\5") - buf.write(u"\u0085C\2\u0173\64\3\2\2\2\u0174\u0178\t\6\2\2\u0175") - buf.write(u"\u0177\t\7\2\2\u0176\u0175\3\2\2\2\u0177\u017a\3\2\2") - buf.write(u"\2\u0178\u0176\3\2\2\2\u0178\u0179\3\2\2\2\u0179\66\3") - buf.write(u"\2\2\2\u017a\u0178\3\2\2\2\u017b\u017f\t\6\2\2\u017c") - buf.write(u"\u017e\t\b\2\2\u017d\u017c\3\2\2\2\u017e\u0181\3\2\2") - buf.write(u"\2\u017f\u017d\3\2\2\2\u017f\u0180\3\2\2\2\u01808\3\2") - buf.write(u"\2\2\u0181\u017f\3\2\2\2\u0182\u0186\7?\2\2\u0183\u0184") - buf.write(u"\7?\2\2\u0184\u0186\7?\2\2\u0185\u0182\3\2\2\2\u0185") - buf.write(u"\u0183\3\2\2\2\u0186:\3\2\2\2\u0187\u0188\7#\2\2\u0188") - buf.write(u"\u018c\7?\2\2\u0189\u018a\7>\2\2\u018a\u018c\7@\2\2\u018b") - buf.write(u"\u0187\3\2\2\2\u018b\u0189\3\2\2\2\u018c<\3\2\2\2\u018d") - buf.write(u"\u018e\7>\2\2\u018e>\3\2\2\2\u018f\u0190\7>\2\2\u0190") - buf.write(u"\u0191\7?\2\2\u0191@\3\2\2\2\u0192\u0193\7@\2\2\u0193") - buf.write(u"B\3\2\2\2\u0194\u0195\7@\2\2\u0195\u0196\7?\2\2\u0196") - buf.write(u"D\3\2\2\2\u0197\u0198\7)\2\2\u0198F\3\2\2\2\u0199\u019a") - buf.write(u"\7<\2\2\u019aH\3\2\2\2\u019b\u019c\7\60\2\2\u019cJ\3") - buf.write(u"\2\2\2\u019d\u019e\7.\2\2\u019eL\3\2\2\2\u019f\u01a0") - 
buf.write(u"\7+\2\2\u01a0N\3\2\2\2\u01a1\u01a2\7*\2\2\u01a2P\3\2") - buf.write(u"\2\2\u01a3\u01a4\7_\2\2\u01a4R\3\2\2\2\u01a5\u01a6\7") - buf.write(u"]\2\2\u01a6T\3\2\2\2\u01a7\u01a8\7-\2\2\u01a8V\3\2\2") - buf.write(u"\2\u01a9\u01aa\5Y-\2\u01aaX\3\2\2\2\u01ab\u01ac\7/\2") - buf.write(u"\2\u01acZ\3\2\2\2\u01ad\u01ae\7`\2\2\u01ae\\\3\2\2\2") - buf.write(u"\u01af\u01b0\7\61\2\2\u01b0^\3\2\2\2\u01b1\u01b2\7,\2") - buf.write(u"\2\u01b2`\3\2\2\2\u01b3\u01b4\t\t\2\2\u01b4b\3\2\2\2") - buf.write(u"\u01b5\u01b6\t\n\2\2\u01b6d\3\2\2\2\u01b7\u01b8\t\13") - buf.write(u"\2\2\u01b8f\3\2\2\2\u01b9\u01ba\t\f\2\2\u01bah\3\2\2") - buf.write(u"\2\u01bb\u01bc\t\r\2\2\u01bcj\3\2\2\2\u01bd\u01be\t\16") - buf.write(u"\2\2\u01bel\3\2\2\2\u01bf\u01c0\t\17\2\2\u01c0n\3\2\2") - buf.write(u"\2\u01c1\u01c2\t\20\2\2\u01c2p\3\2\2\2\u01c3\u01c4\t") - buf.write(u"\21\2\2\u01c4r\3\2\2\2\u01c5\u01c6\t\22\2\2\u01c6t\3") - buf.write(u"\2\2\2\u01c7\u01c8\t\23\2\2\u01c8v\3\2\2\2\u01c9\u01ca") - buf.write(u"\t\24\2\2\u01cax\3\2\2\2\u01cb\u01cc\t\25\2\2\u01ccz") - buf.write(u"\3\2\2\2\u01cd\u01ce\t\26\2\2\u01ce|\3\2\2\2\u01cf\u01d0") - buf.write(u"\t\27\2\2\u01d0~\3\2\2\2\u01d1\u01d2\t\30\2\2\u01d2\u0080") - buf.write(u"\3\2\2\2\u01d3\u01d4\t\31\2\2\u01d4\u0082\3\2\2\2\u01d5") - buf.write(u"\u01d6\t\32\2\2\u01d6\u0084\3\2\2\2\u01d7\u01d8\t\33") - buf.write(u"\2\2\u01d8\u0086\3\2\2\2\u01d9\u01da\t\34\2\2\u01da\u0088") - buf.write(u"\3\2\2\2\u01db\u01dc\t\35\2\2\u01dc\u008a\3\2\2\2\u01dd") - buf.write(u"\u01de\t\36\2\2\u01de\u008c\3\2\2\2\u01df\u01e0\t\37") - buf.write(u"\2\2\u01e0\u008e\3\2\2\2\u01e1\u01e2\t \2\2\u01e2\u0090") - buf.write(u"\3\2\2\2\u01e3\u01e4\t!\2\2\u01e4\u0092\3\2\2\2\u01e5") - buf.write(u"\u01e6\t\"\2\2\u01e6\u0094\3\2\2\2\u01e7\u01e8\t#\2\2") - buf.write(u"\u01e8\u0096\3\2\2\2\u01e9\u01ea\5\u0095K\2\u01ea\u01eb") - buf.write(u"\5\u0095K\2\u01eb\u0098\3\2\2\2\u01ec\u01ed\t$\2\2\u01ed") - buf.write(u"\u009a\3\2\2\2\u01ee\u01f0\t%\2\2\u01ef\u01ee\3\2\2\2") - buf.write(u"\u01f0\u01f1\3\2\2\2\u01f1\u01ef\3\2\2\2\u01f1\u01f2") - buf.write(u"\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\bN\2\2\u01f4") - buf.write(u"\u009c\3\2\2\2\u01f5\u01f6\7\61\2\2\u01f6\u01f7\7,\2") - buf.write(u"\2\u01f7\u01fb\3\2\2\2\u01f8\u01fa\13\2\2\2\u01f9\u01f8") - buf.write(u"\3\2\2\2\u01fa\u01fd\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fb") - buf.write(u"\u01f9\3\2\2\2\u01fc\u01fe\3\2\2\2\u01fd\u01fb\3\2\2") - buf.write(u"\2\u01fe\u01ff\7,\2\2\u01ff\u0200\7\61\2\2\u0200\u0201") - buf.write(u"\3\2\2\2\u0201\u0202\bO\2\2\u0202\u009e\3\2\2\2\u0203") - buf.write(u"\u0204\7\61\2\2\u0204\u0205\7\61\2\2\u0205\u0209\3\2") - buf.write(u"\2\2\u0206\u0208\n&\2\2\u0207\u0206\3\2\2\2\u0208\u020b") - buf.write(u"\3\2\2\2\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020a") - buf.write(u"\u020c\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u020d\bP\2\2") - buf.write(u"\u020d\u00a0\3\2\2\2\27\2\u00a2\u00a9\u00ac\u00af\u00b4") - buf.write(u"\u00bb\u00c2\u00cc\u00d7\u00d9\u00e0\u00fb\u00fd\u0178") - buf.write(u"\u017f\u0185\u018b\u01f1\u01fb\u0209\3\b\2\2") + buf.write(u"I\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R") + buf.write(u"\tR\4S\tS\3\2\3\2\3\2\3\2\7\2\u00ac\n\2\f\2\16\2\u00af") + buf.write(u"\13\2\5\2\u00b1\n\2\3\3\5\3\u00b4\n\3\3\3\3\3\3\3\7\3") + buf.write(u"\u00b9\n\3\f\3\16\3\u00bc\13\3\5\3\u00be\n\3\3\4\3\4") + buf.write(u"\7\4\u00c2\n\4\f\4\16\4\u00c5\13\4\3\4\3\4\6\4\u00c9") + buf.write(u"\n\4\r\4\16\4\u00ca\3\5\5\5\u00ce\n\5\3\5\7\5\u00d1\n") + buf.write(u"\5\f\5\16\5\u00d4\13\5\3\5\3\5\6\5\u00d8\n\5\r\5\16\5") + 
buf.write(u"\u00d9\3\6\3\6\3\6\7\6\u00df\n\6\f\6\16\6\u00e2\13\6") + buf.write(u"\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\7\7\u00ed\n\7\f") + buf.write(u"\7\16\7\u00f0\13\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3") + buf.write(u"\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\5\7\u0103\n\7\3\7") + buf.write(u"\3\7\3\b\3\b\3\b\3\b\3\b\3\b\7\b\u010d\n\b\f\b\16\b\u0110") + buf.write(u"\13\b\3\b\3\b\3\t\3\t\5\t\u0116\n\t\3\n\3\n\3\n\3\n\3") + buf.write(u"\n\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u0123\n\n\3\n\3\n\3\n") + buf.write(u"\3\n\3\n\3\n\3\n\5\n\u012c\n\n\3\n\3\n\3\n\3\n\3\n\5") + buf.write(u"\n\u0133\n\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u013d") + buf.write(u"\n\n\3\n\3\n\6\n\u0141\n\n\r\n\16\n\u0142\5\n\u0145\n") + buf.write(u"\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\r\3") + buf.write(u"\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3") + buf.write(u"\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20") + buf.write(u"\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3") + buf.write(u"\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22") + buf.write(u"\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\24\3") + buf.write(u"\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26") + buf.write(u"\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3") + buf.write(u"\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31") + buf.write(u"\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3") + buf.write(u"\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34") + buf.write(u"\3\35\3\35\7\35\u01be\n\35\f\35\16\35\u01c1\13\35\3\36") + buf.write(u"\3\36\7\36\u01c5\n\36\f\36\16\36\u01c8\13\36\3\37\3\37") + buf.write(u"\3\37\5\37\u01cd\n\37\3 \3 \3 \3 \5 \u01d3\n \3!\3!\3") + buf.write(u"\"\3\"\3\"\3#\3#\3$\3$\3$\3%\3%\3&\3&\3\'\3\'\3(\3(\3") + buf.write(u")\3)\3*\3*\3+\3+\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3") + buf.write(u"\61\3\61\3\62\3\62\3\63\3\63\3\64\3\64\3\65\3\65\3\66") + buf.write(u"\3\66\3\67\3\67\38\38\39\39\3:\3:\3;\3;\3<\3<\3=\3=\3") + buf.write(u">\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F") + buf.write(u"\3G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3N\3N\3N\3") + buf.write(u"O\3O\3P\6P\u0237\nP\rP\16P\u0238\3P\3P\3Q\3Q\3Q\3Q\7") + buf.write(u"Q\u0241\nQ\fQ\16Q\u0244\13Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3") + buf.write(u"R\7R\u024f\nR\fR\16R\u0252\13R\3R\3R\3S\3S\3\u0242\2") + buf.write(u"T\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r") + buf.write(u"\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30") + buf.write(u"/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K") + buf.write(u"\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\2g\2i\2k\2m\2o\2") + buf.write(u"q\2s\2u\2w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085\2\u0087") + buf.write(u"\2\u0089\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093\2\u0095") + buf.write(u"\2\u0097\2\u0099\2\u009b\2\u009d\2\u009f\64\u00a1\65") + buf.write(u"\u00a3\66\u00a5\67\3\2-\3\2\639\3\2\62;\3\2\63:\4\2)") + buf.write(u")^^\3\2\63;\3\2\62\64\3\2\63\64\3\2\62\63\3\2\62\65\3") + buf.write(u"\2\62\67\5\2C\\aac|\6\2\62;C\\aac|\7\2//\62;C\\aac|\4") + buf.write(u"\2CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2II") + buf.write(u"ii\4\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4") + buf.write(u"\2PPpp\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VV") + buf.write(u"vv\4\2WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||") + buf.write(u"\5\2\62;CHch\6\2--\61;C\\c|\f\2\13\17\"\"\u0087\u0087") + buf.write(u"\u00a2\u00a2\u1682\u1682\u2002\u200c\u202a\u202b\u2031") + buf.write(u"\u2031\u2061\u2061\u3002\u3002\4\2\f\f\17\17\2\u0259") + 
buf.write(u"\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13") + buf.write(u"\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3") + buf.write(u"\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3") + buf.write(u"\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2") + buf.write(u"\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3") + buf.write(u"\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2") + buf.write(u"\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2") + buf.write(u"?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2") + buf.write(u"\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2") + buf.write(u"\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2") + buf.write(u"\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2\u009f") + buf.write(u"\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2") + buf.write(u"\2\2\3\u00a7\3\2\2\2\5\u00b3\3\2\2\2\7\u00bf\3\2\2\2") + buf.write(u"\t\u00cd\3\2\2\2\13\u00db\3\2\2\2\r\u00e5\3\2\2\2\17") + buf.write(u"\u0106\3\2\2\2\21\u0115\3\2\2\2\23\u0117\3\2\2\2\25\u0149") + buf.write(u"\3\2\2\2\27\u014d\3\2\2\2\31\u0150\3\2\2\2\33\u0154\3") + buf.write(u"\2\2\2\35\u015f\3\2\2\2\37\u0164\3\2\2\2!\u016c\3\2\2") + buf.write(u"\2#\u0177\3\2\2\2%\u0180\3\2\2\2\'\u0185\3\2\2\2)\u0188") + buf.write(u"\3\2\2\2+\u018e\3\2\2\2-\u0193\3\2\2\2/\u019b\3\2\2\2") + buf.write(u"\61\u01a0\3\2\2\2\63\u01a6\3\2\2\2\65\u01ad\3\2\2\2\67") + buf.write(u"\u01b5\3\2\2\29\u01bb\3\2\2\2;\u01c2\3\2\2\2=\u01cc\3") + buf.write(u"\2\2\2?\u01d2\3\2\2\2A\u01d4\3\2\2\2C\u01d6\3\2\2\2E") + buf.write(u"\u01d9\3\2\2\2G\u01db\3\2\2\2I\u01de\3\2\2\2K\u01e0\3") + buf.write(u"\2\2\2M\u01e2\3\2\2\2O\u01e4\3\2\2\2Q\u01e6\3\2\2\2S") + buf.write(u"\u01e8\3\2\2\2U\u01ea\3\2\2\2W\u01ec\3\2\2\2Y\u01ee\3") + buf.write(u"\2\2\2[\u01f0\3\2\2\2]\u01f2\3\2\2\2_\u01f4\3\2\2\2a") + buf.write(u"\u01f6\3\2\2\2c\u01f8\3\2\2\2e\u01fa\3\2\2\2g\u01fc\3") + buf.write(u"\2\2\2i\u01fe\3\2\2\2k\u0200\3\2\2\2m\u0202\3\2\2\2o") + buf.write(u"\u0204\3\2\2\2q\u0206\3\2\2\2s\u0208\3\2\2\2u\u020a\3") + buf.write(u"\2\2\2w\u020c\3\2\2\2y\u020e\3\2\2\2{\u0210\3\2\2\2}") + buf.write(u"\u0212\3\2\2\2\177\u0214\3\2\2\2\u0081\u0216\3\2\2\2") + buf.write(u"\u0083\u0218\3\2\2\2\u0085\u021a\3\2\2\2\u0087\u021c") + buf.write(u"\3\2\2\2\u0089\u021e\3\2\2\2\u008b\u0220\3\2\2\2\u008d") + buf.write(u"\u0222\3\2\2\2\u008f\u0224\3\2\2\2\u0091\u0226\3\2\2") + buf.write(u"\2\u0093\u0228\3\2\2\2\u0095\u022a\3\2\2\2\u0097\u022c") + buf.write(u"\3\2\2\2\u0099\u022e\3\2\2\2\u009b\u0230\3\2\2\2\u009d") + buf.write(u"\u0233\3\2\2\2\u009f\u0236\3\2\2\2\u00a1\u023c\3\2\2") + buf.write(u"\2\u00a3\u024a\3\2\2\2\u00a5\u0255\3\2\2\2\u00a7\u00b0") + buf.write(u"\7/\2\2\u00a8\u00b1\7\62\2\2\u00a9\u00ad\t\2\2\2\u00aa") + buf.write(u"\u00ac\t\3\2\2\u00ab\u00aa\3\2\2\2\u00ac\u00af\3\2\2") + buf.write(u"\2\u00ad\u00ab\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\u00b1") + buf.write(u"\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0\u00a8\3\2\2\2\u00b0") + buf.write(u"\u00a9\3\2\2\2\u00b1\4\3\2\2\2\u00b2\u00b4\7-\2\2\u00b3") + buf.write(u"\u00b2\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00bd\3\2\2") + buf.write(u"\2\u00b5\u00be\7\62\2\2\u00b6\u00ba\t\4\2\2\u00b7\u00b9") + buf.write(u"\t\3\2\2\u00b8\u00b7\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba") + buf.write(u"\u00b8\3\2\2\2\u00ba\u00bb\3\2\2\2\u00bb\u00be\3\2\2") + buf.write(u"\2\u00bc\u00ba\3\2\2\2\u00bd\u00b5\3\2\2\2\u00bd\u00b6") + buf.write(u"\3\2\2\2\u00be\6\3\2\2\2\u00bf\u00c3\7/\2\2\u00c0\u00c2") + buf.write(u"\t\3\2\2\u00c1\u00c0\3\2\2\2\u00c2\u00c5\3\2\2\2\u00c3") + 
buf.write(u"\u00c1\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00c6\3\2\2") + buf.write(u"\2\u00c5\u00c3\3\2\2\2\u00c6\u00c8\7\60\2\2\u00c7\u00c9") + buf.write(u"\t\3\2\2\u00c8\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca") + buf.write(u"\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\b\3\2\2\2\u00cc") + buf.write(u"\u00ce\7-\2\2\u00cd\u00cc\3\2\2\2\u00cd\u00ce\3\2\2\2") + buf.write(u"\u00ce\u00d2\3\2\2\2\u00cf\u00d1\t\3\2\2\u00d0\u00cf") + buf.write(u"\3\2\2\2\u00d1\u00d4\3\2\2\2\u00d2\u00d0\3\2\2\2\u00d2") + buf.write(u"\u00d3\3\2\2\2\u00d3\u00d5\3\2\2\2\u00d4\u00d2\3\2\2") + buf.write(u"\2\u00d5\u00d7\7\60\2\2\u00d6\u00d8\t\3\2\2\u00d7\u00d6") + buf.write(u"\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\u00d7\3\2\2\2\u00d9") + buf.write(u"\u00da\3\2\2\2\u00da\n\3\2\2\2\u00db\u00dc\7j\2\2\u00dc") + buf.write(u"\u00e0\5I%\2\u00dd\u00df\5\u009bN\2\u00de\u00dd\3\2\2") + buf.write(u"\2\u00df\u00e2\3\2\2\2\u00e0\u00de\3\2\2\2\u00e0\u00e1") + buf.write(u"\3\2\2\2\u00e1\u00e3\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e3") + buf.write(u"\u00e4\5I%\2\u00e4\f\3\2\2\2\u00e5\u00e6\7d\2\2\u00e6") + buf.write(u"\u00ee\5I%\2\u00e7\u00e8\5\u009dO\2\u00e8\u00e9\5\u009d") + buf.write(u"O\2\u00e9\u00ea\5\u009dO\2\u00ea\u00eb\5\u009dO\2\u00eb") + buf.write(u"\u00ed\3\2\2\2\u00ec\u00e7\3\2\2\2\u00ed\u00f0\3\2\2") + buf.write(u"\2\u00ee\u00ec\3\2\2\2\u00ee\u00ef\3\2\2\2\u00ef\u0102") + buf.write(u"\3\2\2\2\u00f0\u00ee\3\2\2\2\u00f1\u00f2\5\u009dO\2\u00f2") + buf.write(u"\u00f3\5\u009dO\2\u00f3\u00f4\5\u009dO\2\u00f4\u00f5") + buf.write(u"\5\u009dO\2\u00f5\u0103\3\2\2\2\u00f6\u00f7\5\u009dO") + buf.write(u"\2\u00f7\u00f8\5\u009dO\2\u00f8\u00f9\5\u009dO\2\u00f9") + buf.write(u"\u00fa\3\2\2\2\u00fa\u00fb\7?\2\2\u00fb\u0103\3\2\2\2") + buf.write(u"\u00fc\u00fd\5\u009dO\2\u00fd\u00fe\5\u009dO\2\u00fe") + buf.write(u"\u00ff\3\2\2\2\u00ff\u0100\7?\2\2\u0100\u0101\7?\2\2") + buf.write(u"\u0101\u0103\3\2\2\2\u0102\u00f1\3\2\2\2\u0102\u00f6") + buf.write(u"\3\2\2\2\u0102\u00fc\3\2\2\2\u0103\u0104\3\2\2\2\u0104") + buf.write(u"\u0105\5I%\2\u0105\16\3\2\2\2\u0106\u010e\5I%\2\u0107") + buf.write(u"\u010d\n\5\2\2\u0108\u0109\7^\2\2\u0109\u010d\7)\2\2") + buf.write(u"\u010a\u010b\7^\2\2\u010b\u010d\7^\2\2\u010c\u0107\3") + buf.write(u"\2\2\2\u010c\u0108\3\2\2\2\u010c\u010a\3\2\2\2\u010d") + buf.write(u"\u0110\3\2\2\2\u010e\u010c\3\2\2\2\u010e\u010f\3\2\2") + buf.write(u"\2\u010f\u0111\3\2\2\2\u0110\u010e\3\2\2\2\u0111\u0112") + buf.write(u"\5I%\2\u0112\20\3\2\2\2\u0113\u0116\5/\30\2\u0114\u0116") + buf.write(u"\5\61\31\2\u0115\u0113\3\2\2\2\u0115\u0114\3\2\2\2\u0116") + buf.write(u"\22\3\2\2\2\u0117\u0118\7v\2\2\u0118\u0119\5I%\2\u0119") + buf.write(u"\u011a\t\3\2\2\u011a\u011b\t\3\2\2\u011b\u011c\t\3\2") + buf.write(u"\2\u011c\u011d\t\3\2\2\u011d\u0122\5[.\2\u011e\u011f") + buf.write(u"\7\62\2\2\u011f\u0123\t\6\2\2\u0120\u0121\7\63\2\2\u0121") + buf.write(u"\u0123\t\7\2\2\u0122\u011e\3\2\2\2\u0122\u0120\3\2\2") + buf.write(u"\2\u0123\u0124\3\2\2\2\u0124\u012b\5[.\2\u0125\u0126") + buf.write(u"\7\62\2\2\u0126\u012c\t\6\2\2\u0127\u0128\t\b\2\2\u0128") + buf.write(u"\u012c\t\3\2\2\u0129\u012a\7\65\2\2\u012a\u012c\t\t\2") + buf.write(u"\2\u012b\u0125\3\2\2\2\u012b\u0127\3\2\2\2\u012b\u0129") + buf.write(u"\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u0132\7V\2\2\u012e") + buf.write(u"\u012f\t\t\2\2\u012f\u0133\t\3\2\2\u0130\u0131\7\64\2") + buf.write(u"\2\u0131\u0133\t\n\2\2\u0132\u012e\3\2\2\2\u0132\u0130") + buf.write(u"\3\2\2\2\u0133\u0134\3\2\2\2\u0134\u0135\5K&\2\u0135") + buf.write(u"\u0136\t\13\2\2\u0136\u0137\t\3\2\2\u0137\u013c\5K&\2") + 
buf.write(u"\u0138\u0139\t\13\2\2\u0139\u013d\t\3\2\2\u013a\u013b") + buf.write(u"\78\2\2\u013b\u013d\7\62\2\2\u013c\u0138\3\2\2\2\u013c") + buf.write(u"\u013a\3\2\2\2\u013d\u0144\3\2\2\2\u013e\u0140\5M\'\2") + buf.write(u"\u013f\u0141\t\3\2\2\u0140\u013f\3\2\2\2\u0141\u0142") + buf.write(u"\3\2\2\2\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2\u0143") + buf.write(u"\u0145\3\2\2\2\u0144\u013e\3\2\2\2\u0144\u0145\3\2\2") + buf.write(u"\2\u0145\u0146\3\2\2\2\u0146\u0147\7\\\2\2\u0147\u0148") + buf.write(u"\5I%\2\u0148\24\3\2\2\2\u0149\u014a\5e\63\2\u014a\u014b") + buf.write(u"\5\177@\2\u014b\u014c\5k\66\2\u014c\26\3\2\2\2\u014d") + buf.write(u"\u014e\5\u0081A\2\u014e\u014f\5\u0087D\2\u014f\30\3\2") + buf.write(u"\2\2\u0150\u0151\5\177@\2\u0151\u0152\5\u0081A\2\u0152") + buf.write(u"\u0153\5\u008bF\2\u0153\32\3\2\2\2\u0154\u0155\5o8\2") + buf.write(u"\u0155\u0156\5\u0081A\2\u0156\u0157\5{>\2\u0157\u0158") + buf.write(u"\5{>\2\u0158\u0159\5\u0081A\2\u0159\u015a\5\u0091I\2") + buf.write(u"\u015a\u015b\5m\67\2\u015b\u015c\5k\66\2\u015c\u015d") + buf.write(u"\5g\64\2\u015d\u015e\5\u0095K\2\u015e\34\3\2\2\2\u015f") + buf.write(u"\u0160\5{>\2\u0160\u0161\5u;\2\u0161\u0162\5y=\2\u0162") + buf.write(u"\u0163\5m\67\2\u0163\36\3\2\2\2\u0164\u0165\5}?\2\u0165") + buf.write(u"\u0166\5e\63\2\u0166\u0167\5\u008bF\2\u0167\u0168\5i") + buf.write(u"\65\2\u0168\u0169\5s:\2\u0169\u016a\5m\67\2\u016a\u016b") + buf.write(u"\5\u0089E\2\u016b \3\2\2\2\u016c\u016d\5u;\2\u016d\u016e") + buf.write(u"\5\u0089E\2\u016e\u016f\5\u0089E\2\u016f\u0170\5\u008d") + buf.write(u"G\2\u0170\u0171\5\u0083B\2\u0171\u0172\5m\67\2\u0172") + buf.write(u"\u0173\5\u0087D\2\u0173\u0174\5\u0089E\2\u0174\u0175") + buf.write(u"\5m\67\2\u0175\u0176\5\u008bF\2\u0176\"\3\2\2\2\u0177") + buf.write(u"\u0178\5u;\2\u0178\u0179\5\u0089E\2\u0179\u017a\5\u0089") + buf.write(u"E\2\u017a\u017b\5\u008dG\2\u017b\u017c\5g\64\2\u017c") + buf.write(u"\u017d\5\u0089E\2\u017d\u017e\5m\67\2\u017e\u017f\5\u008b") + buf.write(u"F\2\u017f$\3\2\2\2\u0180\u0181\5{>\2\u0181\u0182\5e\63") + buf.write(u"\2\u0182\u0183\5\u0089E\2\u0183\u0184\5\u008bF\2\u0184") + buf.write(u"&\3\2\2\2\u0185\u0186\5u;\2\u0186\u0187\5\177@\2\u0187") + buf.write(u"(\3\2\2\2\u0188\u0189\5\u0089E\2\u0189\u018a\5\u008b") + buf.write(u"F\2\u018a\u018b\5e\63\2\u018b\u018c\5\u0087D\2\u018c") + buf.write(u"\u018d\5\u008bF\2\u018d*\3\2\2\2\u018e\u018f\5\u0089") + buf.write(u"E\2\u018f\u0190\5\u008bF\2\u0190\u0191\5\u0081A\2\u0191") + buf.write(u"\u0192\5\u0083B\2\u0192,\3\2\2\2\u0193\u0194\5\u0089") + buf.write(u"E\2\u0194\u0195\5m\67\2\u0195\u0196\5i\65\2\u0196\u0197") + buf.write(u"\5\u0081A\2\u0197\u0198\5\177@\2\u0198\u0199\5k\66\2") + buf.write(u"\u0199\u019a\5\u0089E\2\u019a.\3\2\2\2\u019b\u019c\5") + buf.write(u"\u008bF\2\u019c\u019d\5\u0087D\2\u019d\u019e\5\u008d") + buf.write(u"G\2\u019e\u019f\5m\67\2\u019f\60\3\2\2\2\u01a0\u01a1") + buf.write(u"\5o8\2\u01a1\u01a2\5e\63\2\u01a2\u01a3\5{>\2\u01a3\u01a4") + buf.write(u"\5\u0089E\2\u01a4\u01a5\5m\67\2\u01a5\62\3\2\2\2\u01a6") + buf.write(u"\u01a7\5\u0091I\2\u01a7\u01a8\5u;\2\u01a8\u01a9\5\u008b") + buf.write(u"F\2\u01a9\u01aa\5s:\2\u01aa\u01ab\5u;\2\u01ab\u01ac\5") + buf.write(u"\177@\2\u01ac\64\3\2\2\2\u01ad\u01ae\5\u0087D\2\u01ae") + buf.write(u"\u01af\5m\67\2\u01af\u01b0\5\u0083B\2\u01b0\u01b1\5m") + buf.write(u"\67\2\u01b1\u01b2\5e\63\2\u01b2\u01b3\5\u008bF\2\u01b3") + buf.write(u"\u01b4\5\u0089E\2\u01b4\66\3\2\2\2\u01b5\u01b6\5\u008b") + buf.write(u"F\2\u01b6\u01b7\5u;\2\u01b7\u01b8\5}?\2\u01b8\u01b9\5") + 
buf.write(u"m\67\2\u01b9\u01ba\5\u0089E\2\u01ba8\3\2\2\2\u01bb\u01bf") + buf.write(u"\t\f\2\2\u01bc\u01be\t\r\2\2\u01bd\u01bc\3\2\2\2\u01be") + buf.write(u"\u01c1\3\2\2\2\u01bf\u01bd\3\2\2\2\u01bf\u01c0\3\2\2") + buf.write(u"\2\u01c0:\3\2\2\2\u01c1\u01bf\3\2\2\2\u01c2\u01c6\t\f") + buf.write(u"\2\2\u01c3\u01c5\t\16\2\2\u01c4\u01c3\3\2\2\2\u01c5\u01c8") + buf.write(u"\3\2\2\2\u01c6\u01c4\3\2\2\2\u01c6\u01c7\3\2\2\2\u01c7") + buf.write(u"<\3\2\2\2\u01c8\u01c6\3\2\2\2\u01c9\u01cd\7?\2\2\u01ca") + buf.write(u"\u01cb\7?\2\2\u01cb\u01cd\7?\2\2\u01cc\u01c9\3\2\2\2") + buf.write(u"\u01cc\u01ca\3\2\2\2\u01cd>\3\2\2\2\u01ce\u01cf\7#\2") + buf.write(u"\2\u01cf\u01d3\7?\2\2\u01d0\u01d1\7>\2\2\u01d1\u01d3") + buf.write(u"\7@\2\2\u01d2\u01ce\3\2\2\2\u01d2\u01d0\3\2\2\2\u01d3") + buf.write(u"@\3\2\2\2\u01d4\u01d5\7>\2\2\u01d5B\3\2\2\2\u01d6\u01d7") + buf.write(u"\7>\2\2\u01d7\u01d8\7?\2\2\u01d8D\3\2\2\2\u01d9\u01da") + buf.write(u"\7@\2\2\u01daF\3\2\2\2\u01db\u01dc\7@\2\2\u01dc\u01dd") + buf.write(u"\7?\2\2\u01ddH\3\2\2\2\u01de\u01df\7)\2\2\u01dfJ\3\2") + buf.write(u"\2\2\u01e0\u01e1\7<\2\2\u01e1L\3\2\2\2\u01e2\u01e3\7") + buf.write(u"\60\2\2\u01e3N\3\2\2\2\u01e4\u01e5\7.\2\2\u01e5P\3\2") + buf.write(u"\2\2\u01e6\u01e7\7+\2\2\u01e7R\3\2\2\2\u01e8\u01e9\7") + buf.write(u"*\2\2\u01e9T\3\2\2\2\u01ea\u01eb\7_\2\2\u01ebV\3\2\2") + buf.write(u"\2\u01ec\u01ed\7]\2\2\u01edX\3\2\2\2\u01ee\u01ef\7-\2") + buf.write(u"\2\u01efZ\3\2\2\2\u01f0\u01f1\5]/\2\u01f1\\\3\2\2\2\u01f2") + buf.write(u"\u01f3\7/\2\2\u01f3^\3\2\2\2\u01f4\u01f5\7`\2\2\u01f5") + buf.write(u"`\3\2\2\2\u01f6\u01f7\7\61\2\2\u01f7b\3\2\2\2\u01f8\u01f9") + buf.write(u"\7,\2\2\u01f9d\3\2\2\2\u01fa\u01fb\t\17\2\2\u01fbf\3") + buf.write(u"\2\2\2\u01fc\u01fd\t\20\2\2\u01fdh\3\2\2\2\u01fe\u01ff") + buf.write(u"\t\21\2\2\u01ffj\3\2\2\2\u0200\u0201\t\22\2\2\u0201l") + buf.write(u"\3\2\2\2\u0202\u0203\t\23\2\2\u0203n\3\2\2\2\u0204\u0205") + buf.write(u"\t\24\2\2\u0205p\3\2\2\2\u0206\u0207\t\25\2\2\u0207r") + buf.write(u"\3\2\2\2\u0208\u0209\t\26\2\2\u0209t\3\2\2\2\u020a\u020b") + buf.write(u"\t\27\2\2\u020bv\3\2\2\2\u020c\u020d\t\30\2\2\u020dx") + buf.write(u"\3\2\2\2\u020e\u020f\t\31\2\2\u020fz\3\2\2\2\u0210\u0211") + buf.write(u"\t\32\2\2\u0211|\3\2\2\2\u0212\u0213\t\33\2\2\u0213~") + buf.write(u"\3\2\2\2\u0214\u0215\t\34\2\2\u0215\u0080\3\2\2\2\u0216") + buf.write(u"\u0217\t\35\2\2\u0217\u0082\3\2\2\2\u0218\u0219\t\36") + buf.write(u"\2\2\u0219\u0084\3\2\2\2\u021a\u021b\t\37\2\2\u021b\u0086") + buf.write(u"\3\2\2\2\u021c\u021d\t \2\2\u021d\u0088\3\2\2\2\u021e") + buf.write(u"\u021f\t!\2\2\u021f\u008a\3\2\2\2\u0220\u0221\t\"\2\2") + buf.write(u"\u0221\u008c\3\2\2\2\u0222\u0223\t#\2\2\u0223\u008e\3") + buf.write(u"\2\2\2\u0224\u0225\t$\2\2\u0225\u0090\3\2\2\2\u0226\u0227") + buf.write(u"\t%\2\2\u0227\u0092\3\2\2\2\u0228\u0229\t&\2\2\u0229") + buf.write(u"\u0094\3\2\2\2\u022a\u022b\t\'\2\2\u022b\u0096\3\2\2") + buf.write(u"\2\u022c\u022d\t(\2\2\u022d\u0098\3\2\2\2\u022e\u022f") + buf.write(u"\t)\2\2\u022f\u009a\3\2\2\2\u0230\u0231\5\u0099M\2\u0231") + buf.write(u"\u0232\5\u0099M\2\u0232\u009c\3\2\2\2\u0233\u0234\t*") + buf.write(u"\2\2\u0234\u009e\3\2\2\2\u0235\u0237\t+\2\2\u0236\u0235") + buf.write(u"\3\2\2\2\u0237\u0238\3\2\2\2\u0238\u0236\3\2\2\2\u0238") + buf.write(u"\u0239\3\2\2\2\u0239\u023a\3\2\2\2\u023a\u023b\bP\2\2") + buf.write(u"\u023b\u00a0\3\2\2\2\u023c\u023d\7\61\2\2\u023d\u023e") + buf.write(u"\7,\2\2\u023e\u0242\3\2\2\2\u023f\u0241\13\2\2\2\u0240") + buf.write(u"\u023f\3\2\2\2\u0241\u0244\3\2\2\2\u0242\u0243\3\2\2") + 
buf.write(u"\2\u0242\u0240\3\2\2\2\u0243\u0245\3\2\2\2\u0244\u0242") + buf.write(u"\3\2\2\2\u0245\u0246\7,\2\2\u0246\u0247\7\61\2\2\u0247") + buf.write(u"\u0248\3\2\2\2\u0248\u0249\bQ\2\2\u0249\u00a2\3\2\2\2") + buf.write(u"\u024a\u024b\7\61\2\2\u024b\u024c\7\61\2\2\u024c\u0250") + buf.write(u"\3\2\2\2\u024d\u024f\n,\2\2\u024e\u024d\3\2\2\2\u024f") + buf.write(u"\u0252\3\2\2\2\u0250\u024e\3\2\2\2\u0250\u0251\3\2\2") + buf.write(u"\2\u0251\u0253\3\2\2\2\u0252\u0250\3\2\2\2\u0253\u0254") + buf.write(u"\bR\2\2\u0254\u00a4\3\2\2\2\u0255\u0256\13\2\2\2\u0256") + buf.write(u"\u00a6\3\2\2\2 \2\u00ad\u00b0\u00b3\u00ba\u00bd\u00c3") + buf.write(u"\u00ca\u00cd\u00d2\u00d9\u00e0\u00ee\u0102\u010c\u010e") + buf.write(u"\u0115\u0122\u012b\u0132\u013c\u0142\u0144\u01bf\u01c6") + buf.write(u"\u01cc\u01d2\u0238\u0242\u0250\3\b\2\2") return buf.getvalue() @@ -256,100 +298,103 @@ class STIXPatternLexer(Lexer): atn = ATNDeserializer().deserialize(serializedATN()) - decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] - IntLiteral = 1 - FloatLiteral = 2 - HexLiteral = 3 - BinaryLiteral = 4 - StringLiteral = 5 - BoolLiteral = 6 - TimestampLiteral = 7 - AND = 8 - OR = 9 - NOT = 10 - FOLLOWEDBY = 11 - LIKE = 12 - MATCHES = 13 - ISSUPERSET = 14 - ISSUBSET = 15 - LAST = 16 - IN = 17 - START = 18 - STOP = 19 - SECONDS = 20 - TRUE = 21 - FALSE = 22 - WITHIN = 23 - REPEATS = 24 - TIMES = 25 - IdentifierWithoutHyphen = 26 - IdentifierWithHyphen = 27 - EQ = 28 - NEQ = 29 - LT = 30 - LE = 31 - GT = 32 - GE = 33 - QUOTE = 34 - COLON = 35 - DOT = 36 - COMMA = 37 - RPAREN = 38 - LPAREN = 39 - RBRACK = 40 - LBRACK = 41 - PLUS = 42 - HYPHEN = 43 - MINUS = 44 - POWER_OP = 45 - DIVIDE = 46 - ASTERISK = 47 - WS = 48 - COMMENT = 49 - LINE_COMMENT = 50 + IntNegLiteral = 1 + IntPosLiteral = 2 + FloatNegLiteral = 3 + FloatPosLiteral = 4 + HexLiteral = 5 + BinaryLiteral = 6 + StringLiteral = 7 + BoolLiteral = 8 + TimestampLiteral = 9 + AND = 10 + OR = 11 + NOT = 12 + FOLLOWEDBY = 13 + LIKE = 14 + MATCHES = 15 + ISSUPERSET = 16 + ISSUBSET = 17 + LAST = 18 + IN = 19 + START = 20 + STOP = 21 + SECONDS = 22 + TRUE = 23 + FALSE = 24 + WITHIN = 25 + REPEATS = 26 + TIMES = 27 + IdentifierWithoutHyphen = 28 + IdentifierWithHyphen = 29 + EQ = 30 + NEQ = 31 + LT = 32 + LE = 33 + GT = 34 + GE = 35 + QUOTE = 36 + COLON = 37 + DOT = 38 + COMMA = 39 + RPAREN = 40 + LPAREN = 41 + RBRACK = 42 + LBRACK = 43 + PLUS = 44 + HYPHEN = 45 + MINUS = 46 + POWER_OP = 47 + DIVIDE = 48 + ASTERISK = 49 + WS = 50 + COMMENT = 51 + LINE_COMMENT = 52 + InvalidCharacter = 53 - channelNames = [u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN"] + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] - modeNames = [u"DEFAULT_MODE"] + modeNames = [ u"DEFAULT_MODE" ] - literalNames = [u"<INVALID>", u"'<'", u"'<='", u"'>'", u"'>='", u"'''", - u"':'", u"'.'", u"','", u"')'", u"'('", u"']'", u"'['", - u"'+'", u"'-'", u"'^'", u"'/'", u"'*'"] + literalNames = [ u"<INVALID>", + u"'<'", u"'<='", u"'>'", u"'>='", u"'''", u"':'", u"'.'", u"','", + u"')'", u"'('", u"']'", u"'['", u"'+'", u"'-'", u"'^'", u"'/'", + u"'*'" ] - symbolicNames = [u"<INVALID>", u"IntLiteral", u"FloatLiteral", - u"HexLiteral", u"BinaryLiteral", u"StringLiteral", - u"BoolLiteral", u"TimestampLiteral", u"AND", u"OR", - u"NOT", u"FOLLOWEDBY", u"LIKE", u"MATCHES", u"ISSUPERSET", - u"ISSUBSET", u"LAST", u"IN", u"START", u"STOP", u"SECONDS", - u"TRUE", u"FALSE", u"WITHIN", u"REPEATS", u"TIMES", - u"IdentifierWithoutHyphen", 
u"IdentifierWithHyphen", - u"EQ", u"NEQ", u"LT", u"LE", u"GT", u"GE", u"QUOTE", - u"COLON", u"DOT", u"COMMA", u"RPAREN", u"LPAREN", - u"RBRACK", u"LBRACK", u"PLUS", u"HYPHEN", u"MINUS", - u"POWER_OP", u"DIVIDE", u"ASTERISK", u"WS", u"COMMENT", - u"LINE_COMMENT"] + symbolicNames = [ u"<INVALID>", + u"IntNegLiteral", u"IntPosLiteral", u"FloatNegLiteral", u"FloatPosLiteral", + u"HexLiteral", u"BinaryLiteral", u"StringLiteral", u"BoolLiteral", + u"TimestampLiteral", u"AND", u"OR", u"NOT", u"FOLLOWEDBY", u"LIKE", + u"MATCHES", u"ISSUPERSET", u"ISSUBSET", u"LAST", u"IN", u"START", + u"STOP", u"SECONDS", u"TRUE", u"FALSE", u"WITHIN", u"REPEATS", + u"TIMES", u"IdentifierWithoutHyphen", u"IdentifierWithHyphen", + u"EQ", u"NEQ", u"LT", u"LE", u"GT", u"GE", u"QUOTE", u"COLON", + u"DOT", u"COMMA", u"RPAREN", u"LPAREN", u"RBRACK", u"LBRACK", + u"PLUS", u"HYPHEN", u"MINUS", u"POWER_OP", u"DIVIDE", u"ASTERISK", + u"WS", u"COMMENT", u"LINE_COMMENT", u"InvalidCharacter" ] - ruleNames = [u"IntLiteral", u"FloatLiteral", u"HexLiteral", u"BinaryLiteral", - u"StringLiteral", u"BoolLiteral", u"TimestampLiteral", - u"AND", u"OR", u"NOT", u"FOLLOWEDBY", u"LIKE", u"MATCHES", - u"ISSUPERSET", u"ISSUBSET", u"LAST", u"IN", u"START", - u"STOP", u"SECONDS", u"TRUE", u"FALSE", u"WITHIN", u"REPEATS", - u"TIMES", u"IdentifierWithoutHyphen", u"IdentifierWithHyphen", - u"EQ", u"NEQ", u"LT", u"LE", u"GT", u"GE", u"QUOTE", u"COLON", - u"DOT", u"COMMA", u"RPAREN", u"LPAREN", u"RBRACK", u"LBRACK", - u"PLUS", u"HYPHEN", u"MINUS", u"POWER_OP", u"DIVIDE", - u"ASTERISK", u"A", u"B", u"C", u"D", u"E", u"F", u"G", - u"H", u"I", u"J", u"K", u"L", u"M", u"N", u"O", u"P", - u"Q", u"R", u"S", u"T", u"U", u"V", u"W", u"X", u"Y", - u"Z", u"HexDigit", u"TwoHexDigits", u"Base64Char", u"WS", - u"COMMENT", u"LINE_COMMENT"] + ruleNames = [ u"IntNegLiteral", u"IntPosLiteral", u"FloatNegLiteral", + u"FloatPosLiteral", u"HexLiteral", u"BinaryLiteral", u"StringLiteral", + u"BoolLiteral", u"TimestampLiteral", u"AND", u"OR", u"NOT", + u"FOLLOWEDBY", u"LIKE", u"MATCHES", u"ISSUPERSET", u"ISSUBSET", + u"LAST", u"IN", u"START", u"STOP", u"SECONDS", u"TRUE", + u"FALSE", u"WITHIN", u"REPEATS", u"TIMES", u"IdentifierWithoutHyphen", + u"IdentifierWithHyphen", u"EQ", u"NEQ", u"LT", u"LE", + u"GT", u"GE", u"QUOTE", u"COLON", u"DOT", u"COMMA", u"RPAREN", + u"LPAREN", u"RBRACK", u"LBRACK", u"PLUS", u"HYPHEN", u"MINUS", + u"POWER_OP", u"DIVIDE", u"ASTERISK", u"A", u"B", u"C", + u"D", u"E", u"F", u"G", u"H", u"I", u"J", u"K", u"L", + u"M", u"N", u"O", u"P", u"Q", u"R", u"S", u"T", u"U", + u"V", u"W", u"X", u"Y", u"Z", u"HexDigit", u"TwoHexDigits", + u"Base64Char", u"WS", u"COMMENT", u"LINE_COMMENT", u"InvalidCharacter" ] grammarFileName = u"STIXPattern.g4" def __init__(self, input=None, output=sys.stdout): super(STIXPatternLexer, self).__init__(input, output=output) - self.checkVersion("4.7") + self.checkVersion("4.7.1") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None diff --git a/stix2patterns/grammars/STIXPatternListener.py b/stix2patterns/grammars/STIXPatternListener.py index e8e17fd..e7f88d4 100644 --- a/stix2patterns/grammars/STIXPatternListener.py +++ b/stix2patterns/grammars/STIXPatternListener.py @@ -1,4 +1,5 @@ -# Generated from STIXPattern.g4 by ANTLR 4.7 +# Generated from STIXPattern.g4 by ANTLR 4.7.1 + from antlr4 import * @@ -13,6 +14,7 @@ class STIXPatternListener(ParseTreeListener): def exitPattern(self, ctx): pass + # Enter a parse tree produced by 
STIXPatternParser#observationExpressions. def enterObservationExpressions(self, ctx): pass @@ -21,6 +23,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressions(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionOr. def enterObservationExpressionOr(self, ctx): pass @@ -29,6 +32,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionOr(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionAnd. def enterObservationExpressionAnd(self, ctx): pass @@ -37,6 +41,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionAnd(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionRepeated. def enterObservationExpressionRepeated(self, ctx): pass @@ -45,6 +50,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionRepeated(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionSimple. def enterObservationExpressionSimple(self, ctx): pass @@ -53,6 +59,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionSimple(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionCompound. def enterObservationExpressionCompound(self, ctx): pass @@ -61,6 +68,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionCompound(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionWithin. def enterObservationExpressionWithin(self, ctx): pass @@ -69,6 +77,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionWithin(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#observationExpressionStartStop. def enterObservationExpressionStartStop(self, ctx): pass @@ -77,6 +86,7 @@ class STIXPatternListener(ParseTreeListener): def exitObservationExpressionStartStop(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#comparisonExpression. def enterComparisonExpression(self, ctx): pass @@ -85,6 +95,7 @@ class STIXPatternListener(ParseTreeListener): def exitComparisonExpression(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#comparisonExpressionAnd. def enterComparisonExpressionAnd(self, ctx): pass @@ -93,6 +104,7 @@ class STIXPatternListener(ParseTreeListener): def exitComparisonExpressionAnd(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestEqual. def enterPropTestEqual(self, ctx): pass @@ -101,6 +113,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestEqual(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestOrder. def enterPropTestOrder(self, ctx): pass @@ -109,6 +122,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestOrder(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestSet. def enterPropTestSet(self, ctx): pass @@ -117,6 +131,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestSet(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestLike. def enterPropTestLike(self, ctx): pass @@ -125,6 +140,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestLike(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestRegex. 
def enterPropTestRegex(self, ctx): pass @@ -133,6 +149,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestRegex(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestIsSubset. def enterPropTestIsSubset(self, ctx): pass @@ -141,6 +158,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestIsSubset(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestIsSuperset. def enterPropTestIsSuperset(self, ctx): pass @@ -149,6 +167,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestIsSuperset(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#propTestParen. def enterPropTestParen(self, ctx): pass @@ -157,6 +176,7 @@ class STIXPatternListener(ParseTreeListener): def exitPropTestParen(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#startStopQualifier. def enterStartStopQualifier(self, ctx): pass @@ -165,6 +185,7 @@ class STIXPatternListener(ParseTreeListener): def exitStartStopQualifier(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#withinQualifier. def enterWithinQualifier(self, ctx): pass @@ -173,6 +194,7 @@ class STIXPatternListener(ParseTreeListener): def exitWithinQualifier(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#repeatedQualifier. def enterRepeatedQualifier(self, ctx): pass @@ -181,6 +203,7 @@ class STIXPatternListener(ParseTreeListener): def exitRepeatedQualifier(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#objectPath. def enterObjectPath(self, ctx): pass @@ -189,6 +212,7 @@ class STIXPatternListener(ParseTreeListener): def exitObjectPath(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#objectType. def enterObjectType(self, ctx): pass @@ -197,6 +221,7 @@ class STIXPatternListener(ParseTreeListener): def exitObjectType(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#firstPathComponent. def enterFirstPathComponent(self, ctx): pass @@ -205,6 +230,7 @@ class STIXPatternListener(ParseTreeListener): def exitFirstPathComponent(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#indexPathStep. def enterIndexPathStep(self, ctx): pass @@ -213,6 +239,7 @@ class STIXPatternListener(ParseTreeListener): def exitIndexPathStep(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#pathStep. def enterPathStep(self, ctx): pass @@ -221,6 +248,7 @@ class STIXPatternListener(ParseTreeListener): def exitPathStep(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#keyPathStep. def enterKeyPathStep(self, ctx): pass @@ -229,6 +257,7 @@ class STIXPatternListener(ParseTreeListener): def exitKeyPathStep(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#setLiteral. def enterSetLiteral(self, ctx): pass @@ -237,6 +266,7 @@ class STIXPatternListener(ParseTreeListener): def exitSetLiteral(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#primitiveLiteral. def enterPrimitiveLiteral(self, ctx): pass @@ -245,6 +275,7 @@ class STIXPatternListener(ParseTreeListener): def exitPrimitiveLiteral(self, ctx): pass + # Enter a parse tree produced by STIXPatternParser#orderableLiteral. 
def enterOrderableLiteral(self, ctx): pass diff --git a/stix2patterns/grammars/STIXPatternParser.py b/stix2patterns/grammars/STIXPatternParser.py index 2789809..c4d3e6d 100644 --- a/stix2patterns/grammars/STIXPatternParser.py +++ b/stix2patterns/grammars/STIXPatternParser.py @@ -1,4 +1,4 @@ -# Generated from STIXPattern.g4 by ANTLR 4.7 +# Generated from STIXPattern.g4 by ANTLR 4.7.1 # encoding: utf-8 from __future__ import print_function @@ -11,7 +11,7 @@ from antlr4 import * def serializedATN(): with StringIO() as buf: buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3") - buf.write(u"\64\u00e8\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write(u"\67\u00e8\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write(u"\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t") buf.write(u"\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22") buf.write(u"\4\23\t\23\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\7\3/\n\3\f") @@ -33,120 +33,117 @@ def serializedATN(): buf.write(u"\3\21\7\21\u00d9\n\21\f\21\16\21\u00dc\13\21\3\21\3\21") buf.write(u"\5\21\u00e0\n\21\3\22\3\22\5\22\u00e4\n\22\3\23\3\23") buf.write(u"\3\23\2\t\4\6\b\n\f\16\36\24\2\4\6\b\n\f\16\20\22\24") - buf.write(u"\26\30\32\34\36 \"$\2\t\3\2\36\37\3\2 #\3\2\3\4\3\2\34") - buf.write(u"\35\4\2\7\7\34\34\4\2\3\3\61\61\4\2\3\7\t\t\2\u00f2\2") - buf.write(u"&\3\2\2\2\4(\3\2\2\2\6\63\3\2\2\2\b>\3\2\2\2\nR\3\2\2") - buf.write(u"\2\f_\3\2\2\2\16j\3\2\2\2\20\u00aa\3\2\2\2\22\u00ac\3") - buf.write(u"\2\2\2\24\u00b1\3\2\2\2\26\u00b5\3\2\2\2\30\u00b9\3\2") - buf.write(u"\2\2\32\u00bf\3\2\2\2\34\u00c1\3\2\2\2\36\u00c9\3\2\2") - buf.write(u"\2 \u00df\3\2\2\2\"\u00e3\3\2\2\2$\u00e5\3\2\2\2&\'\5") - buf.write(u"\4\3\2\'\3\3\2\2\2()\b\3\1\2)*\5\6\4\2*\60\3\2\2\2+,") - buf.write(u"\f\4\2\2,-\7\r\2\2-/\5\4\3\5.+\3\2\2\2/\62\3\2\2\2\60") - buf.write(u".\3\2\2\2\60\61\3\2\2\2\61\5\3\2\2\2\62\60\3\2\2\2\63") - buf.write(u"\64\b\4\1\2\64\65\5\b\5\2\65;\3\2\2\2\66\67\f\4\2\2\67") - buf.write(u"8\7\13\2\28:\5\6\4\59\66\3\2\2\2:=\3\2\2\2;9\3\2\2\2") - buf.write(u";<\3\2\2\2<\7\3\2\2\2=;\3\2\2\2>?\b\5\1\2?@\5\n\6\2@") - buf.write(u"F\3\2\2\2AB\f\4\2\2BC\7\n\2\2CE\5\b\5\5DA\3\2\2\2EH\3") - buf.write(u"\2\2\2FD\3\2\2\2FG\3\2\2\2G\t\3\2\2\2HF\3\2\2\2IJ\b\6") - buf.write(u"\1\2JK\7+\2\2KL\5\f\7\2LM\7*\2\2MS\3\2\2\2NO\7)\2\2O") - buf.write(u"P\5\4\3\2PQ\7(\2\2QS\3\2\2\2RI\3\2\2\2RN\3\2\2\2S\\\3") - buf.write(u"\2\2\2TU\f\5\2\2U[\5\22\n\2VW\f\4\2\2W[\5\24\13\2XY\f") - buf.write(u"\3\2\2Y[\5\26\f\2ZT\3\2\2\2ZV\3\2\2\2ZX\3\2\2\2[^\3\2") - buf.write(u"\2\2\\Z\3\2\2\2\\]\3\2\2\2]\13\3\2\2\2^\\\3\2\2\2_`\b") - buf.write(u"\7\1\2`a\5\16\b\2ag\3\2\2\2bc\f\4\2\2cd\7\13\2\2df\5") - buf.write(u"\f\7\5eb\3\2\2\2fi\3\2\2\2ge\3\2\2\2gh\3\2\2\2h\r\3\2") - buf.write(u"\2\2ig\3\2\2\2jk\b\b\1\2kl\5\20\t\2lr\3\2\2\2mn\f\4\2") - buf.write(u"\2no\7\n\2\2oq\5\16\b\5pm\3\2\2\2qt\3\2\2\2rp\3\2\2\2") - buf.write(u"rs\3\2\2\2s\17\3\2\2\2tr\3\2\2\2uw\5\30\r\2vx\7\f\2\2") - buf.write(u"wv\3\2\2\2wx\3\2\2\2xy\3\2\2\2yz\t\2\2\2z{\5\"\22\2{") - buf.write(u"\u00ab\3\2\2\2|~\5\30\r\2}\177\7\f\2\2~}\3\2\2\2~\177") - buf.write(u"\3\2\2\2\177\u0080\3\2\2\2\u0080\u0081\t\3\2\2\u0081") + buf.write(u"\26\30\32\34\36 \"$\2\t\3\2 !\3\2\"%\4\2\4\4\6\6\3\2") + buf.write(u"\36\37\4\2\t\t\36\36\4\2\3\4\63\63\4\2\3\t\13\13\2\u00f2") + buf.write(u"\2&\3\2\2\2\4(\3\2\2\2\6\63\3\2\2\2\b>\3\2\2\2\nR\3\2") + buf.write(u"\2\2\f_\3\2\2\2\16j\3\2\2\2\20\u00aa\3\2\2\2\22\u00ac") + buf.write(u"\3\2\2\2\24\u00b1\3\2\2\2\26\u00b5\3\2\2\2\30\u00b9\3") + 
buf.write(u"\2\2\2\32\u00bf\3\2\2\2\34\u00c1\3\2\2\2\36\u00c9\3\2") + buf.write(u"\2\2 \u00df\3\2\2\2\"\u00e3\3\2\2\2$\u00e5\3\2\2\2&\'") + buf.write(u"\5\4\3\2\'\3\3\2\2\2()\b\3\1\2)*\5\6\4\2*\60\3\2\2\2") + buf.write(u"+,\f\4\2\2,-\7\17\2\2-/\5\4\3\5.+\3\2\2\2/\62\3\2\2\2") + buf.write(u"\60.\3\2\2\2\60\61\3\2\2\2\61\5\3\2\2\2\62\60\3\2\2\2") + buf.write(u"\63\64\b\4\1\2\64\65\5\b\5\2\65;\3\2\2\2\66\67\f\4\2") + buf.write(u"\2\678\7\r\2\28:\5\6\4\59\66\3\2\2\2:=\3\2\2\2;9\3\2") + buf.write(u"\2\2;<\3\2\2\2<\7\3\2\2\2=;\3\2\2\2>?\b\5\1\2?@\5\n\6") + buf.write(u"\2@F\3\2\2\2AB\f\4\2\2BC\7\f\2\2CE\5\b\5\5DA\3\2\2\2") + buf.write(u"EH\3\2\2\2FD\3\2\2\2FG\3\2\2\2G\t\3\2\2\2HF\3\2\2\2I") + buf.write(u"J\b\6\1\2JK\7-\2\2KL\5\f\7\2LM\7,\2\2MS\3\2\2\2NO\7+") + buf.write(u"\2\2OP\5\4\3\2PQ\7*\2\2QS\3\2\2\2RI\3\2\2\2RN\3\2\2\2") + buf.write(u"S\\\3\2\2\2TU\f\5\2\2U[\5\22\n\2VW\f\4\2\2W[\5\24\13") + buf.write(u"\2XY\f\3\2\2Y[\5\26\f\2ZT\3\2\2\2ZV\3\2\2\2ZX\3\2\2\2") + buf.write(u"[^\3\2\2\2\\Z\3\2\2\2\\]\3\2\2\2]\13\3\2\2\2^\\\3\2\2") + buf.write(u"\2_`\b\7\1\2`a\5\16\b\2ag\3\2\2\2bc\f\4\2\2cd\7\r\2\2") + buf.write(u"df\5\f\7\5eb\3\2\2\2fi\3\2\2\2ge\3\2\2\2gh\3\2\2\2h\r") + buf.write(u"\3\2\2\2ig\3\2\2\2jk\b\b\1\2kl\5\20\t\2lr\3\2\2\2mn\f") + buf.write(u"\4\2\2no\7\f\2\2oq\5\16\b\5pm\3\2\2\2qt\3\2\2\2rp\3\2") + buf.write(u"\2\2rs\3\2\2\2s\17\3\2\2\2tr\3\2\2\2uw\5\30\r\2vx\7\16") + buf.write(u"\2\2wv\3\2\2\2wx\3\2\2\2xy\3\2\2\2yz\t\2\2\2z{\5\"\22") + buf.write(u"\2{\u00ab\3\2\2\2|~\5\30\r\2}\177\7\16\2\2~}\3\2\2\2") + buf.write(u"~\177\3\2\2\2\177\u0080\3\2\2\2\u0080\u0081\t\3\2\2\u0081") buf.write(u"\u0082\5$\23\2\u0082\u00ab\3\2\2\2\u0083\u0085\5\30\r") - buf.write(u"\2\u0084\u0086\7\f\2\2\u0085\u0084\3\2\2\2\u0085\u0086") - buf.write(u"\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\7\23\2\2\u0088") + buf.write(u"\2\u0084\u0086\7\16\2\2\u0085\u0084\3\2\2\2\u0085\u0086") + buf.write(u"\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\7\25\2\2\u0088") buf.write(u"\u0089\5 \21\2\u0089\u00ab\3\2\2\2\u008a\u008c\5\30\r") - buf.write(u"\2\u008b\u008d\7\f\2\2\u008c\u008b\3\2\2\2\u008c\u008d") - buf.write(u"\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u008f\7\16\2\2\u008f") - buf.write(u"\u0090\7\7\2\2\u0090\u00ab\3\2\2\2\u0091\u0093\5\30\r") - buf.write(u"\2\u0092\u0094\7\f\2\2\u0093\u0092\3\2\2\2\u0093\u0094") - buf.write(u"\3\2\2\2\u0094\u0095\3\2\2\2\u0095\u0096\7\17\2\2\u0096") - buf.write(u"\u0097\7\7\2\2\u0097\u00ab\3\2\2\2\u0098\u009a\5\30\r") - buf.write(u"\2\u0099\u009b\7\f\2\2\u009a\u0099\3\2\2\2\u009a\u009b") - buf.write(u"\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\7\21\2\2\u009d") - buf.write(u"\u009e\7\7\2\2\u009e\u00ab\3\2\2\2\u009f\u00a1\5\30\r") - buf.write(u"\2\u00a0\u00a2\7\f\2\2\u00a1\u00a0\3\2\2\2\u00a1\u00a2") - buf.write(u"\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3\u00a4\7\20\2\2\u00a4") - buf.write(u"\u00a5\7\7\2\2\u00a5\u00ab\3\2\2\2\u00a6\u00a7\7)\2\2") - buf.write(u"\u00a7\u00a8\5\f\7\2\u00a8\u00a9\7(\2\2\u00a9\u00ab\3") + buf.write(u"\2\u008b\u008d\7\16\2\2\u008c\u008b\3\2\2\2\u008c\u008d") + buf.write(u"\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u008f\7\20\2\2\u008f") + buf.write(u"\u0090\7\t\2\2\u0090\u00ab\3\2\2\2\u0091\u0093\5\30\r") + buf.write(u"\2\u0092\u0094\7\16\2\2\u0093\u0092\3\2\2\2\u0093\u0094") + buf.write(u"\3\2\2\2\u0094\u0095\3\2\2\2\u0095\u0096\7\21\2\2\u0096") + buf.write(u"\u0097\7\t\2\2\u0097\u00ab\3\2\2\2\u0098\u009a\5\30\r") + buf.write(u"\2\u0099\u009b\7\16\2\2\u009a\u0099\3\2\2\2\u009a\u009b") + buf.write(u"\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\7\23\2\2\u009d") + 
buf.write(u"\u009e\7\t\2\2\u009e\u00ab\3\2\2\2\u009f\u00a1\5\30\r") + buf.write(u"\2\u00a0\u00a2\7\16\2\2\u00a1\u00a0\3\2\2\2\u00a1\u00a2") + buf.write(u"\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3\u00a4\7\22\2\2\u00a4") + buf.write(u"\u00a5\7\t\2\2\u00a5\u00ab\3\2\2\2\u00a6\u00a7\7+\2\2") + buf.write(u"\u00a7\u00a8\5\f\7\2\u00a8\u00a9\7*\2\2\u00a9\u00ab\3") buf.write(u"\2\2\2\u00aau\3\2\2\2\u00aa|\3\2\2\2\u00aa\u0083\3\2") buf.write(u"\2\2\u00aa\u008a\3\2\2\2\u00aa\u0091\3\2\2\2\u00aa\u0098") buf.write(u"\3\2\2\2\u00aa\u009f\3\2\2\2\u00aa\u00a6\3\2\2\2\u00ab") - buf.write(u"\21\3\2\2\2\u00ac\u00ad\7\24\2\2\u00ad\u00ae\7\7\2\2") - buf.write(u"\u00ae\u00af\7\25\2\2\u00af\u00b0\7\7\2\2\u00b0\23\3") - buf.write(u"\2\2\2\u00b1\u00b2\7\31\2\2\u00b2\u00b3\t\4\2\2\u00b3") - buf.write(u"\u00b4\7\26\2\2\u00b4\25\3\2\2\2\u00b5\u00b6\7\32\2\2") - buf.write(u"\u00b6\u00b7\7\3\2\2\u00b7\u00b8\7\33\2\2\u00b8\27\3") - buf.write(u"\2\2\2\u00b9\u00ba\5\32\16\2\u00ba\u00bb\7%\2\2\u00bb") + buf.write(u"\21\3\2\2\2\u00ac\u00ad\7\26\2\2\u00ad\u00ae\7\t\2\2") + buf.write(u"\u00ae\u00af\7\27\2\2\u00af\u00b0\7\t\2\2\u00b0\23\3") + buf.write(u"\2\2\2\u00b1\u00b2\7\33\2\2\u00b2\u00b3\t\4\2\2\u00b3") + buf.write(u"\u00b4\7\30\2\2\u00b4\25\3\2\2\2\u00b5\u00b6\7\34\2\2") + buf.write(u"\u00b6\u00b7\7\4\2\2\u00b7\u00b8\7\35\2\2\u00b8\27\3") + buf.write(u"\2\2\2\u00b9\u00ba\5\32\16\2\u00ba\u00bb\7\'\2\2\u00bb") buf.write(u"\u00bd\5\34\17\2\u00bc\u00be\5\36\20\2\u00bd\u00bc\3") buf.write(u"\2\2\2\u00bd\u00be\3\2\2\2\u00be\31\3\2\2\2\u00bf\u00c0") buf.write(u"\t\5\2\2\u00c0\33\3\2\2\2\u00c1\u00c2\t\6\2\2\u00c2\35") - buf.write(u"\3\2\2\2\u00c3\u00c4\b\20\1\2\u00c4\u00c5\7&\2\2\u00c5") - buf.write(u"\u00ca\t\6\2\2\u00c6\u00c7\7+\2\2\u00c7\u00c8\t\7\2\2") - buf.write(u"\u00c8\u00ca\7*\2\2\u00c9\u00c3\3\2\2\2\u00c9\u00c6\3") + buf.write(u"\3\2\2\2\u00c3\u00c4\b\20\1\2\u00c4\u00c5\7(\2\2\u00c5") + buf.write(u"\u00ca\t\6\2\2\u00c6\u00c7\7-\2\2\u00c7\u00c8\t\7\2\2") + buf.write(u"\u00c8\u00ca\7,\2\2\u00c9\u00c3\3\2\2\2\u00c9\u00c6\3") buf.write(u"\2\2\2\u00ca\u00cf\3\2\2\2\u00cb\u00cc\f\5\2\2\u00cc") buf.write(u"\u00ce\5\36\20\6\u00cd\u00cb\3\2\2\2\u00ce\u00d1\3\2") buf.write(u"\2\2\u00cf\u00cd\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0\37") - buf.write(u"\3\2\2\2\u00d1\u00cf\3\2\2\2\u00d2\u00d3\7)\2\2\u00d3") - buf.write(u"\u00e0\7(\2\2\u00d4\u00d5\7)\2\2\u00d5\u00da\5\"\22\2") - buf.write(u"\u00d6\u00d7\7\'\2\2\u00d7\u00d9\5\"\22\2\u00d8\u00d6") + buf.write(u"\3\2\2\2\u00d1\u00cf\3\2\2\2\u00d2\u00d3\7+\2\2\u00d3") + buf.write(u"\u00e0\7*\2\2\u00d4\u00d5\7+\2\2\u00d5\u00da\5\"\22\2") + buf.write(u"\u00d6\u00d7\7)\2\2\u00d7\u00d9\5\"\22\2\u00d8\u00d6") buf.write(u"\3\2\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3\2\2\2\u00da") buf.write(u"\u00db\3\2\2\2\u00db\u00dd\3\2\2\2\u00dc\u00da\3\2\2") - buf.write(u"\2\u00dd\u00de\7(\2\2\u00de\u00e0\3\2\2\2\u00df\u00d2") + buf.write(u"\2\u00dd\u00de\7*\2\2\u00de\u00e0\3\2\2\2\u00df\u00d2") buf.write(u"\3\2\2\2\u00df\u00d4\3\2\2\2\u00e0!\3\2\2\2\u00e1\u00e4") - buf.write(u"\5$\23\2\u00e2\u00e4\7\b\2\2\u00e3\u00e1\3\2\2\2\u00e3") + buf.write(u"\5$\23\2\u00e2\u00e4\7\n\2\2\u00e3\u00e1\3\2\2\2\u00e3") buf.write(u"\u00e2\3\2\2\2\u00e4#\3\2\2\2\u00e5\u00e6\t\b\2\2\u00e6") buf.write(u"%\3\2\2\2\30\60;FRZ\\grw~\u0085\u008c\u0093\u009a\u00a1") buf.write(u"\u00aa\u00bd\u00c9\u00cf\u00da\u00df\u00e3") return buf.getvalue() -class STIXPatternParser(Parser): +class STIXPatternParser ( Parser ): grammarFileName = "STIXPattern.g4" atn = ATNDeserializer().deserialize(serializedATN()) - decisionsToDFA = [DFA(ds, i) for i, ds in 
enumerate(atn.decisionToState)] + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] sharedContextCache = PredictionContextCache() - literalNames = [ - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", - u"<INVALID>", u"<INVALID>", u"'<'", u"'<='", u"'>'", - u"'>='", u"'''", u"':'", u"'.'", u"','", u"')'", u"'('", - u"']'", u"'['", u"'+'", u"<INVALID>", u"'-'", u"'^'", - u"'/'", u"'*'" - ] - - symbolicNames = [ - u"<INVALID>", u"IntLiteral", u"FloatLiteral", u"HexLiteral", - u"BinaryLiteral", u"StringLiteral", u"BoolLiteral", - u"TimestampLiteral", u"AND", u"OR", u"NOT", u"FOLLOWEDBY", - u"LIKE", u"MATCHES", u"ISSUPERSET", u"ISSUBSET", u"LAST", - u"IN", u"START", u"STOP", u"SECONDS", u"TRUE", u"FALSE", - u"WITHIN", u"REPEATS", u"TIMES", u"IdentifierWithoutHyphen", - u"IdentifierWithHyphen", u"EQ", u"NEQ", u"LT", u"LE", - u"GT", u"GE", u"QUOTE", u"COLON", u"DOT", u"COMMA", - u"RPAREN", u"LPAREN", u"RBRACK", u"LBRACK", u"PLUS", - u"HYPHEN", u"MINUS", u"POWER_OP", u"DIVIDE", u"ASTERISK", - u"WS", u"COMMENT", u"LINE_COMMENT" - ] + literalNames = [ u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>", + u"'<'", u"'<='", u"'>'", u"'>='", u"'''", u"':'", u"'.'", + u"','", u"')'", u"'('", u"']'", u"'['", u"'+'", u"<INVALID>", + u"'-'", u"'^'", u"'/'", u"'*'" ] + + symbolicNames = [ u"<INVALID>", u"IntNegLiteral", u"IntPosLiteral", + u"FloatNegLiteral", u"FloatPosLiteral", u"HexLiteral", + u"BinaryLiteral", u"StringLiteral", u"BoolLiteral", + u"TimestampLiteral", u"AND", u"OR", u"NOT", u"FOLLOWEDBY", + u"LIKE", u"MATCHES", u"ISSUPERSET", u"ISSUBSET", u"LAST", + u"IN", u"START", u"STOP", u"SECONDS", u"TRUE", u"FALSE", + u"WITHIN", u"REPEATS", u"TIMES", u"IdentifierWithoutHyphen", + u"IdentifierWithHyphen", u"EQ", u"NEQ", u"LT", u"LE", + u"GT", u"GE", u"QUOTE", u"COLON", u"DOT", u"COMMA", + u"RPAREN", u"LPAREN", u"RBRACK", u"LBRACK", u"PLUS", + u"HYPHEN", u"MINUS", u"POWER_OP", u"DIVIDE", u"ASTERISK", + u"WS", u"COMMENT", u"LINE_COMMENT", u"InvalidCharacter" ] RULE_pattern = 0 RULE_observationExpressions = 1 @@ -167,74 +164,77 @@ class STIXPatternParser(Parser): RULE_primitiveLiteral = 16 RULE_orderableLiteral = 17 - ruleNames = [ - u"pattern", u"observationExpressions", u"observationExpressionOr", - u"observationExpressionAnd", u"observationExpression", - u"comparisonExpression", u"comparisonExpressionAnd", - u"propTest", u"startStopQualifier", u"withinQualifier", - u"repeatedQualifier", u"objectPath", u"objectType", u"firstPathComponent", - u"objectPathComponent", u"setLiteral", u"primitiveLiteral", - u"orderableLiteral" - ] + ruleNames = [ u"pattern", u"observationExpressions", u"observationExpressionOr", + u"observationExpressionAnd", u"observationExpression", + u"comparisonExpression", u"comparisonExpressionAnd", + 
u"propTest", u"startStopQualifier", u"withinQualifier", + u"repeatedQualifier", u"objectPath", u"objectType", u"firstPathComponent", + u"objectPathComponent", u"setLiteral", u"primitiveLiteral", + u"orderableLiteral" ] EOF = Token.EOF - IntLiteral = 1 - FloatLiteral = 2 - HexLiteral = 3 - BinaryLiteral = 4 - StringLiteral = 5 - BoolLiteral = 6 - TimestampLiteral = 7 - AND = 8 - OR = 9 - NOT = 10 - FOLLOWEDBY = 11 - LIKE = 12 - MATCHES = 13 - ISSUPERSET = 14 - ISSUBSET = 15 - LAST = 16 - IN = 17 - START = 18 - STOP = 19 - SECONDS = 20 - TRUE = 21 - FALSE = 22 - WITHIN = 23 - REPEATS = 24 - TIMES = 25 - IdentifierWithoutHyphen = 26 - IdentifierWithHyphen = 27 - EQ = 28 - NEQ = 29 - LT = 30 - LE = 31 - GT = 32 - GE = 33 - QUOTE = 34 - COLON = 35 - DOT = 36 - COMMA = 37 - RPAREN = 38 - LPAREN = 39 - RBRACK = 40 - LBRACK = 41 - PLUS = 42 - HYPHEN = 43 - MINUS = 44 - POWER_OP = 45 - DIVIDE = 46 - ASTERISK = 47 - WS = 48 - COMMENT = 49 - LINE_COMMENT = 50 + IntNegLiteral=1 + IntPosLiteral=2 + FloatNegLiteral=3 + FloatPosLiteral=4 + HexLiteral=5 + BinaryLiteral=6 + StringLiteral=7 + BoolLiteral=8 + TimestampLiteral=9 + AND=10 + OR=11 + NOT=12 + FOLLOWEDBY=13 + LIKE=14 + MATCHES=15 + ISSUPERSET=16 + ISSUBSET=17 + LAST=18 + IN=19 + START=20 + STOP=21 + SECONDS=22 + TRUE=23 + FALSE=24 + WITHIN=25 + REPEATS=26 + TIMES=27 + IdentifierWithoutHyphen=28 + IdentifierWithHyphen=29 + EQ=30 + NEQ=31 + LT=32 + LE=33 + GT=34 + GE=35 + QUOTE=36 + COLON=37 + DOT=38 + COMMA=39 + RPAREN=40 + LPAREN=41 + RBRACK=42 + LBRACK=43 + PLUS=44 + HYPHEN=45 + MINUS=46 + POWER_OP=47 + DIVIDE=48 + ASTERISK=49 + WS=50 + COMMENT=51 + LINE_COMMENT=52 + InvalidCharacter=53 def __init__(self, input, output=sys.stdout): super(STIXPatternParser, self).__init__(input, output=output) - self.checkVersion("4.7") + self.checkVersion("4.7.1") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) self._predicates = None + + class PatternContext(ParserRuleContext): def __init__(self, parser, parent=None, invokingState=-1): @@ -242,7 +242,8 @@ class STIXPatternParser(Parser): self.parser = parser def observationExpressions(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionsContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionsContext,0) + def getRuleIndex(self): return STIXPatternParser.RULE_pattern @@ -255,7 +256,11 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPattern"): listener.exitPattern(self) + + + def pattern(self): + localctx = STIXPatternParser.PatternContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_pattern) try: @@ -277,13 +282,15 @@ class STIXPatternParser(Parser): self.parser = parser def observationExpressionOr(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionOrContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionOrContext,0) + def observationExpressions(self, i=None): if i is None: return self.getTypedRuleContexts(STIXPatternParser.ObservationExpressionsContext) else: - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionsContext, i) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionsContext,i) + def FOLLOWEDBY(self): return self.getToken(STIXPatternParser.FOLLOWEDBY, 0) @@ -299,10 +306,13 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressions"): listener.exitObservationExpressions(self) + + def observationExpressions(self, _p=0): _parentctx = self._ctx 
_parentState = self.state localctx = STIXPatternParser.ObservationExpressionsContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 2 self.enterRecursionRule(localctx, 2, self.RULE_observationExpressions, _p) try: @@ -312,11 +322,12 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 46 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 0, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,0,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx localctx = STIXPatternParser.ObservationExpressionsContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_observationExpressions) self.state = 41 @@ -329,7 +340,7 @@ class STIXPatternParser(Parser): self.observationExpressions(3) self.state = 48 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 0, self._ctx) + _alt = self._interp.adaptivePredict(self._input,0,self._ctx) except RecognitionException as re: localctx.exception = re @@ -346,13 +357,15 @@ class STIXPatternParser(Parser): self.parser = parser def observationExpressionAnd(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionAndContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionAndContext,0) + def observationExpressionOr(self, i=None): if i is None: return self.getTypedRuleContexts(STIXPatternParser.ObservationExpressionOrContext) else: - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionOrContext, i) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionOrContext,i) + def OR(self): return self.getToken(STIXPatternParser.OR, 0) @@ -368,10 +381,13 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressionOr"): listener.exitObservationExpressionOr(self) + + def observationExpressionOr(self, _p=0): _parentctx = self._ctx _parentState = self.state localctx = STIXPatternParser.ObservationExpressionOrContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 4 self.enterRecursionRule(localctx, 4, self.RULE_observationExpressionOr, _p) try: @@ -381,11 +397,12 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 57 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 1, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx localctx = STIXPatternParser.ObservationExpressionOrContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_observationExpressionOr) self.state = 52 @@ -398,7 +415,8 @@ class STIXPatternParser(Parser): self.observationExpressionOr(3) self.state = 59 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 1, self._ctx) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -414,13 +432,15 @@ class STIXPatternParser(Parser): self.parser = parser def observationExpression(self): - return 
self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext,0) + def observationExpressionAnd(self, i=None): if i is None: return self.getTypedRuleContexts(STIXPatternParser.ObservationExpressionAndContext) else: - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionAndContext, i) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionAndContext,i) + def AND(self): return self.getToken(STIXPatternParser.AND, 0) @@ -436,10 +456,13 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressionAnd"): listener.exitObservationExpressionAnd(self) + + def observationExpressionAnd(self, _p=0): _parentctx = self._ctx _parentState = self.state localctx = STIXPatternParser.ObservationExpressionAndContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 6 self.enterRecursionRule(localctx, 6, self.RULE_observationExpressionAnd, _p) try: @@ -449,11 +472,12 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 68 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 2, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,2,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx localctx = STIXPatternParser.ObservationExpressionAndContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_observationExpressionAnd) self.state = 63 @@ -466,7 +490,8 @@ class STIXPatternParser(Parser): self.observationExpressionAnd(3) self.state = 70 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 2, self._ctx) + _alt = self._interp.adaptivePredict(self._input,2,self._ctx) + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -481,23 +506,27 @@ class STIXPatternParser(Parser): super(STIXPatternParser.ObservationExpressionContext, self).__init__(parent, invokingState) self.parser = parser + def getRuleIndex(self): return STIXPatternParser.RULE_observationExpression + def copyFrom(self, ctx): super(STIXPatternParser.ObservationExpressionContext, self).copyFrom(ctx) + class ObservationExpressionRepeatedContext(ObservationExpressionContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) super(STIXPatternParser.ObservationExpressionRepeatedContext, self).__init__(parser) self.copyFrom(ctx) def observationExpression(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext,0) def repeatedQualifier(self): - return self.getTypedRuleContext(STIXPatternParser.RepeatedQualifierContext, 0) + return self.getTypedRuleContext(STIXPatternParser.RepeatedQualifierContext,0) + def enterRule(self, listener): if hasattr(listener, "enterObservationExpressionRepeated"): @@ -507,17 +536,17 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressionRepeated"): listener.exitObservationExpressionRepeated(self) + class ObservationExpressionSimpleContext(ObservationExpressionContext): - def __init__(self, parser, ctx): # actually a 
STIXPatternParser.ObservationExpressionContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) super(STIXPatternParser.ObservationExpressionSimpleContext, self).__init__(parser) self.copyFrom(ctx) def LBRACK(self): return self.getToken(STIXPatternParser.LBRACK, 0) - def comparisonExpression(self): - return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionContext,0) def RBRACK(self): return self.getToken(STIXPatternParser.RBRACK, 0) @@ -530,17 +559,17 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressionSimple"): listener.exitObservationExpressionSimple(self) + class ObservationExpressionCompoundContext(ObservationExpressionContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) super(STIXPatternParser.ObservationExpressionCompoundContext, self).__init__(parser) self.copyFrom(ctx) def LPAREN(self): return self.getToken(STIXPatternParser.LPAREN, 0) - def observationExpressions(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionsContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionsContext,0) def RPAREN(self): return self.getToken(STIXPatternParser.RPAREN, 0) @@ -553,17 +582,19 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressionCompound"): listener.exitObservationExpressionCompound(self) + class ObservationExpressionWithinContext(ObservationExpressionContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) super(STIXPatternParser.ObservationExpressionWithinContext, self).__init__(parser) self.copyFrom(ctx) def observationExpression(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext,0) def withinQualifier(self): - return self.getTypedRuleContext(STIXPatternParser.WithinQualifierContext, 0) + return self.getTypedRuleContext(STIXPatternParser.WithinQualifierContext,0) + def enterRule(self, listener): if hasattr(listener, "enterObservationExpressionWithin"): @@ -573,17 +604,19 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObservationExpressionWithin"): listener.exitObservationExpressionWithin(self) + class ObservationExpressionStartStopContext(ObservationExpressionContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObservationExpressionContext) super(STIXPatternParser.ObservationExpressionStartStopContext, self).__init__(parser) self.copyFrom(ctx) def observationExpression(self): - return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObservationExpressionContext,0) def startStopQualifier(self): - return self.getTypedRuleContext(STIXPatternParser.StartStopQualifierContext, 0) + return self.getTypedRuleContext(STIXPatternParser.StartStopQualifierContext,0) + def enterRule(self, listener): if hasattr(listener, "enterObservationExpressionStartStop"): @@ -593,10 +626,13 @@ class STIXPatternParser(Parser): if 
hasattr(listener, "exitObservationExpressionStartStop"): listener.exitObservationExpressionStartStop(self) + + def observationExpression(self, _p=0): _parentctx = self._ctx _parentState = self.state localctx = STIXPatternParser.ObservationExpressionContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 8 self.enterRecursionRule(localctx, 8, self.RULE_observationExpression, _p) try: @@ -607,6 +643,7 @@ class STIXPatternParser(Parser): if token in [STIXPatternParser.LBRACK]: localctx = STIXPatternParser.ObservationExpressionSimpleContext(self, localctx) self._ctx = localctx + _prevctx = localctx self.state = 72 self.match(STIXPatternParser.LBRACK) @@ -618,6 +655,7 @@ class STIXPatternParser(Parser): elif token in [STIXPatternParser.LPAREN]: localctx = STIXPatternParser.ObservationExpressionCompoundContext(self, localctx) self._ctx = localctx + _prevctx = localctx self.state = 76 self.match(STIXPatternParser.LPAREN) self.state = 77 @@ -631,17 +669,17 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 90 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 5, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,5,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx self.state = 88 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 4, self._ctx) + la_ = self._interp.adaptivePredict(self._input,4,self._ctx) if la_ == 1: - localctx = STIXPatternParser.\ - ObservationExpressionStartStopContext(self, STIXPatternParser.ObservationExpressionContext(self, _parentctx, _parentState)) + localctx = STIXPatternParser.ObservationExpressionStartStopContext(self, STIXPatternParser.ObservationExpressionContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_observationExpression) self.state = 82 if not self.precpred(self._ctx, 3): @@ -652,8 +690,7 @@ class STIXPatternParser(Parser): pass elif la_ == 2: - localctx = STIXPatternParser.\ - ObservationExpressionWithinContext(self, STIXPatternParser.ObservationExpressionContext(self, _parentctx, _parentState)) + localctx = STIXPatternParser.ObservationExpressionWithinContext(self, STIXPatternParser.ObservationExpressionContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_observationExpression) self.state = 84 if not self.precpred(self._ctx, 2): @@ -664,8 +701,7 @@ class STIXPatternParser(Parser): pass elif la_ == 3: - localctx = STIXPatternParser.\ - ObservationExpressionRepeatedContext(self, STIXPatternParser.ObservationExpressionContext(self, _parentctx, _parentState)) + localctx = STIXPatternParser.ObservationExpressionRepeatedContext(self, STIXPatternParser.ObservationExpressionContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_observationExpression) self.state = 86 if not self.precpred(self._ctx, 1): @@ -675,9 +711,10 @@ class STIXPatternParser(Parser): self.repeatedQualifier() pass + self.state = 92 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 5, self._ctx) + _alt = self._interp.adaptivePredict(self._input,5,self._ctx) except RecognitionException as re: localctx.exception = re @@ -694,13 +731,15 @@ class STIXPatternParser(Parser): self.parser = parser def comparisonExpressionAnd(self): - 
return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionAndContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionAndContext,0) + def comparisonExpression(self, i=None): if i is None: return self.getTypedRuleContexts(STIXPatternParser.ComparisonExpressionContext) else: - return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionContext, i) + return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionContext,i) + def OR(self): return self.getToken(STIXPatternParser.OR, 0) @@ -716,10 +755,13 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitComparisonExpression"): listener.exitComparisonExpression(self) + + def comparisonExpression(self, _p=0): _parentctx = self._ctx _parentState = self.state localctx = STIXPatternParser.ComparisonExpressionContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 10 self.enterRecursionRule(localctx, 10, self.RULE_comparisonExpression, _p) try: @@ -729,11 +771,12 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 101 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 6, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,6,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx localctx = STIXPatternParser.ComparisonExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_comparisonExpression) self.state = 96 @@ -746,7 +789,7 @@ class STIXPatternParser(Parser): self.comparisonExpression(3) self.state = 103 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 6, self._ctx) + _alt = self._interp.adaptivePredict(self._input,6,self._ctx) except RecognitionException as re: localctx.exception = re @@ -763,13 +806,15 @@ class STIXPatternParser(Parser): self.parser = parser def propTest(self): - return self.getTypedRuleContext(STIXPatternParser.PropTestContext, 0) + return self.getTypedRuleContext(STIXPatternParser.PropTestContext,0) + def comparisonExpressionAnd(self, i=None): if i is None: return self.getTypedRuleContexts(STIXPatternParser.ComparisonExpressionAndContext) else: - return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionAndContext, i) + return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionAndContext,i) + def AND(self): return self.getToken(STIXPatternParser.AND, 0) @@ -785,10 +830,13 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitComparisonExpressionAnd"): listener.exitComparisonExpressionAnd(self) + + def comparisonExpressionAnd(self, _p=0): _parentctx = self._ctx _parentState = self.state localctx = STIXPatternParser.ComparisonExpressionAndContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 12 self.enterRecursionRule(localctx, 12, self.RULE_comparisonExpressionAnd, _p) try: @@ -798,11 +846,12 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 112 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 7, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,7,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx localctx = 
STIXPatternParser.ComparisonExpressionAndContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_comparisonExpressionAnd) self.state = 107 @@ -815,7 +864,7 @@ class STIXPatternParser(Parser): self.comparisonExpressionAnd(3) self.state = 114 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 7, self._ctx) + _alt = self._interp.adaptivePredict(self._input,7,self._ctx) except RecognitionException as re: localctx.exception = re @@ -831,27 +880,29 @@ class STIXPatternParser(Parser): super(STIXPatternParser.PropTestContext, self).__init__(parent, invokingState) self.parser = parser + def getRuleIndex(self): return STIXPatternParser.RULE_propTest + def copyFrom(self, ctx): super(STIXPatternParser.PropTestContext, self).copyFrom(ctx) + + class PropTestRegexContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestRegexContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def MATCHES(self): return self.getToken(STIXPatternParser.MATCHES, 0) - def StringLiteral(self): return self.getToken(STIXPatternParser.StringLiteral, 0) - def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -863,30 +914,27 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestRegex"): listener.exitPropTestRegex(self) + class PropTestOrderContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestOrderContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def orderableLiteral(self): - return self.getTypedRuleContext(STIXPatternParser.OrderableLiteralContext, 0) + return self.getTypedRuleContext(STIXPatternParser.OrderableLiteralContext,0) def GT(self): return self.getToken(STIXPatternParser.GT, 0) - def LT(self): return self.getToken(STIXPatternParser.LT, 0) - def GE(self): return self.getToken(STIXPatternParser.GE, 0) - def LE(self): return self.getToken(STIXPatternParser.LE, 0) - def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -898,21 +946,20 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestOrder"): listener.exitPropTestOrder(self) + class PropTestLikeContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestLikeContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def LIKE(self): return self.getToken(STIXPatternParser.LIKE, 0) - def StringLiteral(self): return self.getToken(STIXPatternParser.StringLiteral, 0) - def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -924,24 +971,23 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestLike"): listener.exitPropTestLike(self) + class 
PropTestEqualContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestEqualContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def primitiveLiteral(self): - return self.getTypedRuleContext(STIXPatternParser.PrimitiveLiteralContext, 0) + return self.getTypedRuleContext(STIXPatternParser.PrimitiveLiteralContext,0) def EQ(self): return self.getToken(STIXPatternParser.EQ, 0) - def NEQ(self): return self.getToken(STIXPatternParser.NEQ, 0) - def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -953,20 +999,20 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestEqual"): listener.exitPropTestEqual(self) + class PropTestSetContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestSetContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def IN(self): return self.getToken(STIXPatternParser.IN, 0) - def setLiteral(self): - return self.getTypedRuleContext(STIXPatternParser.SetLiteralContext, 0) + return self.getTypedRuleContext(STIXPatternParser.SetLiteralContext,0) def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -979,21 +1025,20 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestSet"): listener.exitPropTestSet(self) + class PropTestIsSubsetContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestIsSubsetContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def ISSUBSET(self): return self.getToken(STIXPatternParser.ISSUBSET, 0) - def StringLiteral(self): return self.getToken(STIXPatternParser.StringLiteral, 0) - def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -1005,17 +1050,17 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestIsSubset"): listener.exitPropTestIsSubset(self) + class PropTestParenContext(PropTestContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestParenContext, self).__init__(parser) self.copyFrom(ctx) def LPAREN(self): return self.getToken(STIXPatternParser.LPAREN, 0) - def comparisonExpression(self): - return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ComparisonExpressionContext,0) def RPAREN(self): return self.getToken(STIXPatternParser.RPAREN, 0) @@ -1028,21 +1073,20 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestParen"): listener.exitPropTestParen(self) + class PropTestIsSupersetContext(PropTestContext): - def __init__(self, parser, ctx): # 
actually a STIXPatternParser.PropTestContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.PropTestContext) super(STIXPatternParser.PropTestIsSupersetContext, self).__init__(parser) self.copyFrom(ctx) def objectPath(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathContext,0) def ISSUPERSET(self): return self.getToken(STIXPatternParser.ISSUPERSET, 0) - def StringLiteral(self): return self.getToken(STIXPatternParser.StringLiteral, 0) - def NOT(self): return self.getToken(STIXPatternParser.NOT, 0) @@ -1054,14 +1098,17 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPropTestIsSuperset"): listener.exitPropTestIsSuperset(self) + + def propTest(self): + localctx = STIXPatternParser.PropTestContext(self, self._ctx, self.state) self.enterRule(localctx, 14, self.RULE_propTest) - self._la = 0 # Token type + self._la = 0 # Token type try: self.state = 168 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 15, self._ctx) + la_ = self._interp.adaptivePredict(self._input,15,self._ctx) if la_ == 1: localctx = STIXPatternParser.PropTestEqualContext(self, localctx) self.enterOuterAlt(localctx, 1) @@ -1070,13 +1117,14 @@ class STIXPatternParser(Parser): self.state = 117 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 116 self.match(STIXPatternParser.NOT) + self.state = 119 _la = self._input.LA(1) - if not(_la == STIXPatternParser.EQ or _la == STIXPatternParser.NEQ): + if not(_la==STIXPatternParser.EQ or _la==STIXPatternParser.NEQ): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1093,17 +1141,14 @@ class STIXPatternParser(Parser): self.state = 124 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 123 self.match(STIXPatternParser.NOT) + self.state = 126 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ( - (1 << STIXPatternParser.LT) | - (1 << STIXPatternParser.LE) | - (1 << STIXPatternParser.GT) | - (1 << STIXPatternParser.GE))) != 0)): + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << STIXPatternParser.LT) | (1 << STIXPatternParser.LE) | (1 << STIXPatternParser.GT) | (1 << STIXPatternParser.GE))) != 0)): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1120,10 +1165,11 @@ class STIXPatternParser(Parser): self.state = 131 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 130 self.match(STIXPatternParser.NOT) + self.state = 133 self.match(STIXPatternParser.IN) self.state = 134 @@ -1138,10 +1184,11 @@ class STIXPatternParser(Parser): self.state = 138 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 137 self.match(STIXPatternParser.NOT) + self.state = 140 self.match(STIXPatternParser.LIKE) self.state = 141 @@ -1156,10 +1203,11 @@ class STIXPatternParser(Parser): self.state = 145 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 144 self.match(STIXPatternParser.NOT) + self.state = 147 self.match(STIXPatternParser.MATCHES) self.state = 148 @@ -1174,10 +1222,11 @@ class STIXPatternParser(Parser): self.state = 152 self._errHandler.sync(self) _la = 
self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 151 self.match(STIXPatternParser.NOT) + self.state = 154 self.match(STIXPatternParser.ISSUBSET) self.state = 155 @@ -1192,10 +1241,11 @@ class STIXPatternParser(Parser): self.state = 159 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.NOT: + if _la==STIXPatternParser.NOT: self.state = 158 self.match(STIXPatternParser.NOT) + self.state = 161 self.match(STIXPatternParser.ISSUPERSET) self.state = 162 @@ -1213,6 +1263,7 @@ class STIXPatternParser(Parser): self.match(STIXPatternParser.RPAREN) pass + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1250,7 +1301,11 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitStartStopQualifier"): listener.exitStartStopQualifier(self) + + + def startStopQualifier(self): + localctx = STIXPatternParser.StartStopQualifierContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_startStopQualifier) try: @@ -1283,11 +1338,11 @@ class STIXPatternParser(Parser): def SECONDS(self): return self.getToken(STIXPatternParser.SECONDS, 0) - def IntLiteral(self): - return self.getToken(STIXPatternParser.IntLiteral, 0) + def IntPosLiteral(self): + return self.getToken(STIXPatternParser.IntPosLiteral, 0) - def FloatLiteral(self): - return self.getToken(STIXPatternParser.FloatLiteral, 0) + def FloatPosLiteral(self): + return self.getToken(STIXPatternParser.FloatPosLiteral, 0) def getRuleIndex(self): return STIXPatternParser.RULE_withinQualifier @@ -1300,17 +1355,21 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitWithinQualifier"): listener.exitWithinQualifier(self) + + + def withinQualifier(self): + localctx = STIXPatternParser.WithinQualifierContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_withinQualifier) - self._la = 0 # Token type + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 175 self.match(STIXPatternParser.WITHIN) self.state = 176 _la = self._input.LA(1) - if not(_la == STIXPatternParser.IntLiteral or _la == STIXPatternParser.FloatLiteral): + if not(_la==STIXPatternParser.IntPosLiteral or _la==STIXPatternParser.FloatPosLiteral): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1334,8 +1393,8 @@ class STIXPatternParser(Parser): def REPEATS(self): return self.getToken(STIXPatternParser.REPEATS, 0) - def IntLiteral(self): - return self.getToken(STIXPatternParser.IntLiteral, 0) + def IntPosLiteral(self): + return self.getToken(STIXPatternParser.IntPosLiteral, 0) def TIMES(self): return self.getToken(STIXPatternParser.TIMES, 0) @@ -1351,7 +1410,11 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitRepeatedQualifier"): listener.exitRepeatedQualifier(self) + + + def repeatedQualifier(self): + localctx = STIXPatternParser.RepeatedQualifierContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_repeatedQualifier) try: @@ -1359,7 +1422,7 @@ class STIXPatternParser(Parser): self.state = 179 self.match(STIXPatternParser.REPEATS) self.state = 180 - self.match(STIXPatternParser.IntLiteral) + self.match(STIXPatternParser.IntPosLiteral) self.state = 181 self.match(STIXPatternParser.TIMES) except RecognitionException as re: @@ -1377,16 +1440,19 @@ class STIXPatternParser(Parser): self.parser = parser def objectType(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectTypeContext, 0) + return 
self.getTypedRuleContext(STIXPatternParser.ObjectTypeContext,0) + def COLON(self): return self.getToken(STIXPatternParser.COLON, 0) def firstPathComponent(self): - return self.getTypedRuleContext(STIXPatternParser.FirstPathComponentContext, 0) + return self.getTypedRuleContext(STIXPatternParser.FirstPathComponentContext,0) + def objectPathComponent(self): - return self.getTypedRuleContext(STIXPatternParser.ObjectPathComponentContext, 0) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathComponentContext,0) + def getRuleIndex(self): return STIXPatternParser.RULE_objectPath @@ -1399,10 +1465,14 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObjectPath"): listener.exitObjectPath(self) + + + def objectPath(self): + localctx = STIXPatternParser.ObjectPathContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_objectPath) - self._la = 0 # Token type + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 183 @@ -1414,9 +1484,11 @@ class STIXPatternParser(Parser): self.state = 187 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == STIXPatternParser.DOT or _la == STIXPatternParser.LBRACK: + if _la==STIXPatternParser.DOT or _la==STIXPatternParser.LBRACK: self.state = 186 self.objectPathComponent(0) + + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1448,15 +1520,19 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitObjectType"): listener.exitObjectType(self) + + + def objectType(self): + localctx = STIXPatternParser.ObjectTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_objectType) - self._la = 0 # Token type + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 189 _la = self._input.LA(1) - if not(_la == STIXPatternParser.IdentifierWithoutHyphen or _la == STIXPatternParser.IdentifierWithHyphen): + if not(_la==STIXPatternParser.IdentifierWithoutHyphen or _la==STIXPatternParser.IdentifierWithHyphen): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1492,15 +1568,19 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitFirstPathComponent"): listener.exitFirstPathComponent(self) + + + def firstPathComponent(self): + localctx = STIXPatternParser.FirstPathComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 26, self.RULE_firstPathComponent) - self._la = 0 # Token type + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 191 _la = self._input.LA(1) - if not(_la == STIXPatternParser.StringLiteral or _la == STIXPatternParser.IdentifierWithoutHyphen): + if not(_la==STIXPatternParser.StringLiteral or _la==STIXPatternParser.IdentifierWithoutHyphen): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1519,27 +1599,29 @@ class STIXPatternParser(Parser): super(STIXPatternParser.ObjectPathComponentContext, self).__init__(parent, invokingState) self.parser = parser + def getRuleIndex(self): return STIXPatternParser.RULE_objectPathComponent + def copyFrom(self, ctx): super(STIXPatternParser.ObjectPathComponentContext, self).copyFrom(ctx) + class IndexPathStepContext(ObjectPathComponentContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObjectPathComponentContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObjectPathComponentContext) super(STIXPatternParser.IndexPathStepContext, self).__init__(parser) self.copyFrom(ctx) def LBRACK(self): return 
self.getToken(STIXPatternParser.LBRACK, 0) - def RBRACK(self): return self.getToken(STIXPatternParser.RBRACK, 0) - - def IntLiteral(self): - return self.getToken(STIXPatternParser.IntLiteral, 0) - + def IntPosLiteral(self): + return self.getToken(STIXPatternParser.IntPosLiteral, 0) + def IntNegLiteral(self): + return self.getToken(STIXPatternParser.IntNegLiteral, 0) def ASTERISK(self): return self.getToken(STIXPatternParser.ASTERISK, 0) @@ -1551,9 +1633,10 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitIndexPathStep"): listener.exitIndexPathStep(self) + class PathStepContext(ObjectPathComponentContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObjectPathComponentContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObjectPathComponentContext) super(STIXPatternParser.PathStepContext, self).__init__(parser) self.copyFrom(ctx) @@ -1561,7 +1644,8 @@ class STIXPatternParser(Parser): if i is None: return self.getTypedRuleContexts(STIXPatternParser.ObjectPathComponentContext) else: - return self.getTypedRuleContext(STIXPatternParser.ObjectPathComponentContext, i) + return self.getTypedRuleContext(STIXPatternParser.ObjectPathComponentContext,i) + def enterRule(self, listener): if hasattr(listener, "enterPathStep"): @@ -1571,15 +1655,15 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPathStep"): listener.exitPathStep(self) + class KeyPathStepContext(ObjectPathComponentContext): - def __init__(self, parser, ctx): # actually a STIXPatternParser.ObjectPathComponentContext) + def __init__(self, parser, ctx): # actually a STIXPatternParser.ObjectPathComponentContext) super(STIXPatternParser.KeyPathStepContext, self).__init__(parser) self.copyFrom(ctx) def IdentifierWithoutHyphen(self): return self.getToken(STIXPatternParser.IdentifierWithoutHyphen, 0) - def StringLiteral(self): return self.getToken(STIXPatternParser.StringLiteral, 0) @@ -1591,13 +1675,16 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitKeyPathStep"): listener.exitKeyPathStep(self) + + def objectPathComponent(self, _p=0): _parentctx = self._ctx _parentState = self.state localctx = STIXPatternParser.ObjectPathComponentContext(self, self._ctx, _parentState) + _prevctx = localctx _startState = 28 self.enterRecursionRule(localctx, 28, self.RULE_objectPathComponent, _p) - self._la = 0 # Token type + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 199 @@ -1606,12 +1693,13 @@ class STIXPatternParser(Parser): if token in [STIXPatternParser.DOT]: localctx = STIXPatternParser.KeyPathStepContext(self, localctx) self._ctx = localctx + _prevctx = localctx self.state = 194 self.match(STIXPatternParser.DOT) self.state = 195 _la = self._input.LA(1) - if not(_la == STIXPatternParser.StringLiteral or _la == STIXPatternParser.IdentifierWithoutHyphen): + if not(_la==STIXPatternParser.StringLiteral or _la==STIXPatternParser.IdentifierWithoutHyphen): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1620,11 +1708,12 @@ class STIXPatternParser(Parser): elif token in [STIXPatternParser.LBRACK]: localctx = STIXPatternParser.IndexPathStepContext(self, localctx) self._ctx = localctx + _prevctx = localctx self.state = 196 self.match(STIXPatternParser.LBRACK) self.state = 197 _la = self._input.LA(1) - if not(_la == STIXPatternParser.IntLiteral or _la == STIXPatternParser.ASTERISK): + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << STIXPatternParser.IntNegLiteral) | (1 << STIXPatternParser.IntPosLiteral) | (1 << 
STIXPatternParser.ASTERISK))) != 0)): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1638,11 +1727,12 @@ class STIXPatternParser(Parser): self._ctx.stop = self._input.LT(-1) self.state = 205 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 18, self._ctx) - while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: - if _alt == 1: + _alt = self._interp.adaptivePredict(self._input,18,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() + _prevctx = localctx localctx = STIXPatternParser.PathStepContext(self, STIXPatternParser.ObjectPathComponentContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_objectPathComponent) self.state = 201 @@ -1653,7 +1743,7 @@ class STIXPatternParser(Parser): self.objectPathComponent(4) self.state = 207 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input, 18, self._ctx) + _alt = self._interp.adaptivePredict(self._input,18,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1679,7 +1769,8 @@ class STIXPatternParser(Parser): if i is None: return self.getTypedRuleContexts(STIXPatternParser.PrimitiveLiteralContext) else: - return self.getTypedRuleContext(STIXPatternParser.PrimitiveLiteralContext, i) + return self.getTypedRuleContext(STIXPatternParser.PrimitiveLiteralContext,i) + def COMMA(self, i=None): if i is None: @@ -1698,14 +1789,18 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitSetLiteral"): listener.exitSetLiteral(self) + + + def setLiteral(self): + localctx = STIXPatternParser.SetLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_setLiteral) - self._la = 0 # Token type + self._la = 0 # Token type try: self.state = 221 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 20, self._ctx) + la_ = self._interp.adaptivePredict(self._input,20,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) self.state = 208 @@ -1723,7 +1818,7 @@ class STIXPatternParser(Parser): self.state = 216 self._errHandler.sync(self) _la = self._input.LA(1) - while _la == STIXPatternParser.COMMA: + while _la==STIXPatternParser.COMMA: self.state = 212 self.match(STIXPatternParser.COMMA) self.state = 213 @@ -1736,6 +1831,7 @@ class STIXPatternParser(Parser): self.match(STIXPatternParser.RPAREN) pass + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1751,7 +1847,8 @@ class STIXPatternParser(Parser): self.parser = parser def orderableLiteral(self): - return self.getTypedRuleContext(STIXPatternParser.OrderableLiteralContext, 0) + return self.getTypedRuleContext(STIXPatternParser.OrderableLiteralContext,0) + def BoolLiteral(self): return self.getToken(STIXPatternParser.BoolLiteral, 0) @@ -1767,19 +1864,18 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitPrimitiveLiteral"): listener.exitPrimitiveLiteral(self) + + + def primitiveLiteral(self): + localctx = STIXPatternParser.PrimitiveLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_primitiveLiteral) try: self.state = 225 self._errHandler.sync(self) token = self._input.LA(1) - if token in [STIXPatternParser.IntLiteral, - STIXPatternParser.FloatLiteral, - STIXPatternParser.HexLiteral, - STIXPatternParser.BinaryLiteral, - STIXPatternParser.StringLiteral, - STIXPatternParser.TimestampLiteral]: + if token in [STIXPatternParser.IntNegLiteral, 
STIXPatternParser.IntPosLiteral, STIXPatternParser.FloatNegLiteral, STIXPatternParser.FloatPosLiteral, STIXPatternParser.HexLiteral, STIXPatternParser.BinaryLiteral, STIXPatternParser.StringLiteral, STIXPatternParser.TimestampLiteral]: self.enterOuterAlt(localctx, 1) self.state = 223 self.orderableLiteral() @@ -1791,6 +1887,7 @@ class STIXPatternParser(Parser): pass else: raise NoViableAltException(self) + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1805,11 +1902,17 @@ class STIXPatternParser(Parser): super(STIXPatternParser.OrderableLiteralContext, self).__init__(parent, invokingState) self.parser = parser - def IntLiteral(self): - return self.getToken(STIXPatternParser.IntLiteral, 0) + def IntPosLiteral(self): + return self.getToken(STIXPatternParser.IntPosLiteral, 0) + + def IntNegLiteral(self): + return self.getToken(STIXPatternParser.IntNegLiteral, 0) - def FloatLiteral(self): - return self.getToken(STIXPatternParser.FloatLiteral, 0) + def FloatPosLiteral(self): + return self.getToken(STIXPatternParser.FloatPosLiteral, 0) + + def FloatNegLiteral(self): + return self.getToken(STIXPatternParser.FloatNegLiteral, 0) def StringLiteral(self): return self.getToken(STIXPatternParser.StringLiteral, 0) @@ -1834,21 +1937,19 @@ class STIXPatternParser(Parser): if hasattr(listener, "exitOrderableLiteral"): listener.exitOrderableLiteral(self) + + + def orderableLiteral(self): + localctx = STIXPatternParser.OrderableLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_orderableLiteral) - self._la = 0 # Token type + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 227 _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ( - (1 << STIXPatternParser.IntLiteral) | - (1 << STIXPatternParser.FloatLiteral) | - (1 << STIXPatternParser.HexLiteral) | - (1 << STIXPatternParser.BinaryLiteral) | - (1 << STIXPatternParser.StringLiteral) | - (1 << STIXPatternParser.TimestampLiteral))) != 0)): + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << STIXPatternParser.IntNegLiteral) | (1 << STIXPatternParser.IntPosLiteral) | (1 << STIXPatternParser.FloatNegLiteral) | (1 << STIXPatternParser.FloatPosLiteral) | (1 << STIXPatternParser.HexLiteral) | (1 << STIXPatternParser.BinaryLiteral) | (1 << STIXPatternParser.StringLiteral) | (1 << STIXPatternParser.TimestampLiteral))) != 0)): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1861,8 +1962,10 @@ class STIXPatternParser(Parser): self.exitRule() return localctx + + def sempred(self, localctx, ruleIndex, predIndex): - if self._predicates is None: + if self._predicates == None: self._predicates = dict() self._predicates[1] = self.observationExpressions_sempred self._predicates[2] = self.observationExpressionOr_sempred @@ -1881,32 +1984,40 @@ class STIXPatternParser(Parser): if predIndex == 0: return self.precpred(self._ctx, 2) + def observationExpressionOr_sempred(self, localctx, predIndex): if predIndex == 1: return self.precpred(self._ctx, 2) + def observationExpressionAnd_sempred(self, localctx, predIndex): if predIndex == 2: return self.precpred(self._ctx, 2) + def observationExpression_sempred(self, localctx, predIndex): if predIndex == 3: return self.precpred(self._ctx, 3) + if predIndex == 4: return self.precpred(self._ctx, 2) + if predIndex == 5: return self.precpred(self._ctx, 1) + def comparisonExpression_sempred(self, localctx, predIndex): if predIndex == 6: return self.precpred(self._ctx, 2) + 
def comparisonExpressionAnd_sempred(self, localctx, predIndex): if predIndex == 7: return self.precpred(self._ctx, 2) + def objectPathComponent_sempred(self, localctx, predIndex): if predIndex == 8: return self.precpred(self._ctx, 3) diff --git a/stix2patterns/inspector.py b/stix2patterns/inspector.py index c172ab7..faa77ec 100644 --- a/stix2patterns/inspector.py +++ b/stix2patterns/inspector.py @@ -73,14 +73,14 @@ class InspectionListener(STIXPatternListener): def exitWithinQualifier(self, ctx): self.__qualifiers.add( u"WITHIN {0} SECONDS".format( - ctx.IntLiteral() or ctx.FloatLiteral() + ctx.IntPosLiteral() or ctx.FloatPosLiteral() ) ) def exitRepeatedQualifier(self, ctx): self.__qualifiers.add( u"REPEATS {0} TIMES".format( - ctx.IntLiteral() + ctx.IntPosLiteral() ) ) @@ -172,4 +172,4 @@ class InspectionListener(STIXPatternListener): if ctx.ASTERISK(): self.__obj_path.append(INDEX_STAR) else: - self.__obj_path.append(int(ctx.IntLiteral().getText())) + self.__obj_path.append(int(ctx.IntPosLiteral().getText()))
Pattern passes unexpectedly

The following pattern passes with the latest version installed via `pip`

```
$ pip3 install stix2-patterns
Requirement already satisfied: stix2-patterns in /usr/local/lib/python3.6/site-packages
Requirement already satisfied: six in /usr/local/lib/python3.6/site-packages (from stix2-patterns)
Requirement already satisfied: antlr4-python3-runtime==4.7; python_version >= "3" in /usr/local/lib/python3.6/site-packages (from stix2-patterns)
$ validate-patterns
Enter a pattern to validate: [file:hashes.'SHA-256' =? 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' OR file:hashes.MD5 = 'cead3f77f6cda6ec00f57d76c9a6879f'] AND [file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']
PASS: [file:hashes.'SHA-256' =? 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' OR file:hashes.MD5 = 'cead3f77f6cda6ec00f57d76c9a6879f'] AND [file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']
```

I don't see where `=?` matches in patterns operator. In our STIX2 Patterns parser, this fails. Our parser has a catch-all pattern at the end that returns an `unexpected` token. Maybe that would fix this issue for your ANTLR implementation?
oasis-open/cti-pattern-validator
diff --git a/stix2patterns/test/test_validator.py b/stix2patterns/test/test_validator.py index 7ed81c7..2256e3e 100644 --- a/stix2patterns/test/test_validator.py +++ b/stix2patterns/test/test_validator.py @@ -32,11 +32,19 @@ FAIL_CASES = [ ("[file:name MATCHES /.*\\.dll/]", # Quotes around regular expression "FAIL: Error found at line 1:19. mismatched input '/' expecting StringLiteral"), ("[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN ]", # Missing Qualifier value - "FAIL: Error found at line 1:63. mismatched input ']' expecting {IntLiteral, FloatLiteral}"), + "FAIL: Error found at line 1:63. mismatched input ']' expecting {IntPosLiteral, FloatPosLiteral}"), ("[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN 5 HOURS]", # SECONDS is the only valid time unit "FAIL: Error found at line 1:65. mismatched input 'HOURS' expecting SECONDS"), + ("[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN -5 SECONDS]", # Negative integer is invalid + "FAIL: Error found at line 1:63. mismatched input '-5' expecting {IntPosLiteral, FloatPosLiteral}"), ("[network-traffic:dst_ref.value ISSUBSET ]", # Missing second Comparison operand "FAIL: Error found at line 1:40. missing StringLiteral at ']'"), + ("[file:hashes.MD5 =? 'cead3f77f6cda6ec00f57d76c9a6879f']", # '=?' isn't a valid operator + "FAIL: Error found at line 1:18. extraneous input '?'"), + ("[x_whatever:detected == t'2457-73-22T32:81:84.1Z']", # Not a valid date + "FAIL: Error found at line 1:24. extraneous input 't'"), + ("[artifact:payload_bin = b'====']", # Not valid Base64 + "FAIL: Error found at line 1:24. extraneous input 'b'"), # TODO: add more failing test cases. ] @@ -69,6 +77,8 @@ PASS_CASES = [ "[file:size IN (1024, 2048, 4096)]", "[network-connection:extended_properties[0].source_payload MATCHES 'dGVzdHRlc3R0ZXN0']", "[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN 5 SECONDS", + "[x_whatever:detected == t'2018-03-22T12:11:14.1Z']", + "[artifact:payload_bin = b'dGhpcyBpcyBhIHRlc3Q=']", ]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 4 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 antlr4-python3-runtime==4.7 attrs==22.2.0 Babel==2.11.0 bump2version==1.0.1 bumpversion==0.6.0 certifi==2021.5.30 cfgv==3.3.1 charset-normalizer==2.0.12 coverage==6.2 distlib==0.3.9 docutils==0.18.1 filelock==3.4.1 identify==2.4.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.2.3 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 nodeenv==1.6.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 pre-commit==2.17.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytz==2025.2 PyYAML==6.0.1 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-prompt==1.5.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -e git+https://github.com/oasis-open/cti-pattern-validator.git@5f50cd15f293440a4447d01795222eb97c8f495e#egg=stix2_patterns toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 zipp==3.6.0
name: cti-pattern-validator channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - antlr4-python3-runtime==4.7 - attrs==22.2.0 - babel==2.11.0 - bump2version==1.0.1 - bumpversion==0.6.0 - cfgv==3.3.1 - charset-normalizer==2.0.12 - coverage==6.2 - distlib==0.3.9 - docutils==0.18.1 - filelock==3.4.1 - identify==2.4.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.2.3 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - nodeenv==1.6.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - pre-commit==2.17.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytz==2025.2 - pyyaml==6.0.1 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-prompt==1.5.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - zipp==3.6.0 prefix: /opt/conda/envs/cti-pattern-validator
[ "stix2patterns/test/test_validator.py::test_fail_patterns[[file:hashes.MD5", "stix2patterns/test/test_validator.py::test_fail_patterns[[win-registry-key:key", "stix2patterns/test/test_validator.py::test_fail_patterns[[x_whatever:detected", "stix2patterns/test/test_validator.py::test_fail_patterns[[artifact:payload_bin" ]
[]
[ "stix2patterns/test/test_validator.py::test_spec_patterns[[file:hashes.'SHA-256'", "stix2patterns/test/test_validator.py::test_spec_patterns[[email-message:from_ref.value", "stix2patterns/test/test_validator.py::test_spec_patterns[([file:hashes.MD5", "stix2patterns/test/test_validator.py::test_spec_patterns[[user-account:account_type", "stix2patterns/test/test_validator.py::test_spec_patterns[[artifact:mime_type", "stix2patterns/test/test_validator.py::test_spec_patterns[[file:name", "stix2patterns/test/test_validator.py::test_spec_patterns[[file:extensions.'windows-pebinary-ext'.sections[*].entropy", "stix2patterns/test/test_validator.py::test_spec_patterns[[file:mime_type", "stix2patterns/test/test_validator.py::test_spec_patterns[[network-traffic:dst_ref.type", "stix2patterns/test/test_validator.py::test_spec_patterns[[domain-name:value", "stix2patterns/test/test_validator.py::test_spec_patterns[[url:value", "stix2patterns/test/test_validator.py::test_spec_patterns[[x509-certificate:issuer", "stix2patterns/test/test_validator.py::test_spec_patterns[[windows-registry-key:key", "stix2patterns/test/test_validator.py::test_spec_patterns[[(file:name", "stix2patterns/test/test_validator.py::test_spec_patterns[[email-message:sender_ref.value", "stix2patterns/test/test_validator.py::test_spec_patterns[[x-usb-device:usbdrive.serial_number", "stix2patterns/test/test_validator.py::test_spec_patterns[[process:command_line", "stix2patterns/test/test_validator.py::test_spec_patterns[[network-traffic:dst_ref.value", "stix2patterns/test/test_validator.py::test_spec_patterns[([file:name", "stix2patterns/test/test_validator.py::test_fail_patterns[file:size", "stix2patterns/test/test_validator.py::test_fail_patterns[[file:size", "stix2patterns/test/test_validator.py::test_fail_patterns[[file.size", "stix2patterns/test/test_validator.py::test_fail_patterns[[file:name", "stix2patterns/test/test_validator.py::test_fail_patterns[[network-traffic:dst_ref.value", "stix2patterns/test/test_validator.py::test_pass_patterns[[file:size", "stix2patterns/test/test_validator.py::test_pass_patterns[[file:file_name", "stix2patterns/test/test_validator.py::test_pass_patterns[[file:extended_properties.'ntfs-ext'.sid", "stix2patterns/test/test_validator.py::test_pass_patterns[[emailaddr:value", "stix2patterns/test/test_validator.py::test_pass_patterns[[ipv4addr:value", "stix2patterns/test/test_validator.py::test_pass_patterns[[user-account:value", "stix2patterns/test/test_validator.py::test_pass_patterns[[file:file_system_properties.file_name", "stix2patterns/test/test_validator.py::test_pass_patterns[[network-connection:extended_properties[0].source_payload", "stix2patterns/test/test_validator.py::test_pass_patterns[[win-registry-key:key", "stix2patterns/test/test_validator.py::test_pass_patterns[[x_whatever:detected", "stix2patterns/test/test_validator.py::test_pass_patterns[[artifact:payload_bin" ]
[]
BSD 3-Clause "New" or "Revised" License
2,267
[ "stix2patterns/grammars/STIXPatternListener.py", "stix2patterns/grammars/STIXPatternParser.py", "stix2patterns/inspector.py", "stix2patterns/grammars/STIXPatternLexer.py" ]
[ "stix2patterns/grammars/STIXPatternListener.py", "stix2patterns/grammars/STIXPatternParser.py", "stix2patterns/inspector.py", "stix2patterns/grammars/STIXPatternLexer.py" ]
elastic__rally-425
4d05fa88ea0920ec1f3178c3705201a53f6420db
2018-03-07 20:59:22
a5408e0d0d07b271b509df8057a7c73303604c10
diff --git a/docs/car.rst b/docs/car.rst index 5d7f21c2..855d38e8 100644 --- a/docs/car.rst +++ b/docs/car.rst @@ -94,6 +94,11 @@ These values are derived by Rally internally based on command line flags and you If you specify multiple configurations, e.g. ``--car="4gheap,ea"``, Rally will apply them in order. It will first read all variables in ``4gheap.ini``, then in ``ea.ini``. Afterwards, it will copy all configuration files from the corresponding config base of ``4gheap`` and *append* all configuration files from ``ea``. This also shows when to define a separate "car" and when to define a "mixin": If you need to amend configuration files, use a mixin, if you need to have a specific configuration, define a car. +Simple customizations +^^^^^^^^^^^^^^^^^^^^^ + +For simple customizations you can create the directory hierarchy as outlined above and use the ``--team-path`` command line parameter to refer to this configuration. For more complex use cases and distributed multi-node benchmarks, we recommend to use custom team repositories. + Custom Team Repositories ^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/command_line_reference.rst b/docs/command_line_reference.rst index 7b33c618..16aa80e7 100644 --- a/docs/command_line_reference.rst +++ b/docs/command_line_reference.rst @@ -141,6 +141,14 @@ You can use ``--include-tasks`` to specify a comma-separated list of tasks that Selects the team repository that Rally should use to resolve cars. By default the ``default`` team repository is used, which is available in the Github project `rally-teams <https://github.com/elastic/rally-teams>`__. See the documentation about :doc:`cars </car>` on how to add your own team repositories. +``team-path`` +~~~~~~~~~~~~~ + +A directory that contains a team configuration. ``--team-path`` and ``--team-repository`` are mutually exclusive. See the :doc:`car reference </car>` for the required directory structure. + +Example:: + + esrally --team-path=~/Projects/es-teams ``car`` ~~~~~~~ diff --git a/docs/developing.rst b/docs/developing.rst index 318060fa..f3a69fe2 100644 --- a/docs/developing.rst +++ b/docs/developing.rst @@ -23,6 +23,8 @@ Installation Instructions for Development git clone https://github.com/elastic/rally.git cd rally + virtualenv -p python3 .venv + source .venv/bin/activate ./rally diff --git a/esrally/config.py b/esrally/config.py index 02f0dbe8..b8e0d79d 100644 --- a/esrally/config.py +++ b/esrally/config.py @@ -185,7 +185,7 @@ class Config: """ :param section: The configuration section. :param key: The configuration key. - :return: True iff a value for the specified key exists in the specified configuration section. + :return: True iff a value for the specified key exists in the specified configuration section. """ return self.opts(section, key, mandatory=False) is not None @@ -447,6 +447,7 @@ class ConfigFactory: # the Elasticsearch directory is just the last path component (relative to the source root directory) config["source"]["elasticsearch.src.subdir"] = io.basename(source_dir) + if gradle_bin: config["build"] = {} config["build"]["gradle.bin"] = gradle_bin @@ -648,33 +649,39 @@ def migrate(config_file, current_version, target_version, out=print, i=input): config["reporting"].pop("output.html.report.filename") if current_version == 3 and target_version > current_version: root_dir = config["system"]["root.dir"] - out("*****************************************************************************************") - out("") - out("You have an old configuration of Rally. 
Rally has now a much simpler setup") - out("routine which will autodetect lots of settings for you and it also does not") - out("require you to setup a metrics store anymore.") - out("") - out("Rally will now migrate your configuration but if you don't need advanced features") - out("like a metrics store, then you should delete the configuration directory:") - out("") - out(" rm -rf %s" % config_file.config_dir) - out("") - out("and then rerun Rally's configuration routine:") - out("") - out(" %s configure" % PROGRAM_NAME) - out("") - out("Please also note you have %.1f GB of data in your current benchmark directory at" - % convert.bytes_to_gb(io.get_size(root_dir))) - out() - out(" %s" % root_dir) - out("") - out("You might want to clean up this directory also.") - out() - out("For more details please see %s" % console.format.link("https://github.com/elastic/rally/blob/master/CHANGELOG.md#030")) - out("") - out("*****************************************************************************************") - out("") - out("Pausing for 10 seconds to let you consider this message.") + out( + """ + ***************************************************************************************** + + You have an old configuration of Rally. Rally has now a much simpler setup + routine which will autodetect lots of settings for you and it also does not + require you to setup a metrics store anymore. + + Rally will now migrate your configuration but if you don't need advanced features + like a metrics store, then you should delete the configuration directory: + + rm -rf {0} + + and then rerun Rally's configuration routine: + + {1} configure + + Please also note you have {2:.1f} GB of data in your current benchmark directory at + + {3} + + You might want to clean up this directory also. + + For more details please see {4} + + ***************************************************************************************** + + Pausing for 10 seconds to let you consider this message. 
+ """.format(config_file.config_dir, + PROGRAM_NAME, + convert.bytes_to_gb(io.get_size(root_dir)), + root_dir, + console.format.link("https://github.com/elastic/rally/blob/master/CHANGELOG.md#030"))) time.sleep(10) logger.info("Migrating config from version [3] to [4]") current_version = 4 diff --git a/esrally/mechanic/mechanic.py b/esrally/mechanic/mechanic.py index 1e40957d..b14ec1ca 100644 --- a/esrally/mechanic/mechanic.py +++ b/esrally/mechanic/mechanic.py @@ -589,9 +589,9 @@ def create(cfg, metrics_store, all_node_ips, cluster_settings=None, sources=Fals car = None plugins = [] else: - repo = team.team_repo(cfg) - car = team.load_car(repo, cfg.opts("mechanic", "car.names"), cfg.opts("mechanic", "car.params")) - plugins = team.load_plugins(repo, cfg.opts("mechanic", "car.plugins"), cfg.opts("mechanic", "plugin.params")) + team_path = team.team_path(cfg) + car = team.load_car(team_path, cfg.opts("mechanic", "car.names"), cfg.opts("mechanic", "car.params")) + plugins = team.load_plugins(team_path, cfg.opts("mechanic", "car.plugins"), cfg.opts("mechanic", "plugin.params")) if sources or distribution: s = supplier.create(cfg, sources, distribution, build, challenge_root_path, plugins) diff --git a/esrally/mechanic/team.py b/esrally/mechanic/team.py index e097194f..b56160f2 100644 --- a/esrally/mechanic/team.py +++ b/esrally/mechanic/team.py @@ -11,7 +11,7 @@ logger = logging.getLogger("rally.team") def list_cars(cfg): - loader = CarLoader(team_repo(cfg)) + loader = CarLoader(team_path(cfg)) cars = [] for name in loader.car_names(): cars.append(loader.load_car(name)) @@ -50,7 +50,7 @@ def load_car(repo, name, car_params={}): def list_plugins(cfg): - plugins = PluginLoader(team_repo(cfg)).plugins() + plugins = PluginLoader(team_path(cfg)).plugins() if plugins: console.println("Available Elasticsearch plugins:\n") console.println(tabulate.tabulate([[p.name, p.config] for p in plugins], headers=["Name", "Configuration"])) @@ -83,25 +83,27 @@ def load_plugins(repo, plugin_names, plugin_params={}): return plugins -def team_repo(cfg, update=True): - distribution_version = cfg.opts("mechanic", "distribution.version", mandatory=False) - repo_name = cfg.opts("mechanic", "repository.name") - offline = cfg.opts("system", "offline.mode") - remote_url = cfg.opts("teams", "%s.url" % repo_name, mandatory=False) - root = cfg.opts("node", "root.dir") - team_repositories = cfg.opts("mechanic", "team.repository.dir") - teams_dir = os.path.join(root, team_repositories) - - current_team_repo = repo.RallyRepository(remote_url, teams_dir, repo_name, "teams", offline) - if update: +def team_path(cfg): + root_path = cfg.opts("mechanic", "team.path", mandatory=False) + if root_path: + return root_path + else: + distribution_version = cfg.opts("mechanic", "distribution.version", mandatory=False) + repo_name = cfg.opts("mechanic", "repository.name") + offline = cfg.opts("system", "offline.mode") + remote_url = cfg.opts("teams", "%s.url" % repo_name, mandatory=False) + root = cfg.opts("node", "root.dir") + team_repositories = cfg.opts("mechanic", "team.repository.dir") + teams_dir = os.path.join(root, team_repositories) + + current_team_repo = repo.RallyRepository(remote_url, teams_dir, repo_name, "teams", offline) current_team_repo.update(distribution_version) - return current_team_repo + return current_team_repo.repo_dir class CarLoader: - def __init__(self, repo): - self.repo = repo - self.cars_dir = os.path.join(self.repo.repo_dir, "cars") + def __init__(self, team_root_path): + self.cars_dir = 
os.path.join(team_root_path, "cars") def car_names(self): def __car_name(path): @@ -199,9 +201,8 @@ class Car: class PluginLoader: - def __init__(self, repo): - self.repo = repo - self.plugins_root_path = os.path.join(self.repo.repo_dir, "plugins") + def __init__(self, team_root_path): + self.plugins_root_path = os.path.join(team_root_path, "plugins") def plugins(self): known_plugins = self._core_plugins() + self._configured_plugins() diff --git a/esrally/rally.py b/esrally/rally.py index 15b04b27..131e89a2 100644 --- a/esrally/rally.py +++ b/esrally/rally.py @@ -278,6 +278,9 @@ def create_arg_parser(): p.add_argument( "--challenge", help="define the challenge to use. List possible challenges for tracks with `%s list tracks`" % PROGRAM_NAME) + p.add_argument( + "--team-path", + help="define the path to the car and plugin configurations to use.") p.add_argument( "--car", help="define the car to use. List possible cars with `%s list cars` (default: defaults)." % PROGRAM_NAME, @@ -676,8 +679,12 @@ def main(): if args.distribution_version: cfg.add(config.Scope.applicationOverride, "mechanic", "distribution.version", args.distribution_version) cfg.add(config.Scope.applicationOverride, "mechanic", "distribution.repository", args.distribution_repository) - cfg.add(config.Scope.applicationOverride, "mechanic", "repository.name", args.team_repository) cfg.add(config.Scope.applicationOverride, "mechanic", "car.names", csv_to_list(args.car)) + if args.team_path: + cfg.add(config.Scope.applicationOverride, "mechanic", "team.path", os.path.abspath(io.normalize_path(args.team_path))) + cfg.add(config.Scope.applicationOverride, "mechanic", "repository.name", None) + else: + cfg.add(config.Scope.applicationOverride, "mechanic", "repository.name", args.team_repository) cfg.add(config.Scope.applicationOverride, "mechanic", "car.plugins", csv_to_list(args.elasticsearch_plugins)) cfg.add(config.Scope.applicationOverride, "mechanic", "car.params", kv_to_map(csv_to_list(args.car_params))) cfg.add(config.Scope.applicationOverride, "mechanic", "plugin.params", kv_to_map(csv_to_list(args.plugin_params)))
esrally fails after initial config run if JDK9 is not installed

Steps to reproduce:

1. Run `esrally configure` on a system without JDK 9 (but with Gradle installed)
2. Run `esrally`

This will fail with: "No value for mandatory configuration: section=build, key=gradle.bin"
elastic/rally
diff --git a/tests/mechanic/team_test.py b/tests/mechanic/team_test.py index ea717b0e..37f80635 100644 --- a/tests/mechanic/team_test.py +++ b/tests/mechanic/team_test.py @@ -7,41 +7,36 @@ from esrally.mechanic import team current_dir = os.path.dirname(os.path.abspath(__file__)) -class UnitTestRepo: - def __init__(self, repo_dir): - self.repo_dir = repo_dir - - class CarLoaderTests(TestCase): def __init__(self, args): super().__init__(args) - self.repo = None + self.team_dir = None self.loader = None def setUp(self): - self.repo = UnitTestRepo(os.path.join(current_dir, "data")) - self.loader = team.CarLoader(self.repo) + self.team_dir = os.path.join(current_dir, "data") + self.loader = team.CarLoader(self.team_dir) def test_lists_car_names(self): # contrary to the name this assertion compares contents but does not care about order. self.assertCountEqual(["default", "32gheap", "missing_config_base", "empty_config_base", "ea", "verbose"], self.loader.car_names()) def test_load_known_car(self): - car = team.load_car(self.repo, ["default"], car_params={"data_paths": ["/mnt/disk0", "/mnt/disk1"]}) + car = team.load_car(self.team_dir, ["default"], car_params={"data_paths": ["/mnt/disk0", "/mnt/disk1"]}) self.assertEqual("default", car.name) self.assertEqual([os.path.join(current_dir, "data", "cars", "vanilla")], car.config_paths) self.assertDictEqual({"heap_size": "1g", "data_paths": ["/mnt/disk0", "/mnt/disk1"]}, car.variables) self.assertEqual({}, car.env) def test_load_car_with_mixin_single_config_base(self): - car = team.load_car(self.repo, ["32gheap", "ea"]) + car = team.load_car(self.team_dir, ["32gheap", "ea"]) self.assertEqual("32gheap+ea", car.name) self.assertEqual([os.path.join(current_dir, "data", "cars", "vanilla")], car.config_paths) self.assertEqual({"heap_size": "32g", "assertions": "true"}, car.variables) self.assertEqual({"JAVA_TOOL_OPTS": "A B C D E F"}, car.env) def test_load_car_with_mixin_multiple_config_bases(self): - car = team.load_car(self.repo, ["32gheap", "ea", "verbose"]) + car = team.load_car(self.team_dir, ["32gheap", "ea", "verbose"]) self.assertEqual("32gheap+ea+verbose", car.name) self.assertEqual([ os.path.join(current_dir, "data", "cars", "vanilla"), @@ -52,17 +47,17 @@ class CarLoaderTests(TestCase): def test_raises_error_on_unknown_car(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: - team.load_car(self.repo, ["don_t-know-you"]) + team.load_car(self.team_dir, ["don_t-know-you"]) self.assertRegex(ctx.exception.args[0], r"Unknown car \[don_t-know-you\]. 
List the available cars with [^\s]+ list cars.") def test_raises_error_on_empty_config_base(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: - team.load_car(self.repo, ["empty_config_base"]) + team.load_car(self.team_dir, ["empty_config_base"]) self.assertEqual("At least one config base is required for car ['empty_config_base']", ctx.exception.args[0]) def test_raises_error_on_missing_config_base(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: - team.load_car(self.repo, ["missing_config_base"]) + team.load_car(self.team_dir, ["missing_config_base"]) self.assertEqual("At least one config base is required for car ['missing_config_base']", ctx.exception.args[0]) @@ -72,8 +67,7 @@ class PluginLoaderTests(TestCase): self.loader = None def setUp(self): - repo = UnitTestRepo(os.path.join(current_dir, "data")) - self.loader = team.PluginLoader(repo) + self.loader = team.PluginLoader(os.path.join(current_dir, "data")) def test_lists_plugins(self): self.assertCountEqual(
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 7 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "tox", "pytest", "pytest-benchmark" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 distlib==0.3.9 elasticsearch==6.0.0 -e git+https://github.com/elastic/rally.git@4d05fa88ea0920ec1f3178c3705201a53f6420db#egg=esrally filelock==3.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==2.9.5 jsonschema==2.5.1 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work platformdirs==2.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==5.4.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work py-cpuinfo==3.2.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-benchmark==3.4.1 six==1.17.0 tabulate==0.8.1 thespian==3.9.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tox==3.28.0 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.22 virtualenv==20.17.1 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: rally channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - distlib==0.3.9 - elasticsearch==6.0.0 - filelock==3.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - jinja2==2.9.5 - jsonschema==2.5.1 - markupsafe==2.0.1 - platformdirs==2.4.0 - psutil==5.4.0 - py-cpuinfo==3.2.0 - pytest-benchmark==3.4.1 - six==1.17.0 - tabulate==0.8.1 - thespian==3.9.2 - tox==3.28.0 - urllib3==1.22 - virtualenv==20.17.1 prefix: /opt/conda/envs/rally
[ "tests/mechanic/team_test.py::CarLoaderTests::test_lists_car_names", "tests/mechanic/team_test.py::CarLoaderTests::test_load_car_with_mixin_multiple_config_bases", "tests/mechanic/team_test.py::CarLoaderTests::test_load_car_with_mixin_single_config_base", "tests/mechanic/team_test.py::CarLoaderTests::test_load_known_car", "tests/mechanic/team_test.py::CarLoaderTests::test_raises_error_on_empty_config_base", "tests/mechanic/team_test.py::CarLoaderTests::test_raises_error_on_missing_config_base", "tests/mechanic/team_test.py::CarLoaderTests::test_raises_error_on_unknown_car", "tests/mechanic/team_test.py::PluginLoaderTests::test_cannot_load_community_plugin_with_missing_config", "tests/mechanic/team_test.py::PluginLoaderTests::test_cannot_load_plugin_with_missing_config", "tests/mechanic/team_test.py::PluginLoaderTests::test_lists_plugins", "tests/mechanic/team_test.py::PluginLoaderTests::test_loads_community_plugin_without_configuration", "tests/mechanic/team_test.py::PluginLoaderTests::test_loads_configured_plugin", "tests/mechanic/team_test.py::PluginLoaderTests::test_loads_core_plugin" ]
[]
[]
[]
Apache License 2.0
2,268
[ "esrally/mechanic/mechanic.py", "docs/developing.rst", "esrally/mechanic/team.py", "docs/command_line_reference.rst", "docs/car.rst", "esrally/config.py", "esrally/rally.py" ]
[ "esrally/mechanic/mechanic.py", "docs/developing.rst", "esrally/mechanic/team.py", "docs/command_line_reference.rst", "docs/car.rst", "esrally/config.py", "esrally/rally.py" ]
nipy__nipype-2490
88dbce1ce5439440bcc14c9aa46666c40f642152
2018-03-07 21:23:46
704b97dee7848283692bac38f04541c5af2a87b5
diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py
index 9f228f5c5..21ecbc0ee 100644
--- a/nipype/utils/nipype2boutiques.py
+++ b/nipype/utils/nipype2boutiques.py
@@ -2,7 +2,7 @@
 from __future__ import (print_function, division, unicode_literals,
                         absolute_import)

-from builtins import str, open
+from builtins import str, open, bytes
 # This tool exports a Nipype interface in the Boutiques (https://github.com/boutiques) JSON format.
 # Boutiques tools can be imported in CBRAIN (https://github.com/aces/cbrain) among other platforms.
 #
@@ -40,10 +40,12 @@ def generate_boutiques_descriptor(
         raise Exception("Undefined module.")

     # Retrieves Nipype interface
-    if isinstance(module, str):
+    if isinstance(module, (str, bytes)):
         import_module(module)
         module_name = str(module)
         module = sys.modules[module]
+    else:
+        module_name = str(module.__name__)

     interface = getattr(module, interface_name)()
     inputs = interface.input_spec()
@@ -249,7 +251,7 @@ def create_tempfile():
     Creates a temp file and returns its name.
     '''
     fileTemp = tempfile.NamedTemporaryFile(delete=False)
-    fileTemp.write("hello")
+    fileTemp.write(b"hello")
     fileTemp.close()
     return fileTemp.name

@@ -283,6 +285,8 @@ def must_generate_value(name, type, ignored_template_inputs, spec_info, spec,
     # Best guess to detect string restrictions...
     if "' or '" in spec_info:
         return False
+    if spec.default or spec.default_value():
+        return False
     if not ignored_template_inputs:
         return True
     return not (name in ignored_template_inputs)
UnboundLocalError: local variable 'module_name' referenced before assignment

### Summary

Discovered for myself `nipypecli` and decided to give it a try while composing cmdline invocation just following the errors it was spitting out at me and stopping when error didn't give a hint what I could have specified incorrectly:

```
$> nipypecli convert boutiques -m nipype.interfaces.ants.registration -i ANTS -o test
Traceback (most recent call last):
  File "/usr/bin/nipypecli", line 11, in <module>
    load_entry_point('nipype==1.0.1', 'console_scripts', 'nipypecli')()
  File "/usr/lib/python2.7/dist-packages/click/core.py", line 722, in __call__
    return self.main(*args, **kwargs)
  File "/usr/lib/python2.7/dist-packages/click/core.py", line 697, in main
    rv = self.invoke(ctx)
  File "/usr/lib/python2.7/dist-packages/click/core.py", line 1066, in invoke
    return _process_result(sub_ctx.command.invoke(sub_ctx))
  File "/usr/lib/python2.7/dist-packages/click/core.py", line 1066, in invoke
    return _process_result(sub_ctx.command.invoke(sub_ctx))
  File "/usr/lib/python2.7/dist-packages/click/core.py", line 895, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "/usr/lib/python2.7/dist-packages/click/core.py", line 535, in invoke
    return callback(*args, **kwargs)
  File "/usr/lib/python2.7/dist-packages/nipype/scripts/cli.py", line 254, in boutiques
    verbose, ignore_template_numbers)
  File "/usr/lib/python2.7/dist-packages/nipype/utils/nipype2boutiques.py", line 56, in generate_boutiques_descriptor
    'command-line'] = "nipype_cmd " + module_name + " " + interface_name + " "
UnboundLocalError: local variable 'module_name' referenced before assignment
```
nipy/nipype
diff --git a/nipype/utils/tests/test_nipype2boutiques.py b/nipype/utils/tests/test_nipype2boutiques.py
new file mode 100644
index 000000000..f1d0c46ee
--- /dev/null
+++ b/nipype/utils/tests/test_nipype2boutiques.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+from future import standard_library
+standard_library.install_aliases()
+
+from ..nipype2boutiques import generate_boutiques_descriptor
+
+
+def test_generate():
+    generate_boutiques_descriptor(module='nipype.interfaces.ants.registration',
+                                  interface_name='ANTS',
+                                  ignored_template_inputs=(),
+                                  docker_image=None,
+                                  docker_index=None,
+                                  verbose=False,
+                                  ignore_template_numbers=False)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 click==8.0.4 codecov==2.1.13 configparser==5.2.0 coverage==6.2 cycler==0.11.0 decorator==4.4.2 docutils==0.18.1 execnet==1.9.0 funcsigs==1.0.2 future==1.0.0 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 isodate==0.6.1 Jinja2==3.0.3 kiwisolver==1.3.1 lxml==5.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 mock==5.2.0 networkx==2.5.1 nibabel==3.2.2 -e git+https://github.com/nipy/nipype.git@88dbce1ce5439440bcc14c9aa46666c40f642152#egg=nipype numpy==1.19.5 numpydoc==1.1.0 packaging==21.3 Pillow==8.4.0 pluggy==1.0.0 prov==1.5.0 py==1.11.0 pydot==1.4.2 pydotplus==2.0.2 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-env==0.6.2 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 pytz==2025.2 rdflib==5.0.0 requests==2.27.1 scipy==1.5.4 simplejson==3.20.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 traits==6.4.1 typing_extensions==4.1.1 urllib3==1.26.20 yapf==0.32.0 zipp==3.6.0
name: nipype channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - click==8.0.4 - codecov==2.1.13 - configparser==5.2.0 - coverage==6.2 - cycler==0.11.0 - decorator==4.4.2 - docutils==0.18.1 - execnet==1.9.0 - funcsigs==1.0.2 - future==1.0.0 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isodate==0.6.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - lxml==5.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - mock==5.2.0 - networkx==2.5.1 - nibabel==3.2.2 - numpy==1.19.5 - numpydoc==1.1.0 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - prov==1.5.0 - py==1.11.0 - pydot==1.4.2 - pydotplus==2.0.2 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-env==0.6.2 - pytest-xdist==3.0.2 - python-dateutil==2.9.0.post0 - pytz==2025.2 - rdflib==5.0.0 - requests==2.27.1 - scipy==1.5.4 - simplejson==3.20.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - traits==6.4.1 - typing-extensions==4.1.1 - urllib3==1.26.20 - yapf==0.32.0 - zipp==3.6.0 prefix: /opt/conda/envs/nipype
[ "nipype/utils/tests/test_nipype2boutiques.py::test_generate" ]
[]
[]
[]
Apache License 2.0
2,269
[ "nipype/utils/nipype2boutiques.py" ]
[ "nipype/utils/nipype2boutiques.py" ]
certbot__certbot-5687
e0ae356aa35adf22d154113e06dd01409df93bba
2018-03-07 22:55:23
e0ae356aa35adf22d154113e06dd01409df93bba
cpu: Thank you @sydneyli :1st_place_medal: :racing_car:
diff --git a/acme/acme/client.py b/acme/acme/client.py
index d52c82a5c..9e2478afe 100644
--- a/acme/acme/client.py
+++ b/acme/acme/client.py
@@ -227,8 +227,7 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
         response = self._post(url, messages.Revocation(
             certificate=cert,
-            reason=rsn),
-            content_type=None)
+            reason=rsn))
         if response.status_code != http_client.OK:
             raise errors.ClientError(
                 'Successful revocation must return HTTP OK status')
`acme` module's `client._revoke` sends incorrect `ContentType` header.

[ACME draft-10, section 6.2](https://tools.ietf.org/html/draft-ietf-acme-acme-10#section-6.2) explicitly requires POSTs be sent with a `Content-Type` of `application/jose+json`:

> Because client requests in ACME carry JWS objects in the Flattened
> JSON Serialization, they must have the "Content-Type" header field
> set to "application/jose+json". If a request does not meet this
> requirement, then the server MUST return a response with status code
> 415 (Unsupported Media Type).

The good news is that Certbot & the `acme` module already do this in 99.99% of cases :tada: :+1: :balloon:

The bad news is that `client._revoke` overrides the default (correct) `ContentType` with `none`: https://github.com/certbot/certbot/blob/77fdb4d7d6194989dcc775f2e0ad81b6147c2359/acme/acme/client.py#L231

This will make revocation requests fail in a world where this restriction is enforced (See https://github.com/letsencrypt/boulder/pull/3532)

There doesn't seem to be a reason to want to override `ContentType` in this case so I believe the fix is to change `_revoke` to use the default. Thanks!
certbot/certbot
diff --git a/acme/acme/client_test.py b/acme/acme/client_test.py
index a0c27e74f..00b9e19dd 100644
--- a/acme/acme/client_test.py
+++ b/acme/acme/client_test.py
@@ -635,8 +635,7 @@ class ClientTest(ClientTestBase):
     def test_revoke(self):
         self.client.revoke(self.certr.body, self.rsn)
         self.net.post.assert_called_once_with(
-            self.directory[messages.Revocation], mock.ANY, content_type=None,
-            acme_version=1)
+            self.directory[messages.Revocation], mock.ANY, acme_version=1)

     def test_revocation_payload(self):
         obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
@@ -776,8 +775,7 @@ class ClientV2Test(ClientTestBase):
     def test_revoke(self):
         self.client.revoke(messages_test.CERT, self.rsn)
         self.net.post.assert_called_once_with(
-            self.directory["revokeCert"], mock.ANY, content_type=None,
-            acme_version=2)
+            self.directory["revokeCert"], mock.ANY, acme_version=2)


 class MockJSONDeSerializable(jose.JSONDeSerializable):
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.21
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pylint" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
acme==1.23.0 astroid==1.3.5 attrs==22.2.0 backcall==0.2.0 bleach==4.1.0 -e git+https://github.com/certbot/certbot.git@e0ae356aa35adf22d154113e06dd01409df93bba#egg=certbot certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 colorama==0.4.5 ConfigArgParse==1.7 configobj==5.0.8 coverage==6.2 cryptography==40.0.2 decorator==5.1.1 dill==0.3.4 distlib==0.3.9 docutils==0.18.1 execnet==1.9.0 filelock==3.4.1 idna==3.10 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 ipdb==0.13.13 ipython==7.16.3 ipython-genutils==0.2.0 isort==5.10.1 jedi==0.17.2 jeepney==0.7.1 josepy==1.13.0 keyring==23.4.1 lazy-object-proxy==1.7.1 logilab-common==1.9.7 mccabe==0.7.0 mock==5.2.0 mypy-extensions==1.0.0 packaging==21.3 parsedatetime==2.6 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pkginfo==1.10.0 platformdirs==2.4.0 pluggy==1.0.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 pycparser==2.21 Pygments==2.14.0 pylint==1.4.2 pyOpenSSL==23.2.0 pyparsing==3.1.4 pyRFC3339==2.0.1 pytest==7.0.1 pytest-cov==4.0.0 pytest-xdist==3.0.2 pytz==2025.2 readme-renderer==34.0 requests==2.27.1 requests-toolbelt==1.0.0 rfc3986==1.5.0 SecretStorage==3.3.3 six==1.17.0 toml==0.10.2 tomli==1.2.3 tox==3.28.0 tqdm==4.64.1 traitlets==4.3.3 twine==3.8.0 typed-ast==1.5.5 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.17.1 wcwidth==0.2.13 webencodings==0.5.1 wrapt==1.16.0 zipp==3.6.0 zope.component==5.1.0 zope.event==4.6 zope.hookable==5.4 zope.interface==5.5.2
name: certbot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - acme==1.23.0 - astroid==1.3.5 - attrs==22.2.0 - backcall==0.2.0 - bleach==4.1.0 - cffi==1.15.1 - charset-normalizer==2.0.12 - colorama==0.4.5 - configargparse==1.7 - configobj==5.0.8 - coverage==6.2 - cryptography==40.0.2 - decorator==5.1.1 - dill==0.3.4 - distlib==0.3.9 - docutils==0.18.1 - execnet==1.9.0 - filelock==3.4.1 - idna==3.10 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - ipdb==0.13.13 - ipython==7.16.3 - ipython-genutils==0.2.0 - isort==5.10.1 - jedi==0.17.2 - jeepney==0.7.1 - josepy==1.13.0 - keyring==23.4.1 - lazy-object-proxy==1.7.1 - logilab-common==1.9.7 - mccabe==0.7.0 - mock==5.2.0 - mypy-extensions==1.0.0 - packaging==21.3 - parsedatetime==2.6 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pkginfo==1.10.0 - platformdirs==2.4.0 - pluggy==1.0.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pycparser==2.21 - pygments==2.14.0 - pylint==1.4.2 - pyopenssl==23.2.0 - pyparsing==3.1.4 - pyrfc3339==2.0.1 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-xdist==3.0.2 - pytz==2025.2 - readme-renderer==34.0 - requests==2.27.1 - requests-toolbelt==1.0.0 - rfc3986==1.5.0 - secretstorage==3.3.3 - six==1.17.0 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - tqdm==4.64.1 - traitlets==4.3.3 - twine==3.8.0 - typed-ast==1.5.5 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.17.1 - wcwidth==0.2.13 - webencodings==0.5.1 - wrapt==1.16.0 - zipp==3.6.0 - zope-component==5.1.0 - zope-event==4.6 - zope-hookable==5.4 - zope-interface==5.5.2 prefix: /opt/conda/envs/certbot
[ "acme/acme/client_test.py::ClientTest::test_revoke", "acme/acme/client_test.py::ClientV2Test::test_revoke" ]
[]
[ "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_finalize_order_v1_fetch_chain_error", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_finalize_order_v1_success", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_finalize_order_v1_timeout", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_finalize_order_v2", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_forwarding", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_init_acme_version", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_init_downloads_directory", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_new_account_and_tos", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_new_order_v1", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_new_order_v2", "acme/acme/client_test.py::BackwardsCompatibleClientV2Test::test_revoke", "acme/acme/client_test.py::ClientTest::test_agree_to_tos", "acme/acme/client_test.py::ClientTest::test_answer_challenge", "acme/acme/client_test.py::ClientTest::test_answer_challenge_missing_next", "acme/acme/client_test.py::ClientTest::test_check_cert", "acme/acme/client_test.py::ClientTest::test_check_cert_missing_location", "acme/acme/client_test.py::ClientTest::test_deactivate_account", "acme/acme/client_test.py::ClientTest::test_fetch_chain_max", "acme/acme/client_test.py::ClientTest::test_fetch_chain_no_up_link", "acme/acme/client_test.py::ClientTest::test_fetch_chain_single", "acme/acme/client_test.py::ClientTest::test_fetch_chain_too_many", "acme/acme/client_test.py::ClientTest::test_init_downloads_directory", "acme/acme/client_test.py::ClientTest::test_poll", "acme/acme/client_test.py::ClientTest::test_poll_and_request_issuance", "acme/acme/client_test.py::ClientTest::test_query_registration", "acme/acme/client_test.py::ClientTest::test_refresh", "acme/acme/client_test.py::ClientTest::test_register", "acme/acme/client_test.py::ClientTest::test_request_challenges", "acme/acme/client_test.py::ClientTest::test_request_challenges_custom_uri", "acme/acme/client_test.py::ClientTest::test_request_challenges_deprecated_arg", "acme/acme/client_test.py::ClientTest::test_request_challenges_unexpected_update", "acme/acme/client_test.py::ClientTest::test_request_challenges_wildcard", "acme/acme/client_test.py::ClientTest::test_request_domain_challenges", "acme/acme/client_test.py::ClientTest::test_request_issuance", "acme/acme/client_test.py::ClientTest::test_request_issuance_missing_location", "acme/acme/client_test.py::ClientTest::test_request_issuance_missing_up", "acme/acme/client_test.py::ClientTest::test_retry_after_date", "acme/acme/client_test.py::ClientTest::test_retry_after_invalid", "acme/acme/client_test.py::ClientTest::test_retry_after_missing", "acme/acme/client_test.py::ClientTest::test_retry_after_overflow", "acme/acme/client_test.py::ClientTest::test_retry_after_seconds", "acme/acme/client_test.py::ClientTest::test_revocation_payload", "acme/acme/client_test.py::ClientTest::test_revoke_bad_status_raises_error", "acme/acme/client_test.py::ClientTest::test_update_registration", "acme/acme/client_test.py::ClientV2Test::test_finalize_order_error", "acme/acme/client_test.py::ClientV2Test::test_finalize_order_success", "acme/acme/client_test.py::ClientV2Test::test_finalize_order_timeout", "acme/acme/client_test.py::ClientV2Test::test_new_account", "acme/acme/client_test.py::ClientV2Test::test_new_order", 
"acme/acme/client_test.py::ClientV2Test::test_poll_and_finalize", "acme/acme/client_test.py::ClientV2Test::test_poll_authorizations_failure", "acme/acme/client_test.py::ClientV2Test::test_poll_authorizations_success", "acme/acme/client_test.py::ClientV2Test::test_poll_authorizations_timeout", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_conflict", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_jobj", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_not_ok_jobj_error", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_not_ok_jobj_no_error", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_not_ok_no_jobj", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_ok_no_jobj_ct_required", "acme/acme/client_test.py::ClientNetworkTest::test_check_response_ok_no_jobj_no_ct", "acme/acme/client_test.py::ClientNetworkTest::test_del", "acme/acme/client_test.py::ClientNetworkTest::test_del_error", "acme/acme/client_test.py::ClientNetworkTest::test_init", "acme/acme/client_test.py::ClientNetworkTest::test_requests_error_passthrough", "acme/acme/client_test.py::ClientNetworkTest::test_send_request", "acme/acme/client_test.py::ClientNetworkTest::test_send_request_get_der", "acme/acme/client_test.py::ClientNetworkTest::test_send_request_post", "acme/acme/client_test.py::ClientNetworkTest::test_send_request_timeout", "acme/acme/client_test.py::ClientNetworkTest::test_send_request_user_agent", "acme/acme/client_test.py::ClientNetworkTest::test_send_request_verify_ssl", "acme/acme/client_test.py::ClientNetworkTest::test_urllib_error", "acme/acme/client_test.py::ClientNetworkTest::test_wrap_in_jws", "acme/acme/client_test.py::ClientNetworkTest::test_wrap_in_jws_v2", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_get", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_head", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_head_get_post_error_passthrough", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post_failed_retry", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post_no_content_type", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post_not_retried", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post_successful_retry", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post_wrong_initial_nonce", "acme/acme/client_test.py::ClientNetworkWithMockedResponseTest::test_post_wrong_post_response_nonce" ]
[]
Apache License 2.0
2,270
[ "acme/acme/client.py" ]
[ "acme/acme/client.py" ]
oscarbranson__cbsyst-17
e85f6ebff4bd0e78ad229835af146ec7ff4d4aa1
2018-03-08 04:35:16
a917bf613b37ea897b6cc3fade5177ee2ffcf89e
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5524ea6..f26c8bb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -45,16 +45,4 @@ Currently using the unittest module, and testing via ``setup.py test``. Current unittests check internal consistency of functions against stable-state reference values, and compare the output of Csys against reference carbon speciation data. -*Do not change existing tests without good reason*. - -## Project Structure - -``` -cbsyst/ - |--- boron_fns.py : Functions for calculating relating to B speciation / isotopes. - |--- carbon_fns.py : Functions for calculating C speciation. - |--- cbsyst.py : User-facing functions used for calculating seawater chemistry. - |--- helpers.py : General functions that are used elsewhere. - |--- MyAMI_V2.py : Functions for K0, K1, K2, KB, KW, KS, KspA, KspC and [Mg] and [Ca] corrections. - |--- non_MyAMI_constants.py : Functions to calculate any constants that are not handled by MyAMI. -``` \ No newline at end of file +*Do not change existing tests without good reason*. \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..f076406 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +graft cbsyst/test_data/Lueker2000 +graft cbsyst/test_data/GLODAP_data \ No newline at end of file diff --git a/README.md b/README.md index dc42ad5..930efa9 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,10 @@ +# cbsyst + <div align="right"> <a href="https://travis-ci.org/oscarbranson/cbsyst"><img src="https://travis-ci.org/oscarbranson/cbsyst.svg?branch=master" alt="TravisCI build" height="18"></a> <a href="https://badge.fury.io/py/cbsyst"><img src="https://badge.fury.io/py/cbsyst.svg" alt="PyPI version" height="18"></a> - <a href="https://anaconda.org/conda-forge/cbsyst"> <img src="https://anaconda.org/conda-forge/cbsyst/badges/version.svg" ald="conda-forge version" height="18"/></a> </div> -# cbsyst - **A Python module for calculating seawater carbon and boron chemistry.** This will be particularly useful for anyone thinking about oceans in the distant past, when Mg and Ca concentrations were different. I use [Mathis Hain's MyAMI model](http://www.mathis-hain.net/resources/Hain_et_al_2015_GBC.pdf) to adjust speciation constants for Mg and Ca concentration. @@ -20,7 +19,7 @@ This will be particularly useful for anyone thinking about oceans in the distant - [ ] [Compare to CO2SYS](https://github.com/oscarbranson/cbsyst/issues/6), a la [Orr et al (2015)](http://www.biogeosciences.net/12/1483/2015/bg-12-1483-2015.pdf)? If anyone wants to help with any of this, please do contribute! -A full list of bite-sized tasks that need doing is available on the [Issues](https://github.com/oscarbranson/cbsyst/issues) page. +A full list of bite-sized tasks that need doing is available in the [TODO list](). ## Acknowledgement The development of `cbsyst` has been greatly aided by [CO2SYS](http://cdiac.ornl.gov/oceans/co2rprt.html), and the [Matlab conversion of CO2SYS](http://cdiac.ornl.gov/ftp/oceans/co2sys/). @@ -81,15 +80,12 @@ Delta values can be provided as an input, and are given as an output. **Requires Python 3.5+**. Does *not* work in 2.7. Sorry. 
-### PyPi ```bash pip install cbsyst ``` -### Conda-Forge -```bash -conda install cbsyst -c conda-forge -``` +## Build Status + ## Example Usage diff --git a/cbsyst/MyAMI_V2.py b/cbsyst/MyAMI_V2.py index 6b5286c..d887f87 100644 --- a/cbsyst/MyAMI_V2.py +++ b/cbsyst/MyAMI_V2.py @@ -1266,7 +1266,7 @@ start_params = {'K0': np.array([-60.2409, 93.4517, 23.3585, 0.023517, 'KB': np.array([148.0248, 137.1942, 1.62142, -8966.90, -2890.53, -77.942, 1.728, -0.0996, -24.4344, -25.085, -0.2474, 0.053105]), - 'KW': np.array([148.9652, -13847.26, -23.6521, 118.67, + 'KW': np.array([148.9802, -13847.26, -23.6521, 118.67, -5.977, 1.0495, -0.01615]), 'KspC': np.array([-171.9065, -0.077993, 2839.319, 71.595, -0.77712, 0.0028426, 178.34, -0.07711, 0.0041249]), @@ -1591,7 +1591,7 @@ def MyAMI_K_calc(TempC=25., Sal=35., Ca=0.0102821, Mg=0.0528171, P=None, param_d return Ks -def MyAMI_K_calc_multi(T=25., S=35., Ca=0.0102821, Mg=0.0528171, P=None): +def MyAMI_K_calc_multi(TempC=25., Sal=35., Ca=0.0102821, Mg=0.0528171, P=None): """ Calculate MyAMI equilibrium constants for multiple T, S and Mg and Ca conditions. @@ -1617,8 +1617,8 @@ def MyAMI_K_calc_multi(T=25., S=35., Ca=0.0102821, Mg=0.0528171, P=None): """ # package data in a bunch of 1D arrays. d = Bunch() - d.T = np.array(T, ndmin=1) - d.S = np.array(S, ndmin=1) + d.T = np.array(TempC, ndmin=1) + d.S = np.array(Sal, ndmin=1) d.Ca = np.array(Ca, ndmin=1) d.Mg = np.array(Mg, ndmin=1) d.P = np.array(P, ndmin=1) diff --git a/cbsyst/boron_fns.py b/cbsyst/boron_fns.py index 61939b8..cd8790e 100644 --- a/cbsyst/boron_fns.py +++ b/cbsyst/boron_fns.py @@ -1,5 +1,5 @@ import numpy as np -from cbsyst.helpers import ch +from cbsyst.helpers import ch, cp, Bunch # B conc fns @@ -160,3 +160,39 @@ def R11_2_A11(R11): Convert Ratio to Abundance notation. """ return R11 / (1 + R11) + + +def calc_B_species(pHtot=None, BT=None, BO3=None, BO4=None, Ks=None): + # B system calculations + if pHtot is not None and BT is not None: + H = ch(pHtot) + elif BT is not None and BO3 is not None: + H = BT_BO3(BT, BO3, Ks) + elif BT is not None and BO4 is not None: + H = BT_BO4(BT, BO4, Ks) + elif BO3 is not None and BO4 is not None: + BT = BO3 + BO3 + H = BT_BO3(BT, BO3, Ks) + elif pHtot is not None and BO3 is not None: + H = ch(pHtot) + BT = pH_BO3(pHtot, BO3, Ks) + elif pHtot is not None and BO4 is not None: + H = ch(pHtot) + BT = pH_BO4(pHtot, BO4, Ks) + + # The above makes sure that BT and H are known, + # this next bit calculates all the missing species + # from BT and H. 
+ + if BO3 is None: + BO3 = cBO3(BT, H, Ks) + if BO4 is None: + BO4 = cBO4(BT, H, Ks) + if pHtot is None: + pHtot = np.array(cp(H), ndmin=1) + + return Bunch({'pHtot': pHtot, + 'H': H, + 'BT': BT, + 'BO3': BO3, + 'BO4': BO4}) diff --git a/cbsyst/carbon_fns.py b/cbsyst/carbon_fns.py index 2fda38c..13093b2 100644 --- a/cbsyst/carbon_fns.py +++ b/cbsyst/carbon_fns.py @@ -1,6 +1,6 @@ import scipy.optimize as opt import numpy as np -from cbsyst.helpers import ch, noms, cast_array, maxL +from cbsyst.helpers import ch, noms, cast_array, maxL, calc_pH_scales, Bunch, cp # from cbsyst.boron_fns import cBO4 @@ -353,9 +353,9 @@ def TA_DIC(TA, DIC, BT, TP, TSi, TS, TF, Ks): # Returns H # """ # TA, DIC, BT = noms(TA, DIC, BT) # get nominal values of inputs -# par = cast_array(TA, DIC, BT, Ks.K1, Ks.K2, Ks.KB, Ks.KW) # cast parameters into array +# = cast_array(TA, DIC, BT, Ks.K1, Ks.K2, Ks.KB, Ks.KW) # cast meters into array -# return np.apply_along_axis(_zero_wrapper, 0, par, fn=zero_TA_DIC) +# return np.apply_along_axis(_zero_wrapper, 0, fn=zero_TA_DIC) def zero_TA_DIC(h, TA, DIC, BT, K1, K2, KB, KW): @@ -503,3 +503,156 @@ def fCO2_to_pCO2(fCO2, Tc): return fCO2 / np.exp(P * (B + 2 * delta) / RT) + +def calc_C_species(pHtot=None, DIC=None, CO2=None, + HCO3=None, CO3=None, TA=None, + fCO2=None, pCO2=None, + T_in=None, BT=None, TP=0, TSi=0, + TS=0, TF=0, Ks=None): + """ + Calculate all carbon species from minimal input. + """ + + # if fCO2 is given but CO2 is not, calculate CO2 + if CO2 is None: + if fCO2 is not None: + CO2 = fCO2_to_CO2(fCO2, Ks) + elif pCO2 is not None: + CO2 = fCO2_to_CO2(pCO2_to_fCO2(pCO2, T_in), Ks) + + # Carbon System Calculations (from Zeebe & Wolf-Gladrow, Appendix B) + # 1. CO2 and pH + if CO2 is not None and pHtot is not None: + H = ch(pHtot) + DIC = CO2_pH(CO2, pHtot, Ks) + # 2. CO2 and HCO3 + elif CO2 is not None and HCO3 is not None: + H = CO2_HCO3(CO2, HCO3, Ks) + DIC = CO2_pH(CO2, cp(H), Ks) + # 3. CO2 and CO3 + elif CO2 is not None and CO3 is not None: + H = CO2_CO3(CO2, CO3, Ks) + DIC = CO2_pH(CO2, cp(H), Ks) + # 4. CO2 and TA + elif CO2 is not None and TA is not None: + # unit conversion because OH and H wrapped + # up in TA fns - all need to be in same units. + pHtot = CO2_TA(CO2=CO2, + TA=TA, + BT=BT, + TP=TP, + TSi=TSi, + TS=TS, + TF=TF, + Ks=Ks) + H = ch(pHtot) + DIC = CO2_pH(CO2, pHtot, Ks) + # 5. CO2 and DIC + elif CO2 is not None and DIC is not None: + H = CO2_DIC(CO2, DIC, Ks) + # 6. pHtot and HCO3 + elif pHtot is not None and HCO3 is not None: + H = ch(pHtot) + DIC = pH_HCO3(pHtot, HCO3, Ks) + # 7. pHtot and CO3 + elif pHtot is not None and CO3 is not None: + H = ch(pHtot) + DIC = pH_CO3(pHtot, CO3, Ks) + # 8. pHtot and TA + elif pHtot is not None and TA is not None: + H = ch(pHtot) + DIC = pH_TA(pH=pHtot, + TA=TA, + BT=BT, + TP=TP, + TSi=TSi, + TS=TS, + TF=TF, + Ks=Ks) + # 9. pHtot and DIC + elif pHtot is not None and DIC is not None: + H = ch(pHtot) + # 10. HCO3 and CO3 + elif HCO3 is not None and CO3 is not None: + H = HCO3_CO3(HCO3, CO3, Ks) + DIC = pH_CO3(cp(H), CO3, Ks) + # 11. HCO3 and TA + elif HCO3 is not None and TA is not None: + Warning('Nutrient alkalinity not implemented for this input combination.\nCalculations use only C and B alkalinity.') + H = HCO3_TA(HCO3, + TA, + BT, + Ks) + DIC = pH_HCO3(cp(H), HCO3, Ks) + # 12. HCO3 amd DIC + elif HCO3 is not None and DIC is not None: + H = HCO3_DIC(HCO3, DIC, Ks) + # 13. 
CO3 and TA + elif CO3 is not None and TA is not None: + Warning('Nutrient alkalinity not implemented for this input combination.\nCalculations use only C and B alkalinity.') + H = CO3_TA(CO3, + TA, + BT, + Ks) + DIC = pH_CO3(cp(H), CO3, Ks) + # 14. CO3 and DIC + elif CO3 is not None and DIC is not None: + H = CO3_DIC(CO3, DIC, Ks) + # 15. TA and DIC + elif TA is not None and DIC is not None: + pHtot = TA_DIC(TA=TA, + DIC=DIC, + BT=BT, + TP=TP, + TSi=TSi, + TS=TS, + TF=TF, + Ks=Ks) + H = ch(pHtot) + + # The above makes sure that DIC and H are known, + # this next bit calculates all the missing species + # from DIC and H. + if CO2 is None: + CO2 = cCO2(H, DIC, Ks) + if fCO2 is None: + fCO2 = CO2_to_fCO2(CO2, Ks) + if pCO2 is None: + pCO2 = fCO2_to_pCO2(fCO2, T_in) + if HCO3 is None: + HCO3 = cHCO3(H, DIC, Ks) + if CO3 is None: + CO3 = cCO3(H, DIC, Ks) + # Calculate all elements of Alkalinity + (TA, CAlk, BAlk, + PAlk, SiAlk, OH, + Hfree, HSO4, HF) = cTA(H=H, + DIC=DIC, + BT=BT, + TP=TP, + TSi=TSi, + TS=TS, + TF=TF, + Ks=Ks, mode='multi') + + # if pH not calced yet, calculate on all scales. + if pHtot is None: + pHtot = np.array(cp(H), ndmin=1) + + return Bunch({'pHtot': pHtot, + 'TA': TA, + 'DIC': DIC, + 'CO2': CO2, + 'H': H, + 'HCO3': HCO3, + 'fCO2': fCO2, + 'pCO2': pCO2, + 'CO3': CO3, + 'CAlk': CAlk, + 'BAlk': BAlk, + 'PAlk': PAlk, + 'SiAlk': SiAlk, + 'OH': OH, + 'Hfree': Hfree, + 'HSO4': HSO4, + 'HF': HF}) \ No newline at end of file diff --git a/cbsyst/cbsyst.py b/cbsyst/cbsyst.py index 31c7cc6..f7a06b1 100644 --- a/cbsyst/cbsyst.py +++ b/cbsyst/cbsyst.py @@ -9,48 +9,48 @@ from cbsyst.non_MyAMI_constants import * # Helper functions # ---------------- -def get_Ks(ps): +def calc_Ks(T, S, P, Mg, Ca, TS, TF, Ks=None): """ Helper function to calculate Ks. - If ps.Ks is a dict, those Ks are used - transparrently, with no pressure modification. + If Ks is a dict, those Ks are used + transparrently (i.e. no pressure modification). """ - if isinstance(ps.Ks, dict): - Ks = Bunch(ps.Ks) + if isinstance(Ks, dict): + Ks = Bunch(Ks) else: - if maxL(ps.Mg, ps.Ca) == 1: - if ps.Mg is None: - ps.Mg = 0.0528171 - if ps.Ca is None: - ps.Ca = 0.0102821 - Ks = MyAMI_K_calc(TempC=ps.T, Sal=ps.S, - Mg=ps.Mg, Ca=ps.Ca, P=ps.P) + if maxL(Mg, Ca) == 1: + if Mg is None: + Mg = 0.0528171 + if Ca is None: + Ca = 0.0102821 + Ks = MyAMI_K_calc(TempC=T, Sal=S, P=P, + Mg=Mg, Ca=Ca) else: # if only Ca or Mg provided, fill in other with modern - if ps.Mg is None: - ps.Mg = 0.0528171 - if ps.Ca is None: - ps.Ca = 0.0102821 + if Mg is None: + Mg = 0.0528171 + if Ca is None: + Ca = 0.0102821 # calculate Ca and Mg specific Ks - Ks = MyAMI_K_calc_multi(TempC=ps.T, Sal=ps.S, - Ca=ps.Ca, Mg=ps.Mg, P=ps.P) + Ks = MyAMI_K_calc_multi(TempC=T, Sal=S, P=P, + Ca=Ca, Mg=Mg) - # non-MyAMI Constants - Ks.update(calc_KPs(ps.T, ps.S, ps.P)) - Ks.update(calc_KF(ps.T, ps.S, ps.P)) - Ks.update(calc_KSi(ps.T, ps.S, ps.P)) + # non-MyAMI Constants + Ks.update(calc_KPs(T, S, P)) + Ks.update(calc_KF(T, S, P)) + Ks.update(calc_KSi(T, S, P)) - # pH conversions to total scale. - # - KPn are all on SWS - # - KSi is on SWS - # - MyAMI KW is on SWS... DOES THIS MATTER? + # pH conversions to total scale. + # - KP1, KP2, KP3 are all on SWS + # - KSi is on SWS + # - MyAMI KW is on SWS... DOES THIS MATTER? 
- SWStoTOT = (1 + ps.TS / Ks.KSO4) / (1 + ps.TS / Ks.KSO4 + ps.TF / Ks.KF) - # FREEtoTOT = 1 + ps.TS / Ks.KSO4 - conv = ['KP1', 'KP2', 'KP3', 'KSi', 'KW'] - for c in conv: - Ks[c] *= SWStoTOT + SWStoTOT = (1 + TS / Ks.KSO4) / (1 + TS / Ks.KSO4 + TF / Ks.KF) + # FREEtoTOT = 1 + 'T_' + mode]S / Ks.KSO4 + conv = ['KP1', 'KP2', 'KP3', 'KSi', 'KW'] + for c in conv: + Ks[c] *= SWStoTOT return Ks @@ -61,9 +61,10 @@ def Csys(pHtot=None, DIC=None, CO2=None, HCO3=None, CO3=None, TA=None, fCO2=None, pCO2=None, BT=None, Ca=None, Mg=None, - T=25., S=35., P=None, + T_in=25., S_in=35., P_in=None, + T_out=None, S_out=None, P_out=None, TP=0., TSi=0., - pHsws=None, pHfree=None, + pHsws=None, pHfree=None, pHNBS=None, Ks=None, pdict=None, unit='umol'): """ Calculate the carbon chemistry of seawater from a minimal parameter set. @@ -97,12 +98,20 @@ def Csys(pHtot=None, DIC=None, CO2=None, Ca, Mg : arra-like The [Ca] and [Mg] of the seawater, in mol / kg. Used in calculating MyAMI constants. - T, S : array-like - Temperature in Celcius and Salinity in PSU. - Used in calculating MyAMI constants. - P : array-like - Pressure in Bar. - Used in calculating MyAMI constants. + T_in, S_in : array-like + Temperature in Celcius and Salinity in PSU that the + measurements were conducted under. + Used in calculating constants. + P_in : array-like + Pressure in Bar that the measurements were conducted under. + Used in pressure-correcting constants. + T_out, S_out : array-like + Temperature in Celcius and Salinity in PSU of the desired + output conditions. + Used in calculating constants. + P_in : array-like + Pressure in Bar of the desired output conditions. + Used in pressure-correcting constants. unit : str Concentration units of C and B parameters (all must be in the same units). @@ -150,164 +159,70 @@ def Csys(pHtot=None, DIC=None, CO2=None, # Conserved seawater chemistry if 'TS' not in ps: - ps.TS = calc_TS(ps.S) + ps.TS = calc_TS(ps.S_in) if 'TF' not in ps: - ps.TF = calc_TF(ps.S) + ps.TF = calc_TF(ps.S_in) if ps.BT is None: - ps.BT = calc_TB(ps.S) - elif isinstance(BT, (int, float)): - ps.BT = ps.BT * ps.S / 35. - - # Calculate Ks - ps.Ks = get_Ks(ps) - - # Calculate pH scales (does nothing if no pH given) - ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, - ps.TS, ps.TF, ps.Ks)) - - # if fCO2 is given but CO2 is not, calculate CO2 - if ps.CO2 is None: - if ps.fCO2 is not None: - ps.CO2 = fCO2_to_CO2(ps.fCO2, ps.Ks) - elif ps.pCO2 is not None: - ps.CO2 = fCO2_to_CO2(pCO2_to_fCO2(ps.pCO2, ps.T), ps.Ks) - - # Carbon System Calculations (from Zeebe & Wolf-Gladrow, Appendix B) - # 1. CO2 and pH - if ps.CO2 is not None and ps.pHtot is not None: - ps.H = ch(ps.pHtot) - ps.DIC = CO2_pH(ps.CO2, ps.pHtot, ps.Ks) - # 2. ps.CO2 and ps.HCO3 - elif ps.CO2 is not None and ps.HCO3 is not None: - ps.H = CO2_HCO3(ps.CO2, ps.HCO3, ps.Ks) - ps.DIC = CO2_pH(ps.CO2, cp(ps.H), ps.Ks) - # 3. ps.CO2 and ps.CO3 - elif ps.CO2 is not None and ps.CO3 is not None: - ps.H = CO2_CO3(ps.CO2, ps.CO3, ps.Ks) - ps.DIC = CO2_pH(ps.CO2, cp(ps.H), ps.Ks) - # 4. ps.CO2 and ps.TA - elif ps.CO2 is not None and ps.TA is not None: - # unit conversion because OH and H wrapped - # up in TA fns - all need to be in same units. - ps.pHtot = CO2_TA(CO2=ps.CO2, - TA=ps.TA, - BT=ps.BT, - TP=ps.TP, - TSi=ps.TSi, - TS=ps.TS, - TF=ps.TF, - Ks=ps.Ks) - ps.H = ch(ps.pHtot) - ps.DIC = CO2_pH(ps.CO2, ps.pHtot, ps.Ks) - # 5. ps.CO2 and ps.DIC - elif ps.CO2 is not None and ps.DIC is not None: - ps.H = CO2_DIC(ps.CO2, ps.DIC, ps.Ks) - # 6. 
ps.pHtot and ps.HCO3 - elif ps.pHtot is not None and ps.HCO3 is not None: - ps.H = ch(ps.pHtot) - ps.DIC = pH_HCO3(ps.pHtot, ps.HCO3, ps.Ks) - # 7. ps.pHtot and ps.CO3 - elif ps.pHtot is not None and ps.CO3 is not None: - ps.H = ch(ps.pHtot) - ps.DIC = pH_CO3(ps.pHtot, ps.CO3, ps.Ks) - # 8. ps.pHtot and ps.TA - elif ps.pHtot is not None and ps.TA is not None: - ps.H = ch(ps.pHtot) - ps.DIC = pH_TA(pH=ps.pHtot, - TA=ps.TA, - BT=ps.BT, - TP=ps.TP, - TSi=ps.TSi, - TS=ps.TS, - TF=ps.TF, - Ks=ps.Ks) - # 9. ps.pHtot and ps.DIC - elif ps.pHtot is not None and ps.DIC is not None: - ps.H = ch(ps.pHtot) - # 10. ps.HCO3 and ps.CO3 - elif ps.HCO3 is not None and ps.CO3 is not None: - ps.H = HCO3_CO3(ps.HCO3, ps.CO3, ps.Ks) - ps.DIC = pH_CO3(cp(ps.H), ps.CO3, ps.Ks) - # 11. ps.HCO3 and ps.TA - elif ps.HCO3 is not None and ps.TA is not None: - Warning('Nutrient alkalinity not implemented for this input combination.\nCalculations use only C and B alkalinity.') - ps.H = HCO3_TA(ps.HCO3, - ps.TA, - ps.BT, - ps.Ks) - ps.DIC = pH_HCO3(cp(ps.H), ps.HCO3, ps.Ks) - # 12. ps.HCO3 amd ps.DIC - elif ps.HCO3 is not None and ps.DIC is not None: - ps.H = HCO3_DIC(ps.HCO3, ps.DIC, ps.Ks) - # 13. ps.CO3 and ps.TA - elif ps.CO3 is not None and ps.TA is not None: - Warning('Nutrient alkalinity not implemented for this input combination.\nCalculations use only C and B alkalinity.') - ps.H = CO3_TA(ps.CO3, - ps.TA, - ps.BT, - ps.Ks) - ps.DIC = pH_CO3(cp(ps.H), ps.CO3, ps.Ks) - # 14. ps.CO3 and ps.DIC - elif ps.CO3 is not None and ps.DIC is not None: - ps.H = CO3_DIC(ps.CO3, ps.DIC, ps.Ks) - # 15. ps.TA and ps.DIC - elif ps.TA is not None and ps.DIC is not None: - ps.pHtot = TA_DIC(TA=ps.TA, - DIC=ps.DIC, - BT=ps.BT, - TP=ps.TP, - TSi=ps.TSi, - TS=ps.TS, - TF=ps.TF, - Ks=ps.Ks) - ps.H = ch(ps.pHtot) - - # The above makes sure that DIC and H are known, - # this next bit calculates all the missing species - # from DIC and H. - if ps.CO2 is None: - ps.CO2 = cCO2(ps.H, ps.DIC, ps.Ks) - if ps.fCO2 is None: - ps.fCO2 = CO2_to_fCO2(ps.CO2, ps.Ks) - if ps.pCO2 is None: - ps.pCO2 = fCO2_to_pCO2(ps.fCO2, ps.T) - if ps.HCO3 is None: - ps.HCO3 = cHCO3(ps.H, ps.DIC, ps.Ks) - if ps.CO3 is None: - ps.CO3 = cCO3(ps.H, ps.DIC, ps.Ks) - # Always calculate elements of alkalinity - try: - # necessary for use with CBsyst in special cases - # where BT is not known before Csys is run. - (ps.TA, ps.CAlk, ps.BAlk, - ps.PAlk, ps.SiAlk, ps.OH, - ps.Hfree, ps.HSO4, ps.HF) = cTA(H=ps.H, - DIC=ps.DIC, - BT=ps.BT, - TP=ps.TP, - TSi=ps.TSi, - TS=ps.TS, - TF=ps.TF, - Ks=ps.Ks, mode='multi') - except TypeError: - pass - if ps.pHtot is None: - ps.pHtot = np.array(cp(ps.H), ndmin=1) - # Calculate other pH scales - ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, - ps.TS, ps.TF, ps.Ks)) - - # clean up for output + ps.BT = calc_TB(ps.S_in) + # elif isinstance(BT, (int, float)): + # ps.BT = ps.BT * ps.S_in / 35. 
+ + # Calculate Ks at input conditions + ps.Ks = calc_Ks(ps.T_in, ps.S_in, ps.P_in, + ps.Mg, ps.Ca, ps.TS, ps.TF, ps.Ks) + + # Calculate pH scales at input conditions (does nothing if no pH given) + ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, ps.pHNBS, + ps.TS, ps.TF, ps.T_in + 273.15, ps.S_in, ps.Ks)) + + # calculate C system at input conditions + ps.update(calc_C_species(pHtot=ps.pHtot, DIC=ps.DIC, CO2=ps.CO2, + HCO3=ps.HCO3, CO3=ps.CO3, TA=ps.TA, + fCO2=ps.fCO2, pCO2=ps.pCO2, + T_in=ps.T_in, BT=ps.BT, TP=ps.TP, TSi=ps.TSi, + TS=ps.TS, TF=ps.TF, Ks=ps.Ks)) + + # clean up output for k in ['BT', 'CO2', 'CO3', 'Ca', 'DIC', 'H', - 'HCO3', 'Mg', 'S', 'T', 'TA', + 'HCO3', 'Mg', 'S_in', 'T_in', 'TA', 'CAlk', 'PAlk', 'SiAlk', 'OH']: if not isinstance(ps[k], np.ndarray): # convert all outputs to (min) 1D numpy arrays. ps[k] = np.array(ps[k], ndmin=1) if ps.unit != 1: - for p in upar + ['CAlk', 'BAlk', 'PAlk', 'SiAlk', 'OH', 'HSO4', 'HF', 'Hfree']: + for p in upar + ['CAlk', 'BAlk', 'PAlk', 'SiAlk', + 'OH', 'HSO4', 'HF', 'Hfree']: ps[p] *= ps.unit # convert back to input units + # Calculate Output Conditions + # =========================== + if ps.T_out is not None or ps.S_out is not None or ps.P_out is not None: + if ps.T_out is None: + ps.T_out = ps.T_in + if ps.S_out is None: + ps.S_out = ps.S_in + if ps.P_out is None: + ps.P_out = ps.P_in + # assumes conserved alkalinity and DIC + out_cond = Csys(TA=ps.TA, DIC=ps.DIC, + T_in=ps.T_out, + S_in=ps.S_out, + P_in=ps.P_out, + unit=ps.unit) + # Calculate pH scales at output conditions (does nothing if no pH given) + out_cond.update(calc_pH_scales(out_cond.pHtot, out_cond.pHfree, out_cond.pHsws, out_cond.pHNBS, + out_cond.TS, out_cond.TF, out_cond.T_in + 273.15, out_cond.S_in, out_cond.Ks)) + + # rename parameters in output conditions + outputs = ['BAlk', 'BT', 'CAlk', 'CO2', 'CO3', + 'DIC', 'H', 'HCO3', 'HF', + 'HSO4', 'Hfree', 'Ks', 'OH', + 'PAlk', 'SiAlk', 'TA', 'TF', 'TP', + 'TS', 'TSi', 'fCO2', 'pCO2', + 'pHfree', 'pHsws', 'pHtot', 'pHNBS'] + + ps.update({k + '_out': out_cond[k] for k in outputs}) + # remove some superfluous outputs rem = ['pdict'] for r in rem: @@ -323,9 +238,10 @@ def Bsys(pHtot=None, BT=None, BO3=None, BO4=None, ABT=None, ABO3=None, ABO4=None, dBT=None, dBO3=None, dBO4=None, alphaB=None, - T=25., S=35., P=None, + T_in=25., S_in=35., P_in=None, + T_out=None, S_out=None, P_out=None, Ca=None, Mg=None, - pHsws=None, pHfree=None, + pHsws=None, pHfree=None, pHNBS=None, Ks=None, pdict=None): """ Calculate the boron chemistry of seawater from a minimal parameter set. 
@@ -382,52 +298,33 @@ def Bsys(pHtot=None, BT=None, BO3=None, BO4=None, # Conserved seawater chemistry if 'TS' not in ps: - ps.TS = calc_TS(ps.S) + ps.TS = calc_TS(ps.S_in) if 'TF' not in ps: - ps.TF = calc_TF(ps.S) + ps.TF = calc_TF(ps.S_in) - # if neither Ca nor Mg provided, use default Ks - ps.Ks = get_Ks(ps) + # Calculate Ks + ps.Ks = calc_Ks(ps.T_in, ps.S_in, ps.P_in, + ps.Mg, ps.Ca, ps.TS, ps.TF, ps.Ks) # Calculate pH scales (does nothing if none pH given) - ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, - ps.TS, ps.TF, ps.Ks)) + ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, ps.pHNBS, + ps.TS, ps.TF, ps.T_in + 273.15, ps.S_in, ps.Ks)) - # B system calculations - if ps.pHtot is not None and ps.BT is not None: - ps.H = ch(ps.pHtot) - elif ps.BT is not None and ps.BO3 is not None: - ps.H = BT_BO3(ps.BT, ps.BO3, ps.Ks) - elif ps.BT is not None and ps.BO4 is not None: - ps.H = BT_BO4(ps.BT, ps.BO4, ps.Ks) - elif ps.BO3 is not None and ps.BO4 is not None: - ps.BT = ps.BO3 + ps.BO3 - ps.H = BT_BO3(ps.BT, ps.BO3, ps.Ks) - elif ps.pHtot is not None and ps.BO3 is not None: - ps.H = ch(ps.pHtot) - ps.BT = pH_BO3(ps.pHtot, ps.BO3, ps.Ks) - elif ps.pHtot is not None and ps.BO4 is not None: - ps.H = ch(ps.pHtot) - ps.BT = pH_BO4(ps.pHtot, ps.BO4, ps.Ks) - - # The above makes sure that BT and H are known, - # this next bit calculates all the missing species - # from BT and H. - if ps.BO3 is None: - ps.BO3 = cBO3(ps.BT, ps.H, ps.Ks) - if ps.BO4 is None: - ps.BO4 = cBO4(ps.BT, ps.H, ps.Ks) + ps.update(calc_B_species(pHtot=ps.pHtot, BT=ps.BT, BO3=ps.BO3, BO4=ps.BO4, Ks=ps.Ks)) + + # If pH not calced yet, calculate on all scales if ps.pHtot is None: ps.pHtot = np.array(cp(ps.H), ndmin=1) # Calculate other pH scales - ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, - ps.TS, ps.TF, ps.Ks)) + ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, ps.pHNBS, + ps.TS, ps.TF, ps.T_in + 273.15, ps.S_in, ps.Ks)) + # If any isotope parameter specified, calculate the isotope systen. if NnotNone(ps.ABT, ps.ABO3, ps.ABO4, ps.dBT, ps.dBO3, ps.dBO4) != 0: ps.update(ABsys(pdict=ps)) for k in ['BT', 'H', 'BO3', 'BO4', - 'Ca', 'Mg', 'S', 'T']: + 'Ca', 'Mg', 'S_in', 'T_in']: # convert all outputs to (min) 1D numpy arrays. if not isinstance(ps[k], np.ndarray): # convert all outputs to (min) 1D numpy arrays. @@ -448,9 +345,9 @@ def ABsys(pHtot=None, ABT=None, ABO3=None, ABO4=None, dBT=None, dBO3=None, dBO4=None, alphaB=None, - T=25., S=35., P=None, + T_in=25., S_in=35., P_in=None, Ca=None, Mg=None, - pHsws=None, pHfree=None, + pHsws=None, pHfree=None, pHNBS=None, Ks=None, pdict=None): """ Calculate the boron isotope chemistry of seawater from a minimal parameter set. 
@@ -512,19 +409,19 @@ def ABsys(pHtot=None, if isinstance(pdict, dict): ps.update(pdict) - # Conserved seawater chemistry if 'TS' not in ps: - ps.TS = calc_TS(ps.S) + ps.TS = calc_TS(ps.S_in) if 'TF' not in ps: - ps.TF = calc_TF(ps.S) + ps.TF = calc_TF(ps.S_in) - # if neither Ca nor Mg provided, use default Ks - ps.Ks = get_Ks(ps) + # Calculate Ks + ps.Ks = calc_Ks(ps.T_in, ps.S_in, ps.P_in, + ps.Mg, ps.Ca, ps.TS, ps.TF, ps.Ks) # Calculate pH scales (does nothing if none pH given) - ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, - ps.TS, ps.TF, ps.Ks)) + ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, ps.pHNBS, + ps.TS, ps.TF, ps.T_in + 273.15, ps.S_in, ps.Ks)) # if deltas provided, calculate corresponding As if ps.dBT is not None: @@ -580,9 +477,10 @@ def CBsys(pHtot=None, DIC=None, CO2=None, HCO3=None, CO3=None, TA=None, fCO2=Non BT=None, BO3=None, BO4=None, ABT=None, ABO3=None, ABO4=None, dBT=None, dBO3=None, dBO4=None, alphaB=None, - T=25., S=35., P=None, + T_in=25., S_in=35., P_in=None, + T_out=None, S_out=None, P_out=None, Ca=None, Mg=None, TP=0., TSi=0., - pHsws=None, pHfree=None, + pHsws=None, pHfree=None, pHNBS=None, Ks=None, pdict=None, unit='umol'): """ Calculate carbon, boron and boron isotope chemistry of seawater from a minimal parameter set. @@ -659,7 +557,6 @@ def CBsys(pHtot=None, DIC=None, CO2=None, HCO3=None, CO3=None, TA=None, fCO2=Non ------- dict(/Bunch) containing all calculated parameters. """ - # Bunch inputs ps = Bunch(locals()) if isinstance(pdict, dict): @@ -683,109 +580,107 @@ def CBsys(pHtot=None, DIC=None, CO2=None, HCO3=None, CO3=None, TA=None, fCO2=Non if ps[p] is not None: ps[p] = np.divide(ps[p], ps.unit) # convert to molar - # reassign unit, so conversions aren't repeated by Csys + # reassign unit, convert back at end orig_unit = ps.unit ps.unit = 1. - # determin max lengths - kexcl = ['Ks', 'pdict', 'unit'] - ks = [k for k in ps.keys() if k not in kexcl] - L = maxL(*[ps[k] for k in ks]) - # make inputs same length - for k in ks: - if ps[k] is not None: - if isinstance(ps[k], (int, float)): - ps[k] = np.full(L, ps[k]) - # Conserved seawater chemistry if 'TS' not in ps: - ps.TS = calc_TS(ps.S) + ps.TS = calc_TS(ps.S_in) if 'TF' not in ps: - ps.TF = calc_TF(ps.S) + ps.TF = calc_TF(ps.S_in) # Calculate Ks - ps.Ks = get_Ks(ps) + ps.Ks = calc_Ks(ps.T_in, ps.S_in, ps.P_in, + ps.Mg, ps.Ca, ps.TS, ps.TF, ps.Ks) # Calculate pH scales (does nothing if none pH given) - ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, - ps.TS, ps.TF, ps.Ks)) + ps.update(calc_pH_scales(ps.pHtot, ps.pHfree, ps.pHsws, ps.pHNBS, + ps.TS, ps.TF, ps.T_in + 273.15, ps.S_in, ps.Ks)) # if fCO2 is given but CO2 is not, calculate CO2 if ps.CO2 is None: if ps.fCO2 is not None: ps.CO2 = fCO2_to_CO2(ps.fCO2, ps.Ks) elif ps.pCO2 is not None: - ps.CO2 = fCO2_to_CO2(pCO2_to_fCO2(ps.pCO2, ps.T), ps.Ks) + ps.CO2 = fCO2_to_CO2(pCO2_to_fCO2(ps.pCO2, ps.T_in), ps.Ks) # if no B info provided, assume modern conc. nBspec = NnotNone(ps.BT, ps.BO3, ps.BO4) if nBspec == 0: - ps.BT = calc_TB(ps.S) + ps.BT = calc_TB(ps.S_in) elif isinstance(BT, (int, float)): - ps.BT = ps.BT * ps.S / 35. - - # This section works out the order that things should be calculated in. + ps.BT = ps.BT * ps.S_in / 35. 
+ # count number of not None C parameters + nCspec = NnotNone(ps.DIC, ps.CO2, ps.HCO3, ps.CO3) # used below + + # if pH is given, it's easy + if ps.pHtot is not None or nBspec == 2: + ps.update(calc_B_species(pHtot=ps.pHtot, BT=ps.BT, BO3=ps.BO3, BO4=ps.BO4, Ks=ps.Ks)) + ps.update(calc_C_species(pHtot=ps.pHtot, DIC=ps.DIC, CO2=ps.CO2, + HCO3=ps.HCO3, CO3=ps.CO3, TA=ps.TA, + fCO2=ps.fCO2, pCO2=ps.pCO2, + T_in=ps.T_in, BT=ps.BT, TP=ps.TP, TSi=ps.TSi, + TS=ps.TS, TF=ps.TF, Ks=ps.Ks)) + # if not, this section works out the order that things should be calculated in. # Special case: if pH is missing, must have: - # a) two C - # b) two B + # a) two C or one C and both TA and BT + # b) two B (above) # c) one pH-dependent B, one pH-dependent C... But that's cray... # (c not implemented!) - if ps.pHtot is None: - nCspec = NnotNone(ps.DIC, ps.CO2, ps.HCO3, ps.CO3) - # a) if there are 2 C species, or one C species and TA and BT - if ((nCspec == 2) | ((nCspec == 1) & (NnotNone(ps.TA, ps.BT) == 2))): - ps.update(Csys(pdict=ps)) # calculate C first - ps.update(Bsys(pdict=ps)) # then B - # Note on the peculiar syntax here: - # ps is a dict of parameters, where - # everything that needs to be calculated - # is None. - # We give this to the [N]sys function - # as pdict, which passes the paramters from THIS - # function to [N]sys. - # As the output of Csys is also a dict (Bunch) - # with exactly the same form as ps, we can then - # use the .update attribute of ps to update - # all the paramters that were calculated by - # Csyst. - # Thus, all calculation is incremental, working - # with the same parameter set. As dicts are - # mutable, this has the added benefit of the - # parameters only being stored in memory once. - if ps.TA is None: - (ps.TA, ps.CAlk, ps.BAlk, - ps.PAlk, ps.SiAlk, ps.OH, - ps.Hfree, ps.HSO4, ps.HF) = cTA(H=ps.H, - DIC=ps.DIC, - BT=ps.BT, - TP=ps.TP, - TSi=ps.TSi, - TS=ps.TS, - TF=ps.TF, - Ks=ps.Ks, mode='multi') - # necessary becayse TA in Csys fails if there's no BT - # b) if there are 2 B species - elif nBspec == 2: - ps.update(Bsys(pdict=ps)) # calculate B first - ps.update(Csys(pdict=ps)) # then C - else: # if neither condition is met, throw an error - raise ValueError(("Impossible! You haven't provided enough parameters.\n" + - "If you don't know pH, you must provide either:\n" + - " - Two of [DIC, CO2, HCO3, CO3], and one of [BT, BO3, BO4]\n" + - " - One of [DIC, CO2, HCO3, CO3], and TA and BT\n" + - " - Two of [BT, BO3, BO4] and one of [DIC, CO2, HCO3, CO3]")) - - else: # if we DO have pH, it's dead easy! - ps.update(Bsys(pdict=ps)) # calculate B first - ps.update(Csys(pdict=ps)) # then C + elif ((nCspec == 2) | ((nCspec == 1) & (NnotNone(ps.TA, ps.BT) == 2))): # case A + ps.update(calc_C_species(pHtot=ps.pHtot, DIC=ps.DIC, CO2=ps.CO2, + HCO3=ps.HCO3, CO3=ps.CO3, TA=ps.TA, + fCO2=ps.fCO2, pCO2=ps.pCO2, + T_in=ps.T_in, BT=ps.BT, TP=ps.TP, TSi=ps.TSi, + TS=ps.TS, TF=ps.TF, Ks=ps.Ks)) + ps.update(calc_B_species(pHtot=ps.pHtot, BT=ps.BT, BO3=ps.BO3, BO4=ps.BO4, Ks=ps.Ks)) + # elif nBspec == 2: # case B -- moved up + # ps.update(calc_B_species(pHtot=ps.pHtot, BT=ps.BT, BO3=ps.BO3, BO4=ps.BO4, Ks=ps.Ks)) + # ps.update(calc_C_species(pHtot=ps.pHtot, DIC=ps.DIC, CO2=ps.CO2, + # HCO3=ps.HCO3, CO3=ps.CO3, TA=ps.TA, + # fCO2=ps.fCO2, pCO2=ps.pCO2, + # T_in=ps.T_in, BT=ps.BT, TP=ps.TP, TSi=ps.TSi, + # TS=ps.TS, TF=ps.TF, Ks=ps.Ks)) # then C + else: # if neither condition is met, throw an error + raise ValueError(("Impossible! 
You haven't provided enough parameters.\n" + + "If you don't know pH, you must provide either:\n" + + " - Two of [DIC, CO2, HCO3, CO3], and one of [BT, BO3, BO4]\n" + + " - One of [DIC, CO2, HCO3, CO3], and TA and BT\n" + + " - Two of [BT, BO3, BO4] and one of [DIC, CO2, HCO3, CO3]")) for p in upar + ['CAlk', 'BAlk', 'PAlk', 'SiAlk', 'OH', 'HSO4', 'HF', 'Hfree']: ps[p] *= orig_unit # convert back to input units + # Recursive approach to calculate output params. + # if output conditions specified, calculate outputs. + if ps.T_out is not None or ps.S_out is not None or ps.P_out is not None: + if ps.T_out is None: + ps.T_out = ps.T_in + if ps.S_out is None: + ps.S_out = ps.S_in + if ps.P_out is None: + ps.P_out = ps.P_in + # assumes conserved alkalinity + out_cond = CBsys(TA=ps.TA, DIC=ps.DIC, BT=ps.BT, T_in=ps.T_out, + S_in=ps.S_out, P_in=ps.P_out, unit=ps.unit) + # Calculate pH scales (does nothing if no pH given) + out_cond.update(calc_pH_scales(out_cond.pHtot, out_cond.pHfree, out_cond.pHsws, out_cond.pHNBS, + out_cond.TS, out_cond.TF, out_cond.T_in + 273.15, out_cond.S_in, out_cond.Ks)) + # rename parameters in output conditions + outputs = ['BAlk', 'BT', 'CAlk', 'CO2', 'CO3', + 'DIC', 'H', 'HCO3', 'HF', + 'HSO4', 'Hfree', 'Ks', 'OH', + 'PAlk', 'SiAlk', 'TA', 'TF', 'TP', + 'TS', 'TSi', 'fCO2', 'pCO2', + 'pHfree', 'pHsws', 'pHtot', 'pHNBS', 'BO3', 'BO4', + 'ABO3', 'ABO4', 'dBO3', 'dBO4'] + + ps.update({k + '_out': out_cond[k] for k in outputs}) + # remove some superfluous outputs rem = ['pdict', 'unit'] for r in rem: if r in ps: del ps[r] - return ps diff --git a/cbsyst/helpers.py b/cbsyst/helpers.py index 90b4fb0..6a9d815 100644 --- a/cbsyst/helpers.py +++ b/cbsyst/helpers.py @@ -248,14 +248,27 @@ def calc_TF(Sal): return (a / b) * (Sal / c) # mol/kg-SW +# def calc_TB(Sal): +# """ +# Calculate total Boron + +# Lee, Kim, Byrne, Millero, Feely, Yong-Ming Liu. 2010. +# Geochimica Et Cosmochimica Acta 74 (6): 1801-1811 +# """ +# a, b = (0.0004326, 35.) +# return a * Sal / b + + def calc_TB(Sal): """ Calculate total Boron - - Lee, Kim, Byrne, Millero, Feely, Yong-Ming Liu. 2010. - Geochimica Et Cosmochimica Acta 74 (6): 1801-1811 + + Directly from CO2SYS: + Uppstrom, L., Deep-Sea Research 21:161-162, 1974: + this is 0.000416 * Sal/35. = 0.0000119 * Sal + TB(FF) = (0.000232 / 10.811) * (Sal / 1.80655) in mol/kg-SW """ - a, b = (0.0004326, 35.) + a, b = (0.0004157, 35.) return a * Sal / b @@ -269,28 +282,36 @@ def calc_fH(TempK, Sal): # Convert between pH scales -def calc_pH_scales(pHtot, pHfree, pHsws, TS, TF, Ks): +def calc_pH_scales(pHtot, pHfree, pHsws, pHNBS, TS, TF, TempK, Sal, Ks): """ Calculate pH on all scales, given one. """ # check if any pH scale is given. 
- npH = NnotNone(pHfree, pHsws, pHtot) + npH = NnotNone(pHfree, pHsws, pHtot, pHNBS) if npH == 1: # pH scale conversions FREEtoTOT = -np.log10((1 + TS / Ks.KSO4)) SWStoTOT = -np.log10((1 + TS / Ks.KSO4) / (1 + TS / Ks.KSO4 + TF / Ks.KF)) + fH = calc_fH(TempK, Sal) if pHtot is not None: return {'pHfree': pHtot - FREEtoTOT, - 'pHsws': pHtot - SWStoTOT} + 'pHsws': pHtot - SWStoTOT, + 'pHNBS': pHtot - SWStoTOT - np.log10(fH)} elif pHsws is not None: return {'pHfree': pHsws + SWStoTOT - FREEtoTOT, - 'pHtot': pHsws + SWStoTOT} + 'pHtot': pHsws + SWStoTOT, + 'pHNBS': pHsws - np.log10(fH)} elif pHfree is not None: return {'pHsws': pHfree + FREEtoTOT - SWStoTOT, - 'pHtot': pHfree + FREEtoTOT} + 'pHtot': pHfree + FREEtoTOT, + 'pHNBS': pHfree + FREEtoTOT - SWStoTOT - np.log10(fH)} + elif pHNBS is not None: + return {'pHsws': pHNBS + np.log10(fH), + 'pHtot': pHNBS + np.log10(fH) + SWStoTOT, + 'pHfree': pHNBS + np.log10(fH) + SWStoTOT - FREEtoTOT} else: return {} diff --git a/setup.py b/setup.py index 3be183c..8508ee5 100644 --- a/setup.py +++ b/setup.py @@ -25,6 +25,5 @@ setup(name='cbsyst', 'pandas', 'uncertainties', 'tqdm'], - package_data={'cbsyst': ['test_data/GLODAP_data/*.py', - 'test_data/Lueker2000/*.csv']}, + package_data={'cbsyst': ['test_data/*']}, zip_safe=True)
Implement different pH scales

- [x] Total
- [x] SWS
- [x] Free
- [x] NBS
oscarbranson/cbsyst
diff --git a/cbsyst/test_data/Orr2015/plot_CO2SYS_comparison.py b/cbsyst/test_data/Orr2015/plot_CO2SYS_comparison.py new file mode 100644 index 0000000..1c049d7 --- /dev/null +++ b/cbsyst/test_data/Orr2015/plot_CO2SYS_comparison.py @@ -0,0 +1,18 @@ +""" +TODO: Script to compare CO2SYS.m output to cbsyst output. +Comparison similar to Orr et al (2015) + +Parameters +---------- +raw_data : str + Path to GLODAP-WOA comparison data. +co2sys_data : str + Path to output from CO2SYS.m +output_dir : str + Output directory for plots + +Returns +------- +Prints a range of statistics comparing CO2SYS and cbsyst. +Creates a variety of plots with statistics in output_dir. +""" diff --git a/test_cbsyst.py b/test_cbsyst.py index 06216fe..4e464e8 100644 --- a/test_cbsyst.py +++ b/test_cbsyst.py @@ -323,21 +323,21 @@ class ReferenceDataTestCase(unittest.TestCase): # Csys calculations # TA from pH and DIC - cTA = Csys(pHtot=pH, DIC=DIC, BT=BT, S=S) + cTA = Csys(pHtot=pH, DIC=DIC, BT=BT, S_in=S) # Calculate % differences from measured dTA = (100 * (TA - cTA.TA) / TA) self.assertLess(max(abs(dTA)), 0.2, msg='TA from DIC and pH') # pH from TA and DIC - cpH = Csys(DIC=DIC, TA=TA, BT=BT, S=S) + cpH = Csys(DIC=DIC, TA=TA, BT=BT, S_in=S) # Calculate % differences from measured dpH = (100 * (pH - cpH.pHtot) / pH) self.assertLess(max(abs(dpH)), 0.2, msg='pH from TA and DIC') # DIC from pH and TA - cDIC = Csys(pHtot=pH, TA=TA, BT=BT, S=S) + cDIC = Csys(pHtot=pH, TA=TA, BT=BT, S_in=S) # Calculate % differences from measured dDIC = (100 * (DIC - cDIC.DIC) / DIC) @@ -365,21 +365,21 @@ class ReferenceDataTestCase(unittest.TestCase): # Csys calculations # TA from pH and DIC - cTA = CBsys(pHtot=pH, DIC=DIC, BT=BT, S=S) + cTA = CBsys(pHtot=pH, DIC=DIC, BT=BT, S_in=S) # Calculate % differences from measured dTA = (100 * (TA - cTA.TA) / TA) self.assertLess(max(abs(dTA)), 0.2, msg='TA from DIC and pH') # pH from TA and DIC - cpH = CBsys(DIC=DIC, TA=TA, BT=BT, S=S) + cpH = CBsys(DIC=DIC, TA=TA, BT=BT, S_in=S) # Calculate % differences from measured dpH = (100 * (pH - cpH.pHtot) / pH) self.assertLess(max(abs(dpH)), 0.2, msg='pH from TA and DIC') # DIC from pH and TA - cDIC = CBsys(pHtot=pH, TA=TA, BT=BT, S=S) + cDIC = CBsys(pHtot=pH, TA=TA, BT=BT, S_in=S) # Calculate % differences from measured dDIC = (100 * (DIC - cDIC.DIC) / DIC) @@ -395,7 +395,7 @@ class ReferenceDataTestCase(unittest.TestCase): # Calculate using cbsys # TA from DIC and fCO2 - cTA = Csys(DIC=ld.DIC.values, fCO2=ld.fCO2.values, T=ld.Temp.values, S=ld.Sal.values) + cTA = Csys(DIC=ld.DIC.values, fCO2=ld.fCO2.values, T_in=ld.Temp.values, S_in=ld.Sal.values) dTA = ld.TA - cTA.TA dTA_median = np.median(dTA) dTA_pc95 = np.percentile(dTA, [2.5, 97.5]) @@ -403,7 +403,7 @@ class ReferenceDataTestCase(unittest.TestCase): self.assertTrue(all(abs(dTA_pc95 - dTA_median) <= 16), msg='TA 95% Conf <= 16') # fCO2 from TA and DIC - cfCO2 = Csys(TA=ld.TA.values, DIC=ld.DIC.values, T=ld.Temp.values, S=ld.Sal.values) + cfCO2 = Csys(TA=ld.TA.values, DIC=ld.DIC.values, T_in=ld.Temp.values, S_in=ld.Sal.values) dfCO2 = ld.fCO2 - cfCO2.fCO2 dfCO2_median = np.median(dfCO2) # dfCO2_pc95 = np.percentile(dfCO2, [2.5, 97.5]) @@ -414,7 +414,7 @@ class ReferenceDataTestCase(unittest.TestCase): # self.assertTrue(all(abs(dfCO2_pc95) <= 70), msg='fCO2 95% Conc <= 70') # DIC from TA and fCO2 - cDIC = Csys(TA=ld.TA.values, fCO2=ld.fCO2.values, T=ld.Temp.values, S=ld.Sal.values) + cDIC = Csys(TA=ld.TA.values, fCO2=ld.fCO2.values, T_in=ld.Temp.values, S_in=ld.Sal.values) dDIC = ld.DIC - cDIC.DIC 
dDIC_median = np.median(dDIC) dDIC_pc95 = np.percentile(dDIC, [2.5, 97.5]) @@ -431,7 +431,7 @@ class ReferenceDataTestCase(unittest.TestCase): # Calculate using cbsys # TA from DIC and fCO2 - cTA = CBsys(DIC=ld.DIC.values, fCO2=ld.fCO2.values, T=ld.Temp.values, S=ld.Sal.values) + cTA = CBsys(DIC=ld.DIC.values, fCO2=ld.fCO2.values, T_in=ld.Temp.values, S_in=ld.Sal.values) dTA = ld.TA - cTA.TA dTA_median = np.median(dTA) dTA_pc95 = np.percentile(dTA, [2.5, 97.5]) @@ -439,7 +439,7 @@ class ReferenceDataTestCase(unittest.TestCase): self.assertTrue(all(abs(dTA_pc95 - dTA_median) <= 16), msg='TA 95% Conf <= 16') # fCO2 from TA and DIC - cfCO2 = CBsys(TA=ld.TA.values, DIC=ld.DIC.values, T=ld.Temp.values, S=ld.Sal.values) + cfCO2 = CBsys(TA=ld.TA.values, DIC=ld.DIC.values, T_in=ld.Temp.values, S_in=ld.Sal.values) dfCO2 = ld.fCO2 - cfCO2.fCO2 dfCO2_median = np.median(dfCO2) # dfCO2_pc95 = np.percentile(dfCO2, [2.5, 97.5]) @@ -450,7 +450,7 @@ class ReferenceDataTestCase(unittest.TestCase): # self.assertTrue(all(abs(dfCO2_pc95) <= 70), msg='fCO2 95% Conc <= 70') # DIC from TA and fCO2 - cDIC = CBsys(TA=ld.TA.values, fCO2=ld.fCO2.values, T=ld.Temp.values, S=ld.Sal.values) + cDIC = CBsys(TA=ld.TA.values, fCO2=ld.fCO2.values, T_in=ld.Temp.values, S_in=ld.Sal.values) dDIC = ld.DIC - cDIC.DIC dDIC_median = np.median(dDIC) dDIC_pc95 = np.percentile(dDIC, [2.5, 97.5]) @@ -477,8 +477,8 @@ class ReferenceDataTestCase(unittest.TestCase): gd = gd.loc[gd.cruise != 270] # calculate pH from TA and DIC - cpH = Csys(TA=gd.talk, DIC=gd.tco2, T=gd.temperature, S=gd.salinity, - P=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) + cpH = Csys(TA=gd.talk, DIC=gd.tco2, T_in=gd.temperature, S_in=gd.salinity, + P_in=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) pH_resid = gd.phtsinsitutp - cpH.pHtot pH_median = np.median(pH_resid) pH_pc95 = np.percentile(pH_resid, [2.5, 97.5]) @@ -487,8 +487,8 @@ class ReferenceDataTestCase(unittest.TestCase): self.assertTrue(all(abs(pH_pc95) <= 0.05), msg='pH 95% Conf <= 0.05') # calculate TA from pH and DIC - cTA = Csys(pHtot=gd.phtsinsitutp, DIC=gd.tco2, T=gd.temperature, S=gd.salinity, - P=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) + cTA = Csys(pHtot=gd.phtsinsitutp, DIC=gd.tco2, T_in=gd.temperature, S_in=gd.salinity, + P_in=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) TA_resid = gd.talk - cTA.TA TA_median = np.median(TA_resid) TA_pc95 = np.percentile(TA_resid, [2.5, 97.5]) @@ -497,8 +497,8 @@ class ReferenceDataTestCase(unittest.TestCase): self.assertTrue(all(abs(TA_pc95) < 13), msg='TA 95% Conf <= 15') # calculate DIC from TA and pH - cDIC = Csys(pHtot=gd.phtsinsitutp, TA=gd.talk, T=gd.temperature, S=gd.salinity, - P=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) + cDIC = Csys(pHtot=gd.phtsinsitutp, TA=gd.talk, T_in=gd.temperature, S_in=gd.salinity, + P_in=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) DIC_resid = gd.tco2 - cDIC.DIC DIC_median = np.median(DIC_resid) DIC_pc95 = np.percentile(DIC_resid, [2.5, 97.5]) @@ -526,8 +526,8 @@ class ReferenceDataTestCase(unittest.TestCase): gd = gd.loc[gd.cruise != 270] # calculate pH from TA and DIC - cpH = CBsys(TA=gd.talk, DIC=gd.tco2, T=gd.temperature, S=gd.salinity, - P=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) + cpH = CBsys(TA=gd.talk, DIC=gd.tco2, T_in=gd.temperature, S_in=gd.salinity, + P_in=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) pH_resid = gd.phtsinsitutp - cpH.pHtot pH_median = np.median(pH_resid) pH_pc95 = np.percentile(pH_resid, [2.5, 
97.5]) @@ -536,8 +536,8 @@ class ReferenceDataTestCase(unittest.TestCase): self.assertTrue(all(abs(pH_pc95) <= 0.05), msg='pH 95% Conf <= 0.05') # calculate TA from pH and DIC - cTA = CBsys(pHtot=gd.phtsinsitutp, DIC=gd.tco2, T=gd.temperature, S=gd.salinity, - P=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) + cTA = CBsys(pHtot=gd.phtsinsitutp, DIC=gd.tco2, T_in=gd.temperature, S_in=gd.salinity, + P_in=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) TA_resid = gd.talk - cTA.TA TA_median = np.median(TA_resid) TA_pc95 = np.percentile(TA_resid, [2.5, 97.5]) @@ -546,8 +546,8 @@ class ReferenceDataTestCase(unittest.TestCase): self.assertTrue(all(abs(TA_pc95) < 13), msg='TA 95% Conf <= 15') # calculate DIC from TA and pH - cDIC = CBsys(pHtot=gd.phtsinsitutp, TA=gd.talk, T=gd.temperature, S=gd.salinity, - P=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) + cDIC = CBsys(pHtot=gd.phtsinsitutp, TA=gd.talk, T_in=gd.temperature, S_in=gd.salinity, + P_in=gd.pressure, TP=gd.phosphate, TSi=gd.silicate, BT=415.7) DIC_resid = gd.tco2 - cDIC.DIC DIC_median = np.median(DIC_resid) DIC_pc95 = np.percentile(DIC_resid, [2.5, 97.5])
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 8 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/oscarbranson/cbsyst.git@e85f6ebff4bd0e78ad229835af146ec7ff4d4aa1#egg=cbsyst
exceptiongroup==1.2.2
iniconfig==2.1.0
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
scipy==1.13.1
six==1.17.0
tomli==2.2.1
tqdm==4.67.1
tzdata==2025.2
uncertainties==3.2.2
name: cbsyst
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.4.4=h6a678d5_1
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=3.0.16=h5eee18b_0
  - pip=25.0=py39h06a4308_0
  - python=3.9.21=he870216_1
  - readline=8.2=h5eee18b_0
  - setuptools=75.8.0=py39h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - wheel=0.45.1=py39h06a4308_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - exceptiongroup==1.2.2
    - iniconfig==2.1.0
    - numpy==2.0.2
    - packaging==24.2
    - pandas==2.2.3
    - pluggy==1.5.0
    - pytest==8.3.5
    - python-dateutil==2.9.0.post0
    - pytz==2025.2
    - scipy==1.13.1
    - six==1.17.0
    - tomli==2.2.1
    - tqdm==4.67.1
    - tzdata==2025.2
    - uncertainties==3.2.2
prefix: /opt/conda/envs/cbsyst
[ "test_cbsyst.py::ReferenceDataTestCase::test_Bockmon_Data_CBsys", "test_cbsyst.py::ReferenceDataTestCase::test_Bockmon_Data_Csys", "test_cbsyst.py::ReferenceDataTestCase::test_Lueker_Data_CBsys", "test_cbsyst.py::ReferenceDataTestCase::test_Lueker_Data_Csys" ]
[ "test_cbsyst.py::ReferenceDataTestCase::test_GLODAPv2_CBsys", "test_cbsyst.py::ReferenceDataTestCase::test_GLODAPv2_Csys" ]
[ "test_cbsyst.py::BoronFnTestCase::test_Boron_Fns", "test_cbsyst.py::CarbonFnTestCase::test_Carbon_Fns" ]
[]
MIT License
2,271
[ "cbsyst/helpers.py", "MANIFEST.in", "cbsyst/boron_fns.py", "cbsyst/carbon_fns.py", "setup.py", "CONTRIBUTING.md", "cbsyst/cbsyst.py", "cbsyst/MyAMI_V2.py", "README.md" ]
[ "cbsyst/helpers.py", "MANIFEST.in", "cbsyst/boron_fns.py", "cbsyst/carbon_fns.py", "setup.py", "CONTRIBUTING.md", "cbsyst/cbsyst.py", "cbsyst/MyAMI_V2.py", "README.md" ]
elastic__rally-427
e62d73bd14ae0caaabb1c726e147eefeafc6ebd2
2018-03-08 08:56:58
a5408e0d0d07b271b509df8057a7c73303604c10
diff --git a/docs/car.rst b/docs/car.rst index 5d7f21c2..855d38e8 100644 --- a/docs/car.rst +++ b/docs/car.rst @@ -94,6 +94,11 @@ These values are derived by Rally internally based on command line flags and you If you specify multiple configurations, e.g. ``--car="4gheap,ea"``, Rally will apply them in order. It will first read all variables in ``4gheap.ini``, then in ``ea.ini``. Afterwards, it will copy all configuration files from the corresponding config base of ``4gheap`` and *append* all configuration files from ``ea``. This also shows when to define a separate "car" and when to define a "mixin": If you need to amend configuration files, use a mixin, if you need to have a specific configuration, define a car. +Simple customizations +^^^^^^^^^^^^^^^^^^^^^ + +For simple customizations you can create the directory hierarchy as outlined above and use the ``--team-path`` command line parameter to refer to this configuration. For more complex use cases and distributed multi-node benchmarks, we recommend to use custom team repositories. + Custom Team Repositories ^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/command_line_reference.rst b/docs/command_line_reference.rst index 7b33c618..16aa80e7 100644 --- a/docs/command_line_reference.rst +++ b/docs/command_line_reference.rst @@ -141,6 +141,14 @@ You can use ``--include-tasks`` to specify a comma-separated list of tasks that Selects the team repository that Rally should use to resolve cars. By default the ``default`` team repository is used, which is available in the Github project `rally-teams <https://github.com/elastic/rally-teams>`__. See the documentation about :doc:`cars </car>` on how to add your own team repositories. +``team-path`` +~~~~~~~~~~~~~ + +A directory that contains a team configuration. ``--team-path`` and ``--team-repository`` are mutually exclusive. See the :doc:`car reference </car>` for the required directory structure. 
+ +Example:: + + esrally --team-path=~/Projects/es-teams ``car`` ~~~~~~~ diff --git a/esrally/mechanic/mechanic.py b/esrally/mechanic/mechanic.py index 1e40957d..b14ec1ca 100644 --- a/esrally/mechanic/mechanic.py +++ b/esrally/mechanic/mechanic.py @@ -589,9 +589,9 @@ def create(cfg, metrics_store, all_node_ips, cluster_settings=None, sources=Fals car = None plugins = [] else: - repo = team.team_repo(cfg) - car = team.load_car(repo, cfg.opts("mechanic", "car.names"), cfg.opts("mechanic", "car.params")) - plugins = team.load_plugins(repo, cfg.opts("mechanic", "car.plugins"), cfg.opts("mechanic", "plugin.params")) + team_path = team.team_path(cfg) + car = team.load_car(team_path, cfg.opts("mechanic", "car.names"), cfg.opts("mechanic", "car.params")) + plugins = team.load_plugins(team_path, cfg.opts("mechanic", "car.plugins"), cfg.opts("mechanic", "plugin.params")) if sources or distribution: s = supplier.create(cfg, sources, distribution, build, challenge_root_path, plugins) diff --git a/esrally/mechanic/team.py b/esrally/mechanic/team.py index e097194f..b56160f2 100644 --- a/esrally/mechanic/team.py +++ b/esrally/mechanic/team.py @@ -11,7 +11,7 @@ logger = logging.getLogger("rally.team") def list_cars(cfg): - loader = CarLoader(team_repo(cfg)) + loader = CarLoader(team_path(cfg)) cars = [] for name in loader.car_names(): cars.append(loader.load_car(name)) @@ -50,7 +50,7 @@ def load_car(repo, name, car_params={}): def list_plugins(cfg): - plugins = PluginLoader(team_repo(cfg)).plugins() + plugins = PluginLoader(team_path(cfg)).plugins() if plugins: console.println("Available Elasticsearch plugins:\n") console.println(tabulate.tabulate([[p.name, p.config] for p in plugins], headers=["Name", "Configuration"])) @@ -83,25 +83,27 @@ def load_plugins(repo, plugin_names, plugin_params={}): return plugins -def team_repo(cfg, update=True): - distribution_version = cfg.opts("mechanic", "distribution.version", mandatory=False) - repo_name = cfg.opts("mechanic", "repository.name") - offline = cfg.opts("system", "offline.mode") - remote_url = cfg.opts("teams", "%s.url" % repo_name, mandatory=False) - root = cfg.opts("node", "root.dir") - team_repositories = cfg.opts("mechanic", "team.repository.dir") - teams_dir = os.path.join(root, team_repositories) - - current_team_repo = repo.RallyRepository(remote_url, teams_dir, repo_name, "teams", offline) - if update: +def team_path(cfg): + root_path = cfg.opts("mechanic", "team.path", mandatory=False) + if root_path: + return root_path + else: + distribution_version = cfg.opts("mechanic", "distribution.version", mandatory=False) + repo_name = cfg.opts("mechanic", "repository.name") + offline = cfg.opts("system", "offline.mode") + remote_url = cfg.opts("teams", "%s.url" % repo_name, mandatory=False) + root = cfg.opts("node", "root.dir") + team_repositories = cfg.opts("mechanic", "team.repository.dir") + teams_dir = os.path.join(root, team_repositories) + + current_team_repo = repo.RallyRepository(remote_url, teams_dir, repo_name, "teams", offline) current_team_repo.update(distribution_version) - return current_team_repo + return current_team_repo.repo_dir class CarLoader: - def __init__(self, repo): - self.repo = repo - self.cars_dir = os.path.join(self.repo.repo_dir, "cars") + def __init__(self, team_root_path): + self.cars_dir = os.path.join(team_root_path, "cars") def car_names(self): def __car_name(path): @@ -199,9 +201,8 @@ class Car: class PluginLoader: - def __init__(self, repo): - self.repo = repo - self.plugins_root_path = os.path.join(self.repo.repo_dir, 
"plugins") + def __init__(self, team_root_path): + self.plugins_root_path = os.path.join(team_root_path, "plugins") def plugins(self): known_plugins = self._core_plugins() + self._configured_plugins() diff --git a/esrally/rally.py b/esrally/rally.py index 15b04b27..131e89a2 100644 --- a/esrally/rally.py +++ b/esrally/rally.py @@ -278,6 +278,9 @@ def create_arg_parser(): p.add_argument( "--challenge", help="define the challenge to use. List possible challenges for tracks with `%s list tracks`" % PROGRAM_NAME) + p.add_argument( + "--team-path", + help="define the path to the car and plugin configurations to use.") p.add_argument( "--car", help="define the car to use. List possible cars with `%s list cars` (default: defaults)." % PROGRAM_NAME, @@ -676,8 +679,12 @@ def main(): if args.distribution_version: cfg.add(config.Scope.applicationOverride, "mechanic", "distribution.version", args.distribution_version) cfg.add(config.Scope.applicationOverride, "mechanic", "distribution.repository", args.distribution_repository) - cfg.add(config.Scope.applicationOverride, "mechanic", "repository.name", args.team_repository) cfg.add(config.Scope.applicationOverride, "mechanic", "car.names", csv_to_list(args.car)) + if args.team_path: + cfg.add(config.Scope.applicationOverride, "mechanic", "team.path", os.path.abspath(io.normalize_path(args.team_path))) + cfg.add(config.Scope.applicationOverride, "mechanic", "repository.name", None) + else: + cfg.add(config.Scope.applicationOverride, "mechanic", "repository.name", args.team_repository) cfg.add(config.Scope.applicationOverride, "mechanic", "car.plugins", csv_to_list(args.elasticsearch_plugins)) cfg.add(config.Scope.applicationOverride, "mechanic", "car.params", kv_to_map(csv_to_list(args.car_params))) cfg.add(config.Scope.applicationOverride, "mechanic", "plugin.params", kv_to_map(csv_to_list(args.plugin_params)))
Allow simpler cluster configuration

If we want to customize the benchmarked Elasticsearch cluster, we currently need to create a custom team repository, which requires git. For simple use cases, we should allow users to just provide a path to a configuration directory (similar to `--track-path` for tracks). We will therefore add a new command line parameter `--team-path` for that purpose.
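For reference, the tests in this record show the loaders accepting a plain directory instead of a git repository object. A minimal sketch of what that enables for users, assuming a hypothetical local team directory that follows the expected `cars/` (and optionally `plugins/`) layout:

```python
from esrally.mechanic import team

# Hypothetical local team directory passed via --team-path; it must contain
# a "cars/" subfolder (and optionally "plugins/") as the loaders expect.
team_dir = "/home/user/Projects/es-teams"

# After the change, load_car takes a path rather than a cloned team repository.
car = team.load_car(team_dir, ["default"])
print(car.name)
```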
elastic/rally
diff --git a/tests/mechanic/team_test.py b/tests/mechanic/team_test.py index ea717b0e..37f80635 100644 --- a/tests/mechanic/team_test.py +++ b/tests/mechanic/team_test.py @@ -7,41 +7,36 @@ from esrally.mechanic import team current_dir = os.path.dirname(os.path.abspath(__file__)) -class UnitTestRepo: - def __init__(self, repo_dir): - self.repo_dir = repo_dir - - class CarLoaderTests(TestCase): def __init__(self, args): super().__init__(args) - self.repo = None + self.team_dir = None self.loader = None def setUp(self): - self.repo = UnitTestRepo(os.path.join(current_dir, "data")) - self.loader = team.CarLoader(self.repo) + self.team_dir = os.path.join(current_dir, "data") + self.loader = team.CarLoader(self.team_dir) def test_lists_car_names(self): # contrary to the name this assertion compares contents but does not care about order. self.assertCountEqual(["default", "32gheap", "missing_config_base", "empty_config_base", "ea", "verbose"], self.loader.car_names()) def test_load_known_car(self): - car = team.load_car(self.repo, ["default"], car_params={"data_paths": ["/mnt/disk0", "/mnt/disk1"]}) + car = team.load_car(self.team_dir, ["default"], car_params={"data_paths": ["/mnt/disk0", "/mnt/disk1"]}) self.assertEqual("default", car.name) self.assertEqual([os.path.join(current_dir, "data", "cars", "vanilla")], car.config_paths) self.assertDictEqual({"heap_size": "1g", "data_paths": ["/mnt/disk0", "/mnt/disk1"]}, car.variables) self.assertEqual({}, car.env) def test_load_car_with_mixin_single_config_base(self): - car = team.load_car(self.repo, ["32gheap", "ea"]) + car = team.load_car(self.team_dir, ["32gheap", "ea"]) self.assertEqual("32gheap+ea", car.name) self.assertEqual([os.path.join(current_dir, "data", "cars", "vanilla")], car.config_paths) self.assertEqual({"heap_size": "32g", "assertions": "true"}, car.variables) self.assertEqual({"JAVA_TOOL_OPTS": "A B C D E F"}, car.env) def test_load_car_with_mixin_multiple_config_bases(self): - car = team.load_car(self.repo, ["32gheap", "ea", "verbose"]) + car = team.load_car(self.team_dir, ["32gheap", "ea", "verbose"]) self.assertEqual("32gheap+ea+verbose", car.name) self.assertEqual([ os.path.join(current_dir, "data", "cars", "vanilla"), @@ -52,17 +47,17 @@ class CarLoaderTests(TestCase): def test_raises_error_on_unknown_car(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: - team.load_car(self.repo, ["don_t-know-you"]) + team.load_car(self.team_dir, ["don_t-know-you"]) self.assertRegex(ctx.exception.args[0], r"Unknown car \[don_t-know-you\]. 
List the available cars with [^\s]+ list cars.") def test_raises_error_on_empty_config_base(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: - team.load_car(self.repo, ["empty_config_base"]) + team.load_car(self.team_dir, ["empty_config_base"]) self.assertEqual("At least one config base is required for car ['empty_config_base']", ctx.exception.args[0]) def test_raises_error_on_missing_config_base(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: - team.load_car(self.repo, ["missing_config_base"]) + team.load_car(self.team_dir, ["missing_config_base"]) self.assertEqual("At least one config base is required for car ['missing_config_base']", ctx.exception.args[0]) @@ -72,8 +67,7 @@ class PluginLoaderTests(TestCase): self.loader = None def setUp(self): - repo = UnitTestRepo(os.path.join(current_dir, "data")) - self.loader = team.PluginLoader(repo) + self.loader = team.PluginLoader(os.path.join(current_dir, "data")) def test_lists_plugins(self): self.assertCountEqual(
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 5 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-benchmark" ], "pre_install": [ "apt-get update", "apt-get install -y gcc python3-dev" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 elasticsearch==6.0.0 -e git+https://github.com/elastic/rally.git@e62d73bd14ae0caaabb1c726e147eefeafc6ebd2#egg=esrally importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==2.9.5 jsonschema==2.5.1 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==5.4.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work py-cpuinfo==3.2.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-benchmark==3.4.1 tabulate==0.8.1 thespian==3.9.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.22 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: rally channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - elasticsearch==6.0.0 - jinja2==2.9.5 - jsonschema==2.5.1 - markupsafe==2.0.1 - psutil==5.4.0 - py-cpuinfo==3.2.0 - pytest-benchmark==3.4.1 - tabulate==0.8.1 - thespian==3.9.2 - urllib3==1.22 prefix: /opt/conda/envs/rally
[ "tests/mechanic/team_test.py::CarLoaderTests::test_lists_car_names", "tests/mechanic/team_test.py::CarLoaderTests::test_load_car_with_mixin_multiple_config_bases", "tests/mechanic/team_test.py::CarLoaderTests::test_load_car_with_mixin_single_config_base", "tests/mechanic/team_test.py::CarLoaderTests::test_load_known_car", "tests/mechanic/team_test.py::CarLoaderTests::test_raises_error_on_empty_config_base", "tests/mechanic/team_test.py::CarLoaderTests::test_raises_error_on_missing_config_base", "tests/mechanic/team_test.py::CarLoaderTests::test_raises_error_on_unknown_car", "tests/mechanic/team_test.py::PluginLoaderTests::test_cannot_load_community_plugin_with_missing_config", "tests/mechanic/team_test.py::PluginLoaderTests::test_cannot_load_plugin_with_missing_config", "tests/mechanic/team_test.py::PluginLoaderTests::test_lists_plugins", "tests/mechanic/team_test.py::PluginLoaderTests::test_loads_community_plugin_without_configuration", "tests/mechanic/team_test.py::PluginLoaderTests::test_loads_configured_plugin", "tests/mechanic/team_test.py::PluginLoaderTests::test_loads_core_plugin" ]
[]
[]
[]
Apache License 2.0
2,272
[ "esrally/mechanic/mechanic.py", "esrally/mechanic/team.py", "docs/command_line_reference.rst", "docs/car.rst", "esrally/rally.py" ]
[ "esrally/mechanic/mechanic.py", "esrally/mechanic/team.py", "docs/command_line_reference.rst", "docs/car.rst", "esrally/rally.py" ]
G-Node__python-odml-237
2830914c8b5b67290d891d89aa28df344c5cacc7
2018-03-08 09:54:23
eeff5922987b064681d1328f81af317d8171808f
coveralls: [![Coverage Status](https://coveralls.io/builds/15870919/badge)](https://coveralls.io/builds/15870919) Coverage increased (+0.3%) to 71.249% when pulling **a98af4a02c51348bf188bdb2c7124f27e94e6527 on jgrewe:property** into **2830914c8b5b67290d891d89aa28df344c5cacc7 on G-Node:master**.

JuliaSprenger: In general I am happy with this implementation; there are just a few cases where I think the behaviour should be different:

- Providing a generator as value should list the generated items, not the string version of the generator (e.g. ```odml.Property(name='prop', value=range(10))``` should contain a list of numbers, not a string).
- The automatic dtype conversion should not introduce a loss of information:

```python
prop1 = odml.Property(name='prop1', value=[3])
prop2 = odml.Property(name='prop2', value=[3.5])
prop1.merge(prop2)
```

should result in an Error rather than ```prop1.value == [3, 3]```.

Also, I suggest adding a section.extend() function similar to the property.extend() function, but that's probably a different issue.

JuliaSprenger: The ```strict``` option is a good idea, but I think it should also be available for section.merge and from there be passed on to all merges of subsections and properties.

jgrewe: Yes, agreed. Would you mind adding it to issue #246, which was opened by Michi for the section append/extend/merge point you made?

JuliaSprenger: Cool! I added a comment to the other issue. Then I think we can merge now.
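The `strict` behaviour discussed in this review ends up in the patch as a keyword argument on `merge`, `append` and `extend`. A short sketch of the agreed semantics, following the tests in this record (the printed result reflects the discussion above; the tests themselves only assert the resulting length):

```python
import odml

int_p = odml.Property(name="aint", value=3)           # dtype inferred as int
double_p = odml.Property(name="adouble", value=3.14)  # dtype inferred as float

try:
    int_p.merge(double_p)              # strict merge: mismatching dtypes raise
except ValueError:
    pass                               # no silent truncation of 3.14

int_p.merge(double_p, strict=False)    # explicit opt-in to the lossy conversion
print(int_p.value)                     # [3, 3]
```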
diff --git a/.travis.yml b/.travis.yml index 64226cd..ba6c228 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,28 +25,44 @@ matrix: # env: # - OSXENV=2.7 -before_install: - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then bash resources/install_osx_virtualenv.sh; fi - install: - export PYVER=${TRAVIS_PYTHON_VERSION:0:1} + - if [ $PYVER = 3 ]; then + export PYCMD=python3; + export PIPCMD=pip3; + else + export PYCMD=python; + export PIPCMD=pip; + fi; + - if [ $COVERALLS = 1 ]; then - pip install --upgrade coveralls; + $PIPCMD install --upgrade coveralls; fi; + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then - which python; - source venv/bin/activate; - which python; - fi - - pip install lxml enum34 pyyaml rdflib + if [[ "$OSXENV" == "2.7" ]]; then + brew install python@2; + virtualenv venv -p python; + source venv/bin/activate; + export PYCMD=python; + export PIPCMD=pip; + else + brew upgrade python; + source venv/bin/activate; + export PYCMD=python3; + export PIPCMD=pip3; + fi; + fi; + + - $PIPCMD install lxml enum34 pyyaml rdflib script: - - which python - - python setup.py build + - which $PYCMD + - $PYCMD setup.py build - if [ $COVERALLS = 1 ]; then coverage${PYVER} run --source=odml setup.py test && coverage${PYVER} report -m; else - python setup.py test; + $PYCMD setup.py test; fi; after_success: diff --git a/odml/property.py b/odml/property.py index b02bb1f..a81f0af 100644 --- a/odml/property.py +++ b/odml/property.py @@ -150,7 +150,35 @@ class BaseProperty(base.baseobject, Property): @property def value(self): - return self._value + """ + Returns the value(s) stored in this property. Method always returns a list that + is a copy (!) of the stored value. Changing this list will NOT change the property. + For manipulation of the stored values use the append, extend, and direct access methods + (using brackets). + + For example: + >> p = odml.Property("prop", value=[1, 2, 3]) + >> print(p.value) + [1, 2, 3] + >> p.value.append(4) + >> print(p.value) + [1, 2, 3] + + Individual values can be accessed and manipulated like this: + >>> print(p[0]) + [1] + >> p[0] = 4 + >> print(p[0]) + [4] + + The values can be iterated e.g. with a loop: + >> for v in p.value: + print(v) + 4 + 2 + 3 + """ + return list(self._value) def value_str(self, index=0): """ @@ -164,6 +192,8 @@ class BaseProperty(base.baseobject, Property): Method ensures that the passed value(s) can be cast to the same dtype, i.e. that associated with this property or the inferred dtype of the first entry of the values list. + + :param values an iterable that contains the values """ for v in values: try: @@ -172,18 +202,49 @@ class BaseProperty(base.baseobject, Property): return False return True + def _convert_value_input(self, new_value): + """ + This method ensures, that the passed new value is a list. + If new_value is a string, it will convert it to a list of + strings if the new_value contains embracing brackets. 
+ + returns list of new_value + """ + if isinstance(new_value, str): + if new_value[0] == "[" and new_value[-1] == "]": + new_value = list(map(str.strip, new_value[1:-1].split(","))) + else: + new_value = [new_value] + elif isinstance(new_value, dict): + new_value = [str(new_value)] + elif hasattr(new_value, '__iter__') or hasattr(new_value, '__next__'): + new_value = list(new_value) + elif not isinstance(new_value, list): + new_value = [new_value] + else: + raise ValueError("odml.Property._convert_value_input: unsupported data type for values: %s" % type(new_value)) + return new_value + @value.setter def value(self, new_value): + """ + + Set the value of the property discarding any previous information. + Method will try to convert the passed value to the dtype of + the property and raise an ValueError, if not possible + + :param new_value a single value or list of values. + """ # Make sure boolean value 'False' gets through as well... - if new_value is None or new_value == "": + if new_value is None or (isinstance(new_value, (list, tuple, str)) and len(new_value) == 0): + self._value = [] return - if isinstance(new_value, str): - if new_value[0] == "[" and new_value[-1] == "]": - new_value = new_value[1:-1].split(",") - if not isinstance(new_value, list): - new_value = [new_value] + + new_value = self._convert_value_input(new_value) + if self._dtype is None: self._dtype = dtypes.infer_dtype(new_value[0]) + if not self._validate_values(new_value): raise ValueError("odml.Property.value: passed values are not of " "consistent type!") @@ -195,6 +256,8 @@ class BaseProperty(base.baseobject, Property): @value_origin.setter def value_origin(self, new_value): + if new_value == "": + new_value = None self._value_origin = new_value @property @@ -211,6 +274,8 @@ class BaseProperty(base.baseobject, Property): @unit.setter def unit(self, new_value): + if new_value == "": + new_value = None self._unit = new_value @property @@ -219,6 +284,8 @@ class BaseProperty(base.baseobject, Property): @reference.setter def reference(self, new_value): + if new_value == "": + new_value = None self._reference = new_value @property @@ -227,6 +294,8 @@ class BaseProperty(base.baseobject, Property): @definition.setter def definition(self, new_value): + if new_value == "": + new_value = None self._definition = new_value @property @@ -235,6 +304,8 @@ class BaseProperty(base.baseobject, Property): @dependency.setter def dependency(self, new_value): + if new_value == "": + new_value = None self._dependency = new_value @property @@ -243,6 +314,8 @@ class BaseProperty(base.baseobject, Property): @dependency_value.setter def dependency_value(self, new_value): + if new_value == "": + new_value = None self._dependency_value = new_value def remove(self, value): @@ -267,16 +340,64 @@ class BaseProperty(base.baseobject, Property): """ obj = super(BaseProperty, self).clone() obj._section = None - obj.value = self.value + obj.value = self._value return obj - def merge(self, property): - """ - Stub that doesn't do anything for this class - """ - pass + def merge(self, other, strict=True): + """Merges the property 'other' into self, if possible. Information + will be synchronized. Method will raise an ValueError when the + information in this property and the passed property are in + conflict. + + :param other a Property + :param strict Bool value to indicate whether types should be + implicitly converted even when information may be lost. Default is True, i.e. no conversion, and error will be raised if types do not match. 
- def unmerge(self, property): + """ + assert(isinstance(other, (BaseProperty))) + if strict and self.dtype != other.dtype: + raise ValueError("odml.Property.merge: src and dest dtypes do not match!") + + if self.unit is not None and other.unit is not None and self.unit != other.unit: + raise ValueError("odml.Property.merge: src and dest units (%s, %s) do not match!" + % (other.unit, self.unit)) + + if self.definition is not None and other.definition is not None: + self_def = ''.join(map(str.strip, self.definition.split())).lower() + other_def = ''.join(map(str.strip, other.definition.split())).lower() + if self_def != other_def: + raise ValueError("odml.Property.merge: src and dest definitions do not match!") + + if self.uncertainty is not None and other.uncertainty is not None: + raise ValueError("odml.Property.merge: src and dest uncertainty both set and do not match!") + + if self.reference is not None and other.reference is not None: + self_ref = ''.join(map(str.strip, self.reference.lower().split())) + other_ref = ''.join(map(str.strip, other.reference.lower().split())) + if self_ref != other_ref: + raise ValueError("odml.Property.merge: src and dest references are in conflict!") + + if self.value_origin is not None and other.value_origin is not None: + self_ori = ''.join(map(str.strip, self.value_origin.lower().split())) + other_ori = ''.join(map(str.strip, other.value_origin.lower().split())) + if self_ori != other_ori: + raise ValueError("odml.Property.merge: src and dest value_origin are in conflict!") + + if self.value_origin is None and other.value_origin is not None: + self.value_origin = other.value_origin + if self.uncertainty is None and other.uncertainty is not None: + self.uncertainty = other.uncertainty + if self.reference is None and other.reference is not None: + self.reference = other.reference + if self.definition is None and other.definition is not None: + self.definition = other.definition + if self.unit is None and other.unit is not None: + self.unit = other.unit + + to_add = [v for v in other.value if v not in self._value] + self.extend(to_add, strict=strict) + + def unmerge(self, other): """ Stub that doesn't do anything for this class """ @@ -308,3 +429,62 @@ class BaseProperty(base.baseobject, Property): def __getitem__(self, key): return self._value[key] + + def __setitem__(self, key, item): + if int(key) < 0 or int(key) > self.__len__(): + raise IndexError("odml.Property.__setitem__: key %i invalid for array of length %i" + % (int(key), self.__len__())) + try: + val = dtypes.get(item, self.dtype) + self._value[int(key)] = val + except Exception: + raise ValueError("odml.Property.__setitem__: passed value cannot be converted to data type \'%s\'!" % self._dtype) + + def extend(self, obj, strict=True): + """ + Extend the list of values stored in this property by the passed values. Method will + raise an ValueError, if values cannot be converted to the current dtype. One can also pass + another Property to append all values stored in that one. In this case units must match! + + :param obj single value, list of values or Property + :param strict a Bool that controls whether dtypes must match. Default is True. + """ + if isinstance(obj, BaseProperty): + if (obj.unit != self.unit): + raise ValueError("odml.Property.append: src and dest units (%s, %s) do not match!" 
+ % (obj.unit, self.unit)) + self.extend(obj.value) + return + + if self.__len__() == 0: + self.value = obj + return + + new_value = self._convert_value_input(obj) + if len(new_value) > 0 and strict and dtypes.infer_dtype(new_value[0]) != self.dtype: + raise ValueError("odml.Property.extend: passed value data type does not match dtype!"); + + if not self._validate_values(new_value): + raise ValueError("odml.Property.append: passed value(s) cannot be converted to " + "data type \'%s\'!" % self._dtype) + self._value.extend([dtypes.get(v, self.dtype) for v in new_value]) + + def append(self, obj, strict=True): + """ + Append a single value to the list of stored values. Method will raise an ValueError if + the passed value cannot be converted to the current dtype. + + :param obj the additional value. + :param strict a Bool that controls whether dtypes must match. Default is True. + """ + new_value = self._convert_value_input(obj) + if len(new_value) > 1: + raise ValueError("odml.property.append: Use extend to add a list of values!") + if len(new_value) > 0 and strict and dtypes.infer_dtype(new_value[0]) != self.dtype: + raise ValueError("odml.Property.extend: passed value data type does not match dtype!"); + + if not self._validate_values(new_value): + raise ValueError("odml.Property.append: passed value(s) cannot be converted to " + "data type \'%s\'!" % self._dtype) + self._value.append(dtypes.get(new_value[0], self.dtype)) + diff --git a/odml/tools/dict_parser.py b/odml/tools/dict_parser.py index 47ea641..0d2cdf0 100644 --- a/odml/tools/dict_parser.py +++ b/odml/tools/dict_parser.py @@ -80,8 +80,9 @@ class DictWriter: if hasattr(prop, attr): tag = getattr(prop, attr) - - if (tag == []) or tag: # Even if 'value' is empty, allow '[]' + if isinstance(tag, tuple): + prop_dict[attr] = list(tag) + elif (tag == []) or tag: # Even if 'value' is empty, allow '[]' prop_dict[attr] = tag props_seq.append(prop_dict) @@ -187,7 +188,7 @@ class DictReader: prop_attrs[attr] = _property[attr] prop = odmlfmt.Property.create(**prop_attrs) - prop._value = values + prop.value = values odml_props.append(prop) return odml_props diff --git a/odml/tools/dumper.py b/odml/tools/dumper.py index aa01b7a..dfb34a7 100644 --- a/odml/tools/dumper.py +++ b/odml/tools/dumper.py @@ -10,7 +10,7 @@ def get_props(obj, props): if hasattr(obj, p): x = getattr(obj, p) if x is not None: - if isinstance(x, list): + if isinstance(x, list) or isinstance(x, tuple): out.append("%s=%s" % (p, to_csv(x))) else: out.append("%s=%s" % (p, repr(x))) diff --git a/resources/install_osx_virtualenv.sh b/resources/install_osx_virtualenv.sh index 6005fd4..c38ed9a 100644 --- a/resources/install_osx_virtualenv.sh +++ b/resources/install_osx_virtualenv.sh @@ -1,19 +1,26 @@ #!/bin/bash - +echo %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% echo Running install_osx_virtalenv.sh +python --version; +echo travis python version: $TRAVIS_PYTHON_VERSION +echo %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; if [[ "$OSXENV" == "2.7" ]]; then + echo python 2.7 brew install python; virtualenv venv -p python; source venv/bin/activate; else + echo some other version brew install python3; virtualenv venv -p python3; source venv/bin/activate; + export PYCMD=python3; + export PIPCMD=pip3; fi fi
Usage of python list as odml value circumvents dtype checks

Since we removed the odml.Value class and simply replaced it with a python `list`, this `list` of course provides methods like `append` which in turn ignore all the nice `dtype` checks we would like to have in place to keep odML values consistent. As an example, this currently works without any error:

```
p = odml.Property(name="example", dtype=odml.DType.int, value=[2, 4, 16, 23, 42])
p.value.append("lame ending")
```
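The patch in this record closes that hole by returning a copy from the `value` getter and routing `append`/`extend` on the property itself through the dtype checks. A short sketch of the resulting behaviour, based on the patch and its tests:

```python
import odml

p = odml.Property(name="example", dtype=odml.DType.int, value=[2, 4, 16, 23, 42])

p.value.append("lame ending")   # mutates only a copy; the stored values are untouched
print(p.value)                  # [2, 4, 16, 23, 42]

p.append(8)                     # checked against the int dtype, then stored
try:
    p.append("lame ending")     # now rejected with a ValueError
except ValueError:
    pass
```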
G-Node/python-odml
diff --git a/test/test_property.py b/test/test_property.py index debe542..03f51f9 100644 --- a/test/test_property.py +++ b/test/test_property.py @@ -13,9 +13,84 @@ class TestProperty(unittest.TestCase): def test_value(self): p = Property("property", 100) self.assertEqual(p.value[0], 100) + self.assertEqual(type(p.value), list) - def test_bool_conversion(self): + p.append(10) + self.assertEqual(len(p), 2) + self.assertRaises(ValueError, p.append, [1,2,3]) + + p.extend([20, 30, '40']) + self.assertEqual(len(p), 5) + with self.assertRaises(ValueError): + p.append('invalid') + p.extend(('5', 6, 7)) + + p2 = Property("property 2", 3) + self.assertRaises(ValueError, p.append, p2) + p.extend(p2) + self.assertEqual(len(p), 6) + + p.value = None + self.assertEqual(len(p), 0) + + p.value = [1, 2, 3] + p.value = "" + self.assertEqual(len(p), 0) + + p.value = [1, 2, 3] + p.value = [] + self.assertEqual(len(p), 0) + + p.value = [1, 2, 3] + p.value = () + self.assertEqual(len(p), 0) + + p3 = Property("test", value=2, unit="Hz") + p4 = Property("test", value=5.5, unit="s") + + with self.assertRaises(ValueError): + p3.append(p4) + + p.value.append(5) + self.assertEqual(len(p.value), 0) + self.assertRaises(ValueError, p.append, 5.5) + + p.append(5.5, strict=False) + self.assertEqual(len(p), 1) + + self.assertRaises(ValueError, p.extend, [3.14, 6.28]) + p.extend([3.14, 6.28], strict=False) + self.assertEqual(len(p), 3) + + p5 = Property("test", value="a string") + p5.append("Freude") + self.assertEqual(len(p5), 2) + self.assertRaises(ValueError, p5.append, "[a, b, c]") + p5.extend("[a, b, c]") + self.assertEqual(len(p5), 5) + + p6 = Property("test", {"name": "Marie", "name":"Johanna"}) + self.assertEqual(len(p6), 1) + + def test_get_set_value(self): + values = [1, 2, 3, 4, 5] + p = Property("property", value=values) + + self.assertEqual(len(p), 5) + for s, d in zip(values, p.value): + self.assertEqual(s, d) + + count = 0 + for v in p: + count += 1 + self.assertEqual(count, len(values)) + p[0] = 10 + self.assertEqual(p[0], 10) + with self.assertRaises(ValueError): + p[1] = 'stringval' + + def test_bool_conversion(self): # Success tests p = Property(name='received', value=[1, 0, 1, 0, 1]) assert(p.dtype == 'int') @@ -47,7 +122,6 @@ class TestProperty(unittest.TestCase): assert(q.dtype == curr_type) def test_str_to_int_convert(self): - # Success Test p = Property(name='cats_onboard', value=['3', '0', '1', '0', '8']) assert(p.dtype == 'string') @@ -102,6 +176,8 @@ class TestProperty(unittest.TestCase): self.assertEqual(p.value_origin, None) p = Property("P", value_origin="V") self.assertEqual(p.value_origin, "V") + p.value_origin = "" + self.assertEqual(p.value_origin, None) def test_set_id(self): p = Property("P", id="79b613eb-a256-46bf-84f6-207df465b8f7") @@ -110,7 +186,58 @@ class TestProperty(unittest.TestCase): Property("P", id="id") self.assertNotEqual(p.id, "id") + def test_merge(self): + p_dst = Property("p1", value=[1, 2, 3], unit="Hz", definition="Freude\t schoener\nGoetterfunken\n", + reference="portal.g-node.org", uncertainty=0.0) + p_src = Property("p2", value=[2, 4, 6], unit="Hz", definition="FREUDE schoener GOETTERfunken") + + test_p = p_dst.clone() + test_p.merge(p_src) + self.assertEqual(len(test_p.value), 5) + + p_inv_unit = p_src.clone() + p_inv_unit.unit = 's' + + p_inv_def = p_src.clone() + p_inv_def.definition = "Freunde schoender Goetterfunken" + + p_inv_uncert = p_src.clone() + p_inv_uncert.uncertainty = 10.0 + + p_inv_ref = p_src.clone() + p_inv_ref.reference = "test" + + test_p = 
p_dst.clone() + self.assertRaises(ValueError, test_p.merge, p_inv_unit) + self.assertRaises(ValueError, test_p.merge, p_inv_def) + self.assertRaises(ValueError, test_p.merge, p_inv_uncert) + self.assertRaises(ValueError, test_p.merge, p_inv_ref) + + test_p.reference = None + test_p.merge(p_src) + self.assertEqual(test_p.reference, p_src.reference) + + test_p.unit = "" + test_p.merge(p_src) + self.assertEqual(test_p.unit, p_src.unit) + + test_p.uncertainty = None + test_p.merge(p_src) + self.assertEqual(test_p.uncertainty, p_src.uncertainty) + + test_p.definition = "" + test_p.merge(p_src) + self.assertEqual(test_p.definition, p_src.definition) + + double_p = Property("adouble", value=3.14) + int_p = Property("aint", value=3) + self.assertRaises(ValueError, double_p.merge, int_p) + + int_p.merge(double_p, strict=False) + self.assertEqual(len(int_p), 2) + if __name__ == "__main__": print("TestProperty") tp = TestProperty() tp.test_value() + tp.test_merge() diff --git a/test/test_rdf_writer.py b/test/test_rdf_writer.py index dca03c3..74fe195 100644 --- a/test/test_rdf_writer.py +++ b/test/test_rdf_writer.py @@ -100,7 +100,7 @@ class TestRDFWriter(unittest.TestCase): w.convert_to_rdf() self.assertEqual(len(list(w.g.subjects(predicate=RDF.li, object=Literal("val")))), 1) - doc.sections[0].properties[0].value.append("val2") + doc.sections[0].properties[0].append("val2") w = RDFWriter([doc]) w.convert_to_rdf() self.assertEqual(len(list(w.g.subject_objects(predicate=RDF.li))), 2)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 5 }
1.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "lxml pyyaml rdflib enum34", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libxml2-dev libxslt1-dev lib32z1-dev" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 iniconfig==2.1.0 isodate @ file:///home/conda/feedstock_root/build_artifacts/isodate_1733230734792/work lxml @ file:///croot/lxml_1737039601731/work -e git+https://github.com/G-Node/python-odml.git@2830914c8b5b67290d891d89aa28df344c5cacc7#egg=odML packaging==24.2 pluggy==1.5.0 pyparsing @ file:///croot/pyparsing_1731445506121/work pytest==8.3.5 PyYAML @ file:///croot/pyyaml_1728657952215/work rdflib @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rdflib_1743255530/work/dist tomli==2.2.1
name: python-odml channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - enum34=1.1.10=py39h06a4308_0 - icu=73.1=h6a678d5_0 - isodate=0.7.2=pyhd8ed1ab_1 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - libxml2=2.13.5=hfdd30dd_0 - libxslt=1.1.41=h097e994_0 - lxml=5.3.0=py39h57af460_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - pyparsing=3.2.0=py39h06a4308_0 - python=3.9.21=he870216_1 - pyyaml=6.0.2=py39h5eee18b_0 - rdflib=7.1.4=pyh29332c3_0 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - yaml=0.2.5=h7b6447c_0 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - tomli==2.2.1 prefix: /opt/conda/envs/python-odml
[ "test/test_property.py::TestProperty::test_get_set_value", "test/test_property.py::TestProperty::test_merge", "test/test_property.py::TestProperty::test_value", "test/test_property.py::TestProperty::test_value_origin" ]
[ "test/test_rdf_writer.py::TestRDFWriter::test_adding_doc_to_the_hub", "test/test_rdf_writer.py::TestRDFWriter::test_adding_other_entities_properties", "test/test_rdf_writer.py::TestRDFWriter::test_adding_properties", "test/test_rdf_writer.py::TestRDFWriter::test_adding_repository", "test/test_rdf_writer.py::TestRDFWriter::test_adding_sections", "test/test_rdf_writer.py::TestRDFWriter::test_adding_values", "test/test_rdf_writer.py::TestRDFWriter::test_convert_to_rdf", "test/test_rdf_writer.py::TestRDFWriter::test_section_subclass" ]
[ "test/test_property.py::TestProperty::test_bool_conversion", "test/test_property.py::TestProperty::test_dtype", "test/test_property.py::TestProperty::test_name", "test/test_property.py::TestProperty::test_parent", "test/test_property.py::TestProperty::test_path", "test/test_property.py::TestProperty::test_set_id", "test/test_property.py::TestProperty::test_str_to_int_convert" ]
[]
BSD 4-Clause "Original" or "Old" License
2,273
[ "odml/tools/dict_parser.py", ".travis.yml", "resources/install_osx_virtualenv.sh", "odml/tools/dumper.py", "odml/property.py" ]
[ "odml/tools/dict_parser.py", ".travis.yml", "resources/install_osx_virtualenv.sh", "odml/tools/dumper.py", "odml/property.py" ]
dask__dask-3254
cc9f8db12e4c43b61fb79bb74bbd71b3cd8ba1f7
2018-03-08 11:31:00
48c4a589393ebc5b335cc5c7df291901401b0b15
mrocklin: Merging tomorrow if there are no further comments
diff --git a/dask/array/core.py b/dask/array/core.py index 6f132e1ad..a4990fe95 100644 --- a/dask/array/core.py +++ b/dask/array/core.py @@ -1754,9 +1754,9 @@ class Array(Base): return cumprod(self, axis, dtype, out=out) @derived_from(np.ndarray) - def squeeze(self): + def squeeze(self, axis=None): from .routines import squeeze - return squeeze(self) + return squeeze(self, axis) def rechunk(self, chunks, threshold=None, block_size_limit=None): """ See da.rechunk for docstring """ diff --git a/dask/array/routines.py b/dask/array/routines.py index 8f1ea653a..da414204b 100644 --- a/dask/array/routines.py +++ b/dask/array/routines.py @@ -5,7 +5,6 @@ import warnings from collections import Iterable from distutils.version import LooseVersion from functools import wraps, partial -from itertools import product from numbers import Integral from operator import getitem @@ -836,20 +835,23 @@ def ravel(array): @wraps(np.squeeze) def squeeze(a, axis=None): - if 1 not in a.shape: - return a if axis is None: axis = tuple(i for i, d in enumerate(a.shape) if d == 1) - b = a.map_blocks(partial(np.squeeze, axis=axis), dtype=a.dtype) - chunks = tuple(bd for bd in b.chunks if bd != (1,)) + elif not isinstance(axis, tuple): + axis = (axis,) + + if any(a.shape[i] != 1 for i in axis): + raise ValueError("cannot squeeze axis with size other than one") + + for i in axis: + if not (-a.ndim <= i < a.ndim): + raise ValueError("%i out of bounds for %i-D array" % (i, a.ndim)) - name = 'squeeze-' + tokenize(a, axis) - old_keys = list(product([b.name], *[range(len(bd)) for bd in b.chunks])) - new_keys = list(product([name], *[range(len(bd)) for bd in chunks])) + axis = tuple(i % a.ndim for i in axis) - dsk = {n: b.dask[o] for o, n in zip(old_keys, new_keys)} + sl = tuple(0 if i in axis else slice(None) for i, s in enumerate(a.shape)) - return Array(sharedict.merge(b.dask, (name, dsk)), name, chunks, dtype=a.dtype) + return a[sl] def topk(k, x): diff --git a/dask/bag/core.py b/dask/bag/core.py index 9ad1ff047..3b30bebf8 100644 --- a/dask/bag/core.py +++ b/dask/bag/core.py @@ -918,21 +918,65 @@ class Bag(Base): def join(self, other, on_self, on_other=None): """ Joins collection with another collection. - Other collection must be an Iterable, and not a Bag. + Other collection must be one of the following: + 1. An iterable. We recommend tuples over lists for internal + performance reasons. + 2. A delayed object, pointing to a tuple. This is recommended if the + other collection is sizable and you're using the distributed + scheduler. Dask is able to pass around data wrapped in delayed + objects with greater sophistication. + 3. A Bag with a single partition + + You might also consider Dask Dataframe, whose join operations are much + more heavily optimized. 
+ + Parameters + ---------- + other: Iterable, Delayed, Bag + Other collection on which to join + on_self: callable + Function to call on elements in this collection to determine a + match + on_other: callable (defaults to on_self) + Function to call on elements in the other collection to determine a + match + + Examples + -------- >>> people = from_sequence(['Alice', 'Bob', 'Charlie']) >>> fruit = ['Apple', 'Apricot', 'Banana'] >>> list(people.join(fruit, lambda x: x[0])) # doctest: +SKIP [('Apple', 'Alice'), ('Apricot', 'Alice'), ('Banana', 'Bob')] """ - assert isinstance(other, Iterable) - assert not isinstance(other, Bag) + name = 'join-' + tokenize(self, other, on_self, on_other) + dsk = {} + if isinstance(other, Bag): + if other.npartitions == 1: + dsk.update(other.dask) + dsk['join-%s-other' % name] = (list, other._keys()[0]) + other = other._keys()[0] + else: + msg = ("Multi-bag joins are not implemented. " + "We recommend Dask dataframe if appropriate") + raise NotImplementedError(msg) + elif isinstance(other, Delayed): + dsk.update(other.dask) + other = other._key + elif isinstance(other, Iterable): + other = other + else: + msg = ("Joined argument must be single-partition Bag, " + " delayed object, or Iterable, got %s" % + type(other).__name) + raise TypeError(msg) + if on_other is None: on_other = on_self - name = 'join-' + tokenize(self, other, on_self, on_other) - dsk = dict(((name, i), (list, (join, on_other, other, - on_self, (self.name, i)))) - for i in range(self.npartitions)) + + dsk.update({(name, i): (list, (join, on_other, other, + on_self, (self.name, i))) + for i in range(self.npartitions)}) return type(self)(merge(self.dask, dsk), name, self.npartitions) def product(self, other): diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 12a4cda26..56632fa08 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -19,10 +19,10 @@ DataFrame - Add ``dd.tseries.Resampler.agg`` (:pr:`3202`) `Richard Postelnik`_ - Support operations that mix dataframes and arrays (:pr:`3230`) `Matthew Rocklin`_ - Bag +++ +- Support joining against single-partitioned bags and delayed objects (:pr:`3254`) `Matthew Rocklin`_ Core ++++ diff --git a/docs/source/index.rst b/docs/source/index.rst index 3edb01201..a045f7771 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -190,7 +190,7 @@ distributed memory clusters. scheduling.rst distributed.rst -**Diagnosing Perofmrance** +**Diagnosing Performance** Parallel code can be tricky to debug and profile. Dask provides a several tools to help make debugging and profiling graph execution easier.
dask.Bag slow and very large memory consumption with join

In a certain application I am passing a (moderately) large argument to Bag.join. But I am encountering a few issues:

- The graph does not appear in the diagnostics page (presumably the graph is being communicated)
- The client consumes GB of memory

I suspect this has to do with the fact that tasks are serialized individually and this large argument gets serialized multiple times. This is the code to reproduce the issue:

```
import dask, distributed
from dask.distributed import Client
from dask.sizeof import sizeof
import dask.bag as db
import time

dask.__version__  # '0.17.1'
distributed.__version__  # '1.21.0'

c = Client()

data = list(range(1000000))
sizeof(data)  # 37 Mb

start = time.time()
bag = db.from_sequence(range(100000), npartitions=100).join(data, lambda x: x)
print("Graph creation time", time.time() - start)
# Graph creation time 1.0362498760223389

start = time.time()
bag.count().compute()
print("Graph computation time", time.time() - start)
# This consumes GB of memory
```
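The patch for this record teaches `Bag.join` to accept a single-partition Bag or a `dask.delayed` object, so a sizable right-hand side enters the graph once instead of being serialized into every task. A hedged sketch of how the reproduction above could be adapted once that lands:

```python
import dask
import dask.bag as db

# Wrap the large collection once; the join then references the delayed key
# rather than embedding the full data in every task.
data = dask.delayed(tuple(range(1000000)))

bag = db.from_sequence(range(100000), npartitions=100)
joined = bag.join(data, lambda x: x)

print(joined.count().compute())
```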
dask/dask
diff --git a/dask/array/tests/test_routines.py b/dask/array/tests/test_routines.py index f37b9f817..f084adbd2 100644 --- a/dask/array/tests/test_routines.py +++ b/dask/array/tests/test_routines.py @@ -710,13 +710,35 @@ def test_ravel(): assert_eq(np.ravel(x), da.ravel(a)) -def test_squeeze(): - x = da.ones((10, 1), chunks=(3, 1)) [email protected]('is_func', [True, False]) [email protected]('axis', [None, 0, -1, (0, -1)]) +def test_squeeze(is_func, axis): + a = np.arange(10)[None, :, None, None] + d = da.from_array(a, chunks=(1, 3, 1, 1)) + + if is_func: + a_s = np.squeeze(a, axis=axis) + d_s = da.squeeze(d, axis=axis) + else: + a_s = a.squeeze(axis=axis) + d_s = d.squeeze(axis=axis) + + assert_eq(d_s, a_s) + assert same_keys(d_s, da.squeeze(d, axis=axis)) - assert_eq(x.squeeze(), x.compute().squeeze()) + if axis is None: + axis = tuple(range(a.ndim)) + else: + axis = axis if isinstance(axis, tuple) else (axis,) + axis = tuple(i % a.ndim for i in axis) + axis = tuple( + i for i, c in enumerate(d.chunks) if i in axis and len(c) == 1 + ) - assert x.squeeze().chunks == ((3, 3, 3, 1),) - assert same_keys(x.squeeze(), x.squeeze()) + exp_d_s_chunks = tuple( + c for i, c in enumerate(d.chunks) if i not in axis + ) + assert d_s.chunks == exp_d_s_chunks def test_vstack(): diff --git a/dask/bag/tests/test_bag.py b/dask/bag/tests/test_bag.py index 004139fba..cd9bf68d2 100644 --- a/dask/bag/tests/test_bag.py +++ b/dask/bag/tests/test_bag.py @@ -32,6 +32,15 @@ L = list(range(5)) * 3 b = Bag(dsk, 'x', 3) +def assert_eq(a, b): + if hasattr(a, 'compute'): + a = a.compute(get=dask.local.get_sync) + if hasattr(b, 'compute'): + b = b.compute(get=dask.local.get_sync) + + assert a == b + + def iseven(x): return x % 2 == 0 @@ -353,12 +362,18 @@ def test_var(): assert float(b.var()) == 2.0 -def test_join(): - c = b.join([1, 2, 3], on_self=isodd, on_other=iseven) - assert list(c) == list(join(iseven, [1, 2, 3], isodd, list(b))) - assert (list(b.join([1, 2, 3], isodd)) == - list(join(isodd, [1, 2, 3], isodd, list(b)))) - assert c.name == b.join([1, 2, 3], on_self=isodd, on_other=iseven).name [email protected]('transform', [ + identity, + dask.delayed, + lambda x: db.from_sequence(x, npartitions=1) +]) +def test_join(transform): + other = transform([1, 2, 3]) + c = b.join(other, on_self=isodd, on_other=iseven) + assert_eq(c, list(join(iseven, [1, 2, 3], isodd, list(b)))) + assert_eq(b.join(other, isodd), + list(join(isodd, [1, 2, 3], isodd, list(b)))) + assert c.name == b.join(other, on_self=isodd, on_other=iseven).name def test_foldby():
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 5 }
1.21
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 click==8.0.4 cloudpickle==2.2.1 -e git+https://github.com/dask/dask.git@cc9f8db12e4c43b61fb79bb74bbd71b3cd8ba1f7#egg=dask distributed==1.21.8 HeapDict==1.0.1 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work locket==1.0.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work msgpack==1.0.5 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 partd==1.2.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 python-dateutil==2.9.0.post0 pytz==2025.2 six==1.17.0 sortedcontainers==2.4.0 tblib==1.7.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work toolz==0.12.0 tornado==6.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zict==2.1.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - click==8.0.4 - cloudpickle==2.2.1 - distributed==1.21.8 - heapdict==1.0.1 - locket==1.0.0 - msgpack==1.0.5 - numpy==1.19.5 - pandas==1.1.5 - partd==1.2.0 - psutil==7.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - six==1.17.0 - sortedcontainers==2.4.0 - tblib==1.7.0 - toolz==0.12.0 - tornado==6.1 - zict==2.1.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_routines.py::test_squeeze[None-False]", "dask/array/tests/test_routines.py::test_squeeze[0-True]", "dask/array/tests/test_routines.py::test_squeeze[0-False]", "dask/array/tests/test_routines.py::test_squeeze[-1-True]", "dask/array/tests/test_routines.py::test_squeeze[-1-False]", "dask/array/tests/test_routines.py::test_squeeze[axis3-True]", "dask/array/tests/test_routines.py::test_squeeze[axis3-False]", "dask/bag/tests/test_bag.py::test_join[delayed]", "dask/bag/tests/test_bag.py::test_join[<lambda>]" ]
[]
[ "dask/array/tests/test_routines.py::test_array", "dask/array/tests/test_routines.py::test_atleast_nd_no_args[atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_no_args[atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_no_args[atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape0-chunks0-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape0-chunks0-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape0-chunks0-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape1-chunks1-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape1-chunks1-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape1-chunks1-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape2-chunks2-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape2-chunks2-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape2-chunks2-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape3-chunks3-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape3-chunks3-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape3-chunks3-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape4-chunks4-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape4-chunks4-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_one_arg[shape4-chunks4-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape10-shape20-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape10-shape20-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape10-shape20-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape11-shape21-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape11-shape21-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape11-shape21-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape12-shape22-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape12-shape22-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape12-shape22-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape13-shape23-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape13-shape23-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape13-shape23-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape14-shape24-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape14-shape24-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape14-shape24-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape15-shape25-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape15-shape25-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape15-shape25-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape16-shape26-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape16-shape26-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape16-shape26-atleast_3d]", 
"dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape17-shape27-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape17-shape27-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape17-shape27-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape18-shape28-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape18-shape28-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape18-shape28-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape19-shape29-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape19-shape29-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape19-shape29-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape110-shape210-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape110-shape210-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape110-shape210-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape111-shape211-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape111-shape211-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape111-shape211-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape112-shape212-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape112-shape212-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape112-shape212-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape113-shape213-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape113-shape213-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape113-shape213-atleast_3d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape114-shape214-atleast_1d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape114-shape214-atleast_2d]", "dask/array/tests/test_routines.py::test_atleast_nd_two_args[shape114-shape214-atleast_3d]", "dask/array/tests/test_routines.py::test_transpose", "dask/array/tests/test_routines.py::test_transpose_negative_axes", "dask/array/tests/test_routines.py::test_swapaxes", "dask/array/tests/test_routines.py::test_flip[shape0-flipud-kwargs0]", "dask/array/tests/test_routines.py::test_flip[shape0-fliplr-kwargs1]", "dask/array/tests/test_routines.py::test_flip[shape0-flip-kwargs2]", "dask/array/tests/test_routines.py::test_flip[shape0-flip-kwargs3]", "dask/array/tests/test_routines.py::test_flip[shape0-flip-kwargs4]", "dask/array/tests/test_routines.py::test_flip[shape0-flip-kwargs5]", "dask/array/tests/test_routines.py::test_flip[shape1-flipud-kwargs0]", "dask/array/tests/test_routines.py::test_flip[shape1-fliplr-kwargs1]", "dask/array/tests/test_routines.py::test_flip[shape1-flip-kwargs2]", "dask/array/tests/test_routines.py::test_flip[shape1-flip-kwargs3]", "dask/array/tests/test_routines.py::test_flip[shape1-flip-kwargs4]", "dask/array/tests/test_routines.py::test_flip[shape1-flip-kwargs5]", "dask/array/tests/test_routines.py::test_flip[shape2-flipud-kwargs0]", "dask/array/tests/test_routines.py::test_flip[shape2-fliplr-kwargs1]", "dask/array/tests/test_routines.py::test_flip[shape2-flip-kwargs2]", "dask/array/tests/test_routines.py::test_flip[shape2-flip-kwargs3]", 
"dask/array/tests/test_routines.py::test_flip[shape2-flip-kwargs4]", "dask/array/tests/test_routines.py::test_flip[shape2-flip-kwargs5]", "dask/array/tests/test_routines.py::test_flip[shape3-flipud-kwargs0]", "dask/array/tests/test_routines.py::test_flip[shape3-fliplr-kwargs1]", "dask/array/tests/test_routines.py::test_flip[shape3-flip-kwargs2]", "dask/array/tests/test_routines.py::test_flip[shape3-flip-kwargs3]", "dask/array/tests/test_routines.py::test_flip[shape3-flip-kwargs4]", "dask/array/tests/test_routines.py::test_flip[shape3-flip-kwargs5]", "dask/array/tests/test_routines.py::test_flip[shape4-flipud-kwargs0]", "dask/array/tests/test_routines.py::test_flip[shape4-fliplr-kwargs1]", "dask/array/tests/test_routines.py::test_flip[shape4-flip-kwargs2]", "dask/array/tests/test_routines.py::test_flip[shape4-flip-kwargs3]", "dask/array/tests/test_routines.py::test_flip[shape4-flip-kwargs4]", "dask/array/tests/test_routines.py::test_flip[shape4-flip-kwargs5]", "dask/array/tests/test_routines.py::test_matmul[x_shape0-y_shape0]", "dask/array/tests/test_routines.py::test_matmul[x_shape1-y_shape1]", "dask/array/tests/test_routines.py::test_matmul[x_shape2-y_shape2]", "dask/array/tests/test_routines.py::test_matmul[x_shape3-y_shape3]", "dask/array/tests/test_routines.py::test_matmul[x_shape4-y_shape4]", "dask/array/tests/test_routines.py::test_matmul[x_shape5-y_shape5]", "dask/array/tests/test_routines.py::test_matmul[x_shape6-y_shape6]", "dask/array/tests/test_routines.py::test_matmul[x_shape7-y_shape7]", "dask/array/tests/test_routines.py::test_matmul[x_shape8-y_shape8]", "dask/array/tests/test_routines.py::test_matmul[x_shape9-y_shape9]", "dask/array/tests/test_routines.py::test_matmul[x_shape10-y_shape10]", "dask/array/tests/test_routines.py::test_matmul[x_shape11-y_shape11]", "dask/array/tests/test_routines.py::test_matmul[x_shape12-y_shape12]", "dask/array/tests/test_routines.py::test_matmul[x_shape13-y_shape13]", "dask/array/tests/test_routines.py::test_matmul[x_shape14-y_shape14]", "dask/array/tests/test_routines.py::test_matmul[x_shape15-y_shape15]", "dask/array/tests/test_routines.py::test_matmul[x_shape16-y_shape16]", "dask/array/tests/test_routines.py::test_matmul[x_shape17-y_shape17]", "dask/array/tests/test_routines.py::test_matmul[x_shape18-y_shape18]", "dask/array/tests/test_routines.py::test_matmul[x_shape19-y_shape19]", "dask/array/tests/test_routines.py::test_matmul[x_shape20-y_shape20]", "dask/array/tests/test_routines.py::test_matmul[x_shape21-y_shape21]", "dask/array/tests/test_routines.py::test_matmul[x_shape22-y_shape22]", "dask/array/tests/test_routines.py::test_matmul[x_shape23-y_shape23]", "dask/array/tests/test_routines.py::test_matmul[x_shape24-y_shape24]", "dask/array/tests/test_routines.py::test_tensordot", "dask/array/tests/test_routines.py::test_tensordot_2[0]", "dask/array/tests/test_routines.py::test_tensordot_2[1]", "dask/array/tests/test_routines.py::test_tensordot_2[axes2]", "dask/array/tests/test_routines.py::test_tensordot_2[axes3]", "dask/array/tests/test_routines.py::test_tensordot_2[axes4]", "dask/array/tests/test_routines.py::test_tensordot_2[axes5]", "dask/array/tests/test_routines.py::test_tensordot_2[axes6]", "dask/array/tests/test_routines.py::test_dot_method", "dask/array/tests/test_routines.py::test_vdot[shape0-chunks0]", "dask/array/tests/test_routines.py::test_vdot[shape1-chunks1]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape0-0-ndim-<lambda>]", 
"dask/array/tests/test_routines.py::test_apply_along_axis[shape0-0-sum-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape0-0-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape0-0-range2-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape1-1-ndim-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape1-1-sum-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape1-1-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape1-1-range2-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape2-2-ndim-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape2-2-sum-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape2-2-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape2-2-range2-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape3--1-ndim-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape3--1-sum-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape3--1-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_along_axis[shape3--1-range2-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape0-axes0-sum0-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape0-axes0-sum1-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape0-axes0-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape1-0-sum0-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape1-0-sum1-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape1-0-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape2-axes2-sum0-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape2-axes2-sum1-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape2-axes2-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape3-axes3-sum0-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape3-axes3-sum1-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape3-axes3-range-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape4-axes4-sum0-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape4-axes4-sum1-<lambda>]", "dask/array/tests/test_routines.py::test_apply_over_axes[shape4-axes4-range-<lambda>]", "dask/array/tests/test_routines.py::test_ptp[shape0-None]", "dask/array/tests/test_routines.py::test_ptp[shape1-0]", "dask/array/tests/test_routines.py::test_ptp[shape2-1]", "dask/array/tests/test_routines.py::test_ptp[shape3-2]", "dask/array/tests/test_routines.py::test_ptp[shape4--1]", "dask/array/tests/test_routines.py::test_diff[0-shape0-0]", "dask/array/tests/test_routines.py::test_diff[0-shape1-1]", "dask/array/tests/test_routines.py::test_diff[0-shape2-2]", "dask/array/tests/test_routines.py::test_diff[0-shape3--1]", "dask/array/tests/test_routines.py::test_diff[1-shape0-0]", "dask/array/tests/test_routines.py::test_diff[1-shape1-1]", "dask/array/tests/test_routines.py::test_diff[1-shape2-2]", "dask/array/tests/test_routines.py::test_diff[1-shape3--1]", "dask/array/tests/test_routines.py::test_diff[2-shape0-0]", "dask/array/tests/test_routines.py::test_diff[2-shape1-1]", "dask/array/tests/test_routines.py::test_diff[2-shape2-2]", 
"dask/array/tests/test_routines.py::test_diff[2-shape3--1]", "dask/array/tests/test_routines.py::test_ediff1d[None-None-shape0]", "dask/array/tests/test_routines.py::test_ediff1d[None-None-shape1]", "dask/array/tests/test_routines.py::test_ediff1d[0-0-shape0]", "dask/array/tests/test_routines.py::test_ediff1d[0-0-shape1]", "dask/array/tests/test_routines.py::test_ediff1d[to_end2-to_begin2-shape0]", "dask/array/tests/test_routines.py::test_ediff1d[to_end2-to_begin2-shape1]", "dask/array/tests/test_routines.py::test_topk", "dask/array/tests/test_routines.py::test_topk_k_bigger_than_chunk", "dask/array/tests/test_routines.py::test_bincount", "dask/array/tests/test_routines.py::test_bincount_with_weights", "dask/array/tests/test_routines.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg", "dask/array/tests/test_routines.py::test_digitize", "dask/array/tests/test_routines.py::test_histogram", "dask/array/tests/test_routines.py::test_histogram_alternative_bins_range", "dask/array/tests/test_routines.py::test_histogram_return_type", "dask/array/tests/test_routines.py::test_histogram_extra_args_and_shapes", "dask/array/tests/test_routines.py::test_cov", "dask/array/tests/test_routines.py::test_corrcoef", "dask/array/tests/test_routines.py::test_round", "dask/array/tests/test_routines.py::test_unique_kwargs[False-False-False]", "dask/array/tests/test_routines.py::test_unique_kwargs[False-False-True]", "dask/array/tests/test_routines.py::test_unique_kwargs[False-True-False]", "dask/array/tests/test_routines.py::test_unique_kwargs[False-True-True]", "dask/array/tests/test_routines.py::test_unique_kwargs[True-False-False]", "dask/array/tests/test_routines.py::test_unique_kwargs[True-False-True]", "dask/array/tests/test_routines.py::test_unique_kwargs[True-True-False]", "dask/array/tests/test_routines.py::test_unique_kwargs[True-True-True]", "dask/array/tests/test_routines.py::test_unique_rand[shape0-chunks0-0-10-23]", "dask/array/tests/test_routines.py::test_unique_rand[shape0-chunks0-0-10-796]", "dask/array/tests/test_routines.py::test_unique_rand[shape1-chunks1-0-10-23]", "dask/array/tests/test_routines.py::test_unique_rand[shape1-chunks1-0-10-796]", "dask/array/tests/test_routines.py::test_unique_rand[shape2-chunks2-0-10-23]", "dask/array/tests/test_routines.py::test_unique_rand[shape2-chunks2-0-10-796]", "dask/array/tests/test_routines.py::test_unique_rand[shape3-chunks3-0-10-23]", "dask/array/tests/test_routines.py::test_unique_rand[shape3-chunks3-0-10-796]", "dask/array/tests/test_routines.py::test_roll[None-3-chunks0]", "dask/array/tests/test_routines.py::test_roll[None-3-chunks1]", "dask/array/tests/test_routines.py::test_roll[None-7-chunks0]", "dask/array/tests/test_routines.py::test_roll[None-7-chunks1]", "dask/array/tests/test_routines.py::test_roll[None-9-chunks0]", "dask/array/tests/test_routines.py::test_roll[None-9-chunks1]", "dask/array/tests/test_routines.py::test_roll[None-shift3-chunks0]", "dask/array/tests/test_routines.py::test_roll[None-shift3-chunks1]", "dask/array/tests/test_routines.py::test_roll[None-shift4-chunks0]", "dask/array/tests/test_routines.py::test_roll[None-shift4-chunks1]", "dask/array/tests/test_routines.py::test_roll[0-3-chunks0]", "dask/array/tests/test_routines.py::test_roll[0-3-chunks1]", "dask/array/tests/test_routines.py::test_roll[0-7-chunks0]", "dask/array/tests/test_routines.py::test_roll[0-7-chunks1]", "dask/array/tests/test_routines.py::test_roll[0-9-chunks0]", "dask/array/tests/test_routines.py::test_roll[0-9-chunks1]", 
"dask/array/tests/test_routines.py::test_roll[0-shift3-chunks0]", "dask/array/tests/test_routines.py::test_roll[0-shift3-chunks1]", "dask/array/tests/test_routines.py::test_roll[0-shift4-chunks0]", "dask/array/tests/test_routines.py::test_roll[0-shift4-chunks1]", "dask/array/tests/test_routines.py::test_roll[1-3-chunks0]", "dask/array/tests/test_routines.py::test_roll[1-3-chunks1]", "dask/array/tests/test_routines.py::test_roll[1-7-chunks0]", "dask/array/tests/test_routines.py::test_roll[1-7-chunks1]", "dask/array/tests/test_routines.py::test_roll[1-9-chunks0]", "dask/array/tests/test_routines.py::test_roll[1-9-chunks1]", "dask/array/tests/test_routines.py::test_roll[1-shift3-chunks0]", "dask/array/tests/test_routines.py::test_roll[1-shift3-chunks1]", "dask/array/tests/test_routines.py::test_roll[1-shift4-chunks0]", "dask/array/tests/test_routines.py::test_roll[1-shift4-chunks1]", "dask/array/tests/test_routines.py::test_roll[-1-3-chunks0]", "dask/array/tests/test_routines.py::test_roll[-1-3-chunks1]", "dask/array/tests/test_routines.py::test_roll[-1-7-chunks0]", "dask/array/tests/test_routines.py::test_roll[-1-7-chunks1]", "dask/array/tests/test_routines.py::test_roll[-1-9-chunks0]", "dask/array/tests/test_routines.py::test_roll[-1-9-chunks1]", "dask/array/tests/test_routines.py::test_roll[-1-shift3-chunks0]", "dask/array/tests/test_routines.py::test_roll[-1-shift3-chunks1]", "dask/array/tests/test_routines.py::test_roll[-1-shift4-chunks0]", "dask/array/tests/test_routines.py::test_roll[-1-shift4-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis4-3-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis4-3-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis4-7-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis4-7-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis4-9-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis4-9-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis4-shift3-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis4-shift3-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis4-shift4-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis4-shift4-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis5-3-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis5-3-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis5-7-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis5-7-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis5-9-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis5-9-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis5-shift3-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis5-shift3-chunks1]", "dask/array/tests/test_routines.py::test_roll[axis5-shift4-chunks0]", "dask/array/tests/test_routines.py::test_roll[axis5-shift4-chunks1]", "dask/array/tests/test_routines.py::test_ravel", "dask/array/tests/test_routines.py::test_squeeze[None-True]", "dask/array/tests/test_routines.py::test_vstack", "dask/array/tests/test_routines.py::test_hstack", "dask/array/tests/test_routines.py::test_dstack", "dask/array/tests/test_routines.py::test_take", "dask/array/tests/test_routines.py::test_take_dask_from_numpy", "dask/array/tests/test_routines.py::test_compress", "dask/array/tests/test_routines.py::test_extract", "dask/array/tests/test_routines.py::test_isnull", "dask/array/tests/test_routines.py::test_isclose", "dask/array/tests/test_routines.py::test_allclose", "dask/array/tests/test_routines.py::test_choose", 
"dask/array/tests/test_routines.py::test_argwhere", "dask/array/tests/test_routines.py::test_argwhere_obj", "dask/array/tests/test_routines.py::test_argwhere_str", "dask/array/tests/test_routines.py::test_where", "dask/array/tests/test_routines.py::test_where_scalar_dtype", "dask/array/tests/test_routines.py::test_where_bool_optimization", "dask/array/tests/test_routines.py::test_where_nonzero", "dask/array/tests/test_routines.py::test_where_incorrect_args", "dask/array/tests/test_routines.py::test_count_nonzero", "dask/array/tests/test_routines.py::test_count_nonzero_axis[None]", "dask/array/tests/test_routines.py::test_count_nonzero_axis[0]", "dask/array/tests/test_routines.py::test_count_nonzero_axis[axis2]", "dask/array/tests/test_routines.py::test_count_nonzero_axis[axis3]", "dask/array/tests/test_routines.py::test_count_nonzero_obj", "dask/array/tests/test_routines.py::test_count_nonzero_obj_axis[None]", "dask/array/tests/test_routines.py::test_count_nonzero_obj_axis[0]", "dask/array/tests/test_routines.py::test_count_nonzero_obj_axis[axis2]", "dask/array/tests/test_routines.py::test_count_nonzero_obj_axis[axis3]", "dask/array/tests/test_routines.py::test_count_nonzero_str", "dask/array/tests/test_routines.py::test_flatnonzero", "dask/array/tests/test_routines.py::test_nonzero", "dask/array/tests/test_routines.py::test_nonzero_method", "dask/array/tests/test_routines.py::test_coarsen", "dask/array/tests/test_routines.py::test_coarsen_with_excess", "dask/array/tests/test_routines.py::test_insert", "dask/array/tests/test_routines.py::test_multi_insert", "dask/array/tests/test_routines.py::test_result_type", "dask/bag/tests/test_bag.py::test_Bag", "dask/bag/tests/test_bag.py::test_keys", "dask/bag/tests/test_bag.py::test_bag_map", "dask/bag/tests/test_bag.py::test_map_method", "dask/bag/tests/test_bag.py::test_starmap", "dask/bag/tests/test_bag.py::test_filter", "dask/bag/tests/test_bag.py::test_remove", "dask/bag/tests/test_bag.py::test_iter", "dask/bag/tests/test_bag.py::test_repr[str]", "dask/bag/tests/test_bag.py::test_repr[repr]", "dask/bag/tests/test_bag.py::test_pluck", "dask/bag/tests/test_bag.py::test_pluck_with_default", "dask/bag/tests/test_bag.py::test_unzip", "dask/bag/tests/test_bag.py::test_fold", "dask/bag/tests/test_bag.py::test_distinct", "dask/bag/tests/test_bag.py::test_frequencies", "dask/bag/tests/test_bag.py::test_topk", "dask/bag/tests/test_bag.py::test_topk_with_non_callable_key[1]", "dask/bag/tests/test_bag.py::test_topk_with_non_callable_key[2]", "dask/bag/tests/test_bag.py::test_topk_with_multiarg_lambda", "dask/bag/tests/test_bag.py::test_lambdas", "dask/bag/tests/test_bag.py::test_reductions", "dask/bag/tests/test_bag.py::test_reduction_names", "dask/bag/tests/test_bag.py::test_tree_reductions", "dask/bag/tests/test_bag.py::test_aggregation[1]", "dask/bag/tests/test_bag.py::test_aggregation[3]", "dask/bag/tests/test_bag.py::test_aggregation[4]", "dask/bag/tests/test_bag.py::test_non_splittable_reductions[1]", "dask/bag/tests/test_bag.py::test_non_splittable_reductions[10]", "dask/bag/tests/test_bag.py::test_std", "dask/bag/tests/test_bag.py::test_var", "dask/bag/tests/test_bag.py::test_join[identity]", "dask/bag/tests/test_bag.py::test_foldby", "dask/bag/tests/test_bag.py::test_foldby_tree_reduction", "dask/bag/tests/test_bag.py::test_map_partitions", "dask/bag/tests/test_bag.py::test_map_partitions_args_kwargs", "dask/bag/tests/test_bag.py::test_random_sample_size", "dask/bag/tests/test_bag.py::test_random_sample_prob_range", 
"dask/bag/tests/test_bag.py::test_random_sample_repeated_computation", "dask/bag/tests/test_bag.py::test_random_sample_different_definitions", "dask/bag/tests/test_bag.py::test_random_sample_random_state", "dask/bag/tests/test_bag.py::test_lazify_task", "dask/bag/tests/test_bag.py::test_lazify", "dask/bag/tests/test_bag.py::test_inline_singleton_lists", "dask/bag/tests/test_bag.py::test_take", "dask/bag/tests/test_bag.py::test_take_npartitions", "dask/bag/tests/test_bag.py::test_take_npartitions_warn", "dask/bag/tests/test_bag.py::test_map_is_lazy", "dask/bag/tests/test_bag.py::test_can_use_dict_to_make_concrete", "dask/bag/tests/test_bag.py::test_read_text", "dask/bag/tests/test_bag.py::test_read_text_large", "dask/bag/tests/test_bag.py::test_read_text_encoding", "dask/bag/tests/test_bag.py::test_read_text_large_gzip", "dask/bag/tests/test_bag.py::test_from_sequence", "dask/bag/tests/test_bag.py::test_from_long_sequence", "dask/bag/tests/test_bag.py::test_product", "dask/bag/tests/test_bag.py::test_partition_collect", "dask/bag/tests/test_bag.py::test_groupby", "dask/bag/tests/test_bag.py::test_groupby_with_indexer", "dask/bag/tests/test_bag.py::test_groupby_with_npartitions_changed", "dask/bag/tests/test_bag.py::test_concat", "dask/bag/tests/test_bag.py::test_flatten", "dask/bag/tests/test_bag.py::test_concat_after_map", "dask/bag/tests/test_bag.py::test_args", "dask/bag/tests/test_bag.py::test_to_dataframe", "dask/bag/tests/test_bag.py::test_to_textfiles[gz-GzipFile]", "dask/bag/tests/test_bag.py::test_to_textfiles[-open]", "dask/bag/tests/test_bag.py::test_to_textfiles[bz2-BZ2File]", "dask/bag/tests/test_bag.py::test_to_textfiles_name_function_preserves_order", "dask/bag/tests/test_bag.py::test_to_textfiles_name_function_warn", "dask/bag/tests/test_bag.py::test_to_textfiles_encoding", "dask/bag/tests/test_bag.py::test_to_textfiles_inputs", "dask/bag/tests/test_bag.py::test_to_textfiles_endlines", "dask/bag/tests/test_bag.py::test_string_namespace", "dask/bag/tests/test_bag.py::test_string_namespace_with_unicode", "dask/bag/tests/test_bag.py::test_str_empty_split", "dask/bag/tests/test_bag.py::test_map_with_iterator_function", "dask/bag/tests/test_bag.py::test_ensure_compute_output_is_concrete", "dask/bag/tests/test_bag.py::test_bag_class_extend", "dask/bag/tests/test_bag.py::test_gh715", "dask/bag/tests/test_bag.py::test_bag_compute_forward_kwargs", "dask/bag/tests/test_bag.py::test_to_delayed", "dask/bag/tests/test_bag.py::test_to_delayed_optimize_graph", "dask/bag/tests/test_bag.py::test_from_delayed", "dask/bag/tests/test_bag.py::test_from_delayed_iterator", "dask/bag/tests/test_bag.py::test_range", "dask/bag/tests/test_bag.py::test_zip[1]", "dask/bag/tests/test_bag.py::test_zip[7]", "dask/bag/tests/test_bag.py::test_zip[10]", "dask/bag/tests/test_bag.py::test_zip[28]", "dask/bag/tests/test_bag.py::test_repartition[1-1]", "dask/bag/tests/test_bag.py::test_repartition[1-2]", "dask/bag/tests/test_bag.py::test_repartition[1-7]", "dask/bag/tests/test_bag.py::test_repartition[1-11]", "dask/bag/tests/test_bag.py::test_repartition[1-23]", "dask/bag/tests/test_bag.py::test_repartition[2-1]", "dask/bag/tests/test_bag.py::test_repartition[2-2]", "dask/bag/tests/test_bag.py::test_repartition[2-7]", "dask/bag/tests/test_bag.py::test_repartition[2-11]", "dask/bag/tests/test_bag.py::test_repartition[2-23]", "dask/bag/tests/test_bag.py::test_repartition[5-1]", "dask/bag/tests/test_bag.py::test_repartition[5-2]", "dask/bag/tests/test_bag.py::test_repartition[5-7]", 
"dask/bag/tests/test_bag.py::test_repartition[5-11]", "dask/bag/tests/test_bag.py::test_repartition[5-23]", "dask/bag/tests/test_bag.py::test_repartition[12-1]", "dask/bag/tests/test_bag.py::test_repartition[12-2]", "dask/bag/tests/test_bag.py::test_repartition[12-7]", "dask/bag/tests/test_bag.py::test_repartition[12-11]", "dask/bag/tests/test_bag.py::test_repartition[12-23]", "dask/bag/tests/test_bag.py::test_repartition[23-1]", "dask/bag/tests/test_bag.py::test_repartition[23-2]", "dask/bag/tests/test_bag.py::test_repartition[23-7]", "dask/bag/tests/test_bag.py::test_repartition[23-11]", "dask/bag/tests/test_bag.py::test_repartition[23-23]", "dask/bag/tests/test_bag.py::test_repartition_names", "dask/bag/tests/test_bag.py::test_accumulate", "dask/bag/tests/test_bag.py::test_groupby_tasks", "dask/bag/tests/test_bag.py::test_groupby_tasks_names", "dask/bag/tests/test_bag.py::test_groupby_tasks_2[1000-20-100]", "dask/bag/tests/test_bag.py::test_groupby_tasks_2[12345-234-1042]", "dask/bag/tests/test_bag.py::test_groupby_tasks_3", "dask/bag/tests/test_bag.py::test_to_textfiles_empty_partitions", "dask/bag/tests/test_bag.py::test_reduction_empty", "dask/bag/tests/test_bag.py::test_reduction_empty_aggregate[1]", "dask/bag/tests/test_bag.py::test_reduction_empty_aggregate[2]", "dask/bag/tests/test_bag.py::test_reduction_empty_aggregate[4]", "dask/bag/tests/test_bag.py::test_reduction_with_non_comparable_objects", "dask/bag/tests/test_bag.py::test_empty", "dask/bag/tests/test_bag.py::test_bag_picklable", "dask/bag/tests/test_bag.py::test_msgpack_unicode", "dask/bag/tests/test_bag.py::test_bag_with_single_callable", "dask/bag/tests/test_bag.py::test_optimize_fuse_keys", "dask/bag/tests/test_bag.py::test_reductions_are_lazy", "dask/bag/tests/test_bag.py::test_repeated_groupby", "dask/bag/tests/test_bag.py::test_temporary_directory", "dask/bag/tests/test_bag.py::test_empty_bag", "dask/bag/tests/test_bag.py::test_bag_paths" ]
[]
BSD 3-Clause "New" or "Revised" License
2,274
[ "dask/bag/core.py", "dask/array/routines.py", "docs/source/index.rst", "dask/array/core.py", "docs/source/changelog.rst" ]
[ "dask/bag/core.py", "dask/array/routines.py", "docs/source/index.rst", "dask/array/core.py", "docs/source/changelog.rst" ]
elastic__rally-430
188495b1fdcd8f9e36625ca2b10f1c9659750290
2018-03-08 13:23:20
a5408e0d0d07b271b509df8057a7c73303604c10
diff --git a/esrally/config.py b/esrally/config.py index a78ebf77..b8e0d79d 100644 --- a/esrally/config.py +++ b/esrally/config.py @@ -447,6 +447,7 @@ class ConfigFactory: # the Elasticsearch directory is just the last path component (relative to the source root directory) config["source"]["elasticsearch.src.subdir"] = io.basename(source_dir) + if gradle_bin: config["build"] = {} config["build"]["gradle.bin"] = gradle_bin diff --git a/esrally/track/loader.py b/esrally/track/loader.py index 513b1e4f..d04de55e 100644 --- a/esrally/track/loader.py +++ b/esrally/track/loader.py @@ -757,9 +757,10 @@ class TrackSpecificationReader: Creates a track instances based on its parsed JSON description. """ - def __init__(self, override_auto_manage_indices=None, source=io.FileSource): + def __init__(self, override_auto_manage_indices=None, track_params=None, source=io.FileSource): self.name = None self.override_auto_manage_indices = override_auto_manage_indices + self.track_params = track_params if track_params else {} self.source = source self.index_op_type_warning_issued = False @@ -770,7 +771,7 @@ class TrackSpecificationReader: meta_data = self._r(track_specification, "meta", mandatory=False) indices = [self._create_index(idx, mapping_dir) for idx in self._r(track_specification, "indices", mandatory=False, default_value=[])] - templates = [self._create_template(tpl, mapping_dir) + templates = [self._create_index_template(tpl, mapping_dir) for tpl in self._r(track_specification, "templates", mandatory=False, default_value=[])] corpora = self._create_corpora(self._r(track_specification, "corpora", mandatory=False, default_value=[]), indices) # TODO: Remove this in Rally 0.10.0 @@ -816,7 +817,7 @@ class TrackSpecificationReader: body_file = self._r(index_spec, "body", mandatory=False) if body_file: with self.source(os.path.join(mapping_dir, body_file), "rt") as f: - body = json.load(f) + body = self._load_template(f.read(), index_name) else: body = None @@ -832,15 +833,25 @@ class TrackSpecificationReader: return track.Index(name=index_name, body=body, auto_managed=auto_managed, types=types) - def _create_template(self, tpl_spec, mapping_dir): + def _create_index_template(self, tpl_spec, mapping_dir): name = self._r(tpl_spec, "name") index_pattern = self._r(tpl_spec, "index-pattern") delete_matching_indices = self._r(tpl_spec, "delete-matching-indices", mandatory=False, default_value=True) template_file = os.path.join(mapping_dir, self._r(tpl_spec, "template")) with self.source(template_file, "rt") as f: - template_content = json.load(f) + template_content = self._load_template(f.read(), name) return track.IndexTemplate(name, index_pattern, template_content, delete_matching_indices) + def _load_template(self, contents, description): + try: + rendered = render_template(loader=jinja2.DictLoader({"default": contents}), + template_name="default", + template_vars=self.track_params) + return json.loads(rendered) + except (json.JSONDecodeError, jinja2.exceptions.TemplateError) as e: + logger.exception("Could not load file template for %s." % description) + raise TrackSyntaxError("Could not load file template for '%s'" % description, str(e)) + def _create_corpora(self, corpora_specs, indices): document_corpora = [] known_corpora_names = set()
Allow to use track parameters also in index / template definitions Currently index definitions are read as is. However, it can be useful to allow track parameters also for index definition files. For consistency, we should allow the same for index templates. Technically, this means that we will treat index definition files also as Jinja templates.
elastic/rally
diff --git a/tests/track/loader_test.py b/tests/track/loader_test.py index 4f9e524e..ea6b4996 100644 --- a/tests/track/loader_test.py +++ b/tests/track/loader_test.py @@ -1426,8 +1426,20 @@ class TrackSpecificationReaderTests(TestCase): } ] } - reader = loader.TrackSpecificationReader(source=io.DictStringFileSourceFactory({ - "/mappings/body.json": ['{"mappings": {"main": "empty-for-test", "secondary": "empty-for-test"}}'] + reader = loader.TrackSpecificationReader( + track_params={"number_of_shards": 3}, + source=io.DictStringFileSourceFactory({ + "/mappings/body.json": [""" + { + "settings": { + "number_of_shards": {{ number_of_shards }} + }, + "mappings": { + "main": "empty-for-test", + "secondary": "empty-for-test" + } + } + """] })) resulting_track = reader("unittest", track_specification, "/mappings") self.assertEqual("unittest", resulting_track.name) @@ -1435,7 +1447,16 @@ class TrackSpecificationReaderTests(TestCase): # indices self.assertEqual(1, len(resulting_track.indices)) self.assertEqual("index-historical", resulting_track.indices[0].name) - self.assertDictEqual({"mappings": {"main": "empty-for-test", "secondary": "empty-for-test"}}, resulting_track.indices[0].body) + self.assertDictEqual({ + "settings": { + "number_of_shards": 3 + }, + "mappings": + { + "main": "empty-for-test", + "secondary": "empty-for-test" + } + }, resulting_track.indices[0].body) self.assertEqual(2, len(resulting_track.indices[0].types)) self.assertEqual("main", resulting_track.indices[0].types[0].name) self.assertEqual("secondary", resulting_track.indices[0].types[1].name) @@ -1492,8 +1513,17 @@ class TrackSpecificationReaderTests(TestCase): "operations": [], "challenges": [] } - reader = loader.TrackSpecificationReader(source=io.DictStringFileSourceFactory({ - "/mappings/default-template.json": ['{"some-index-template": "empty-for-test"}'], + reader = loader.TrackSpecificationReader( + track_params={"index_pattern": "*"}, + source=io.DictStringFileSourceFactory({ + "/mappings/default-template.json": [""" + { + "index_patterns": [ "{{index_pattern}}"], + "settings": { + "number_of_shards": {{ number_of_shards | default(1) }} + } + } + """], })) resulting_track = reader("unittest", track_specification, "/mappings") self.assertEqual("unittest", resulting_track.name) @@ -1502,7 +1532,13 @@ class TrackSpecificationReaderTests(TestCase): self.assertEqual(1, len(resulting_track.templates)) self.assertEqual("my-index-template", resulting_track.templates[0].name) self.assertEqual("*", resulting_track.templates[0].pattern) - self.assertEqual({"some-index-template": "empty-for-test"}, resulting_track.templates[0].content) + self.assertDictEqual( + { + "index_patterns": ["*"], + "settings": { + "number_of_shards": 1 + } + }, resulting_track.templates[0].content) self.assertEqual(0, len(resulting_track.challenges)) def test_types_are_optional_for_user_managed_indices(self):
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-benchmark" ], "pre_install": [ "apt-get update", "apt-get install -y gcc python3-pip python3-dev" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 elasticsearch==6.0.0 -e git+https://github.com/elastic/rally.git@188495b1fdcd8f9e36625ca2b10f1c9659750290#egg=esrally importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==2.9.5 jsonschema==2.5.1 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==5.4.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work py-cpuinfo==3.2.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-benchmark==3.4.1 tabulate==0.8.1 thespian==3.9.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.22 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: rally channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - elasticsearch==6.0.0 - jinja2==2.9.5 - jsonschema==2.5.1 - markupsafe==2.0.1 - psutil==5.4.0 - py-cpuinfo==3.2.0 - pytest-benchmark==3.4.1 - tabulate==0.8.1 - thespian==3.9.2 - urllib3==1.22 prefix: /opt/conda/envs/rally
[ "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_valid_track_specification", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_valid_track_specification_with_index_template" ]
[]
[ "tests/track/loader_test.py::SimpleTrackRepositoryTests::test_track_from_directory", "tests/track/loader_test.py::SimpleTrackRepositoryTests::test_track_from_directory_without_track", "tests/track/loader_test.py::SimpleTrackRepositoryTests::test_track_from_file", "tests/track/loader_test.py::SimpleTrackRepositoryTests::test_track_from_file_but_not_json", "tests/track/loader_test.py::SimpleTrackRepositoryTests::test_track_from_named_pipe", "tests/track/loader_test.py::SimpleTrackRepositoryTests::test_track_from_non_existing_path", "tests/track/loader_test.py::GitRepositoryTests::test_track_from_existing_repo", "tests/track/loader_test.py::TrackPreparationTests::test_decompresses_if_archive_available", "tests/track/loader_test.py::TrackPreparationTests::test_does_nothing_if_document_file_available", "tests/track/loader_test.py::TrackPreparationTests::test_download_document_archive_if_no_file_available", "tests/track/loader_test.py::TrackPreparationTests::test_download_document_file_if_no_file_available", "tests/track/loader_test.py::TrackPreparationTests::test_prepare_bundled_document_set_decompresses_compressed_docs", "tests/track/loader_test.py::TrackPreparationTests::test_prepare_bundled_document_set_does_nothing_if_no_document_files", "tests/track/loader_test.py::TrackPreparationTests::test_prepare_bundled_document_set_error_compressed_docs_wrong_size", "tests/track/loader_test.py::TrackPreparationTests::test_prepare_bundled_document_set_if_document_file_available", "tests/track/loader_test.py::TrackPreparationTests::test_prepare_bundled_document_set_uncompressed_docs_wrong_size", "tests/track/loader_test.py::TrackPreparationTests::test_raise_download_error_if_no_url_provided_and_file_missing", "tests/track/loader_test.py::TrackPreparationTests::test_raise_download_error_if_no_url_provided_and_wrong_file_size", "tests/track/loader_test.py::TrackPreparationTests::test_raise_download_error_if_offline", "tests/track/loader_test.py::TrackPreparationTests::test_raise_download_error_no_test_mode_file", "tests/track/loader_test.py::TrackPreparationTests::test_raise_download_error_on_connection_problems", "tests/track/loader_test.py::TrackPreparationTests::test_raise_error_if_compressed_does_not_contain_expected_document_file", "tests/track/loader_test.py::TrackPreparationTests::test_raise_error_on_wrong_uncompressed_file_size", "tests/track/loader_test.py::TemplateRenderTests::test_render_simple_template", "tests/track/loader_test.py::TemplateRenderTests::test_render_template_with_external_variables", "tests/track/loader_test.py::TemplateRenderTests::test_render_template_with_globbing", "tests/track/loader_test.py::TemplateRenderTests::test_render_template_with_variables", "tests/track/loader_test.py::TrackPostProcessingTests::test_creates_index_auto_management_operations", "tests/track/loader_test.py::TrackPostProcessingTests::test_post_processes_track_spec", "tests/track/loader_test.py::TrackPathTests::test_sets_absolute_path", "tests/track/loader_test.py::TrackFilterTests::test_create_filters_from_empty_included_tasks", "tests/track/loader_test.py::TrackFilterTests::test_create_filters_from_mixed_included_tasks", "tests/track/loader_test.py::TrackFilterTests::test_filters_tasks", "tests/track/loader_test.py::TrackFilterTests::test_rejects_invalid_syntax", "tests/track/loader_test.py::TrackFilterTests::test_rejects_unknown_filter_type", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_at_least_one_default_challenge", 
"tests/track/loader_test.py::TrackSpecificationReaderTests::test_can_read_track_info", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_description_is_optional", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_document_count_mandatory_if_file_present", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_exactly_one_default_challenge", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_inline_operations", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_not_more_than_one_default_challenge_possible", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parallel_tasks_with_completed_by_set", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parallel_tasks_with_completed_by_set_multiple_tasks_match", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parallel_tasks_with_completed_by_set_no_task_matches", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parallel_tasks_with_default_clients_does_not_propagate", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parallel_tasks_with_default_values", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_challenge_and_challenges_are_defined", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_duplicate_explicit_task_names", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_duplicate_implicit_task_names", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_missing_challenge_or_challenges", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_unique_task_names", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_with_mixed_warmup_iterations_and_measurement", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_parse_with_mixed_warmup_time_period_and_iterations", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_selects_sole_challenge_implicitly_as_default", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_supports_target_interval", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_supports_target_throughput", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_types_are_optional_for_user_managed_indices", "tests/track/loader_test.py::TrackSpecificationReaderTests::test_unique_challenge_names" ]
[]
Apache License 2.0
2,275
[ "esrally/config.py", "esrally/track/loader.py" ]
[ "esrally/config.py", "esrally/track/loader.py" ]
pypa__setuptools_scm-220
632b02b8ad1f10c79d36c03502069d74c9686c73
2018-03-08 19:18:48
0373c11d2c8968a857ff06c94f101abebf825507
avirshup: Here's my stab at a fix for #219. Please let me know if this seems reasonable and/or if there are any changes that should be made.
diff --git a/setuptools_scm/hg.py b/setuptools_scm/hg.py index 9322fb3..0ba1774 100644 --- a/setuptools_scm/hg.py +++ b/setuptools_scm/hg.py @@ -8,8 +8,14 @@ FILES_COMMAND = 'hg locate -I .' def _hg_tagdist_normalize_tagcommit(root, tag, dist, node): dirty = node.endswith('+') node = 'h' + node.strip('+') - revset = ("(branch(.) and tag({tag!r})::. and file('re:^(?!\.hgtags).*$')" - " - tag({tag!r}))").format(tag=tag) + + # Detect changes since the specified tag + revset = ("(branch(.)" # look for revisions in this branch only + " and tag({tag!r})::." # after the last tag + # ignore commits that only modify .hgtags and nothing else: + " and (merge() or file('re:^(?!\.hgtags).*$'))" + " and not tag({tag!r}))" # ignore the tagged commit itself + ).format(tag=tag) if tag != '0.0': commits = do(['hg', 'log', '-r', revset, '--template', '{node|short}'], root)
Regression: hg repos with setuptools_scm>=1.15 are missing dev version bumps `setuptools_scm` 1.15.* is reporting incorrect version numbers for my hg repositories. Specifically, a merge commit that follows a tag commit does not seem to trigger the version bump like I would expect. In the screenshot below, for instance, I would expect the tip to be version `1.1.dev[N]+[sha]`. However, versions 1.15.* report the tip's version as `1.0`. To reproduce: ```bash mkdir hgmergetest && cd hgmergetest && hg init # create initial commit and tag it touch a && hg add a && hg commit -m "initial commit" hg tag 1.0 # create a branch hg branch branch1 touch b && hg add b && hg commit -m "create branch1" # merge the branch into default hg update default && hg merge branch1 && hg commit -m "merge branch1 into default" ``` Expected behavior (setuptools_scm 1.13 and 1.14): ```bash > pip install "setuptools_scm<1.15" > python -m setuptools_scm Guessed Version 1.1.dev3+n8dce1535e70a ``` Behavior with `1.15.*`: ```bash > pip install setuptools_scm==1.15.7 > python -m setuptools_scm Guessed Version 1.0 ``` ![image](https://user-images.githubusercontent.com/9388007/37140764-b80a0c36-2267-11e8-87f8-bd52d199140c.png)
pypa/setuptools_scm
diff --git a/testing/test_mercurial.py b/testing/test_mercurial.py index 1fe6841..1d91444 100644 --- a/testing/test_mercurial.py +++ b/testing/test_mercurial.py @@ -108,3 +108,47 @@ def test_version_in_merge(wd): def test_parse_no_worktree(tmpdir): ret = parse(str(tmpdir)) assert ret is None + + [email protected] +def version_1_0(wd): + wd('hg branch default') + wd.commit_testfile() + wd('hg tag 1.0 -u test -d "0 0"') + return wd + + [email protected] +def pre_merge_commit_after_tag(wd, version_1_0): + wd('hg branch testbranch') + wd.write('branchfile', 'branchtext') + wd(wd.add_command) + wd.commit() + wd('hg update default') + wd('hg merge testbranch') + return wd + + [email protected]("pre_merge_commit_after_tag") +def test_version_bump_before_merge_commit(wd): + assert wd.version.startswith('1.1.dev1+') + + [email protected](219) [email protected]("pre_merge_commit_after_tag") +def test_version_bump_from_merge_commit(wd): + wd.commit() + assert wd.version.startswith('1.1.dev3+') # issue 219 + + [email protected]("version_1_0") +def test_version_bump_from_commit_including_hgtag_mods(wd): + """ Test the case where a commit includes changes to .hgtags and other files + """ + with wd.cwd.join('.hgtags').open('a') as tagfile: + tagfile.write('0 0\n') + wd.write('branchfile', 'branchtext') + wd(wd.add_command) + assert wd.version.startswith('1.1.dev1+') # bump from dirty version + wd.commit() # commits both the testfile _and_ .hgtags + assert wd.version.startswith('1.1.dev2+')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_media" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.15
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work -e git+https://github.com/pypa/setuptools_scm.git@632b02b8ad1f10c79d36c03502069d74c9686c73#egg=setuptools_scm tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
name: setuptools_scm channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/setuptools_scm
[ "testing/test_mercurial.py::test_version_bump_from_merge_commit" ]
[ "testing/test_mercurial.py::test_archival_to_version[1.2.2.dev0-data4]" ]
[ "testing/test_mercurial.py::test_archival_to_version[0.0-data0]", "testing/test_mercurial.py::test_archival_to_version[1.0-data1]", "testing/test_mercurial.py::test_archival_to_version[1.1.dev3+h000000000000-data2]", "testing/test_mercurial.py::test_archival_to_version[1.2.2-data3]", "testing/test_mercurial.py::test_find_files_stop_at_root_hg", "testing/test_mercurial.py::test_version_from_hg_id", "testing/test_mercurial.py::test_version_from_archival", "testing/test_mercurial.py::test_version_in_merge", "testing/test_mercurial.py::test_parse_no_worktree", "testing/test_mercurial.py::test_version_bump_before_merge_commit", "testing/test_mercurial.py::test_version_bump_from_commit_including_hgtag_mods" ]
[]
MIT License
2,276
[ "setuptools_scm/hg.py" ]
[ "setuptools_scm/hg.py" ]
tableau__server-client-python-274
86e463810be80c2b562845f7c14b775d604f2a86
2018-03-09 02:04:54
59bf8920730e9877675c31885d538748e7e36bfe
shinchris: :rocket:
diff --git a/samples/download_view_image.py b/samples/download_view_image.py index 2da2320..b95a862 100644 --- a/samples/download_view_image.py +++ b/samples/download_view_image.py @@ -43,7 +43,7 @@ def main(): tableau_auth = TSC.TableauAuth(args.username, password, site_id=site_id) server = TSC.Server(args.server) # The new endpoint was introduced in Version 2.5 - server.version = 2.5 + server.version = "2.5" with server.auth.sign_in(tableau_auth): # Step 2: Query for the view that we want an image of diff --git a/tableauserverclient/server/endpoint/endpoint.py b/tableauserverclient/server/endpoint/endpoint.py index deaa94a..e78b2e0 100644 --- a/tableauserverclient/server/endpoint/endpoint.py +++ b/tableauserverclient/server/endpoint/endpoint.py @@ -27,6 +27,17 @@ class Endpoint(object): return headers + @staticmethod + def _safe_to_log(server_response): + '''Checks if the server_response content is not xml (eg binary image or zip) + and and replaces it with a constant + ''' + ALLOWED_CONTENT_TYPES = ('application/xml',) + if server_response.headers.get('Content-Type', None) not in ALLOWED_CONTENT_TYPES: + return '[Truncated File Contents]' + else: + return server_response.content + def _make_request(self, method, url, content=None, request_object=None, auth_token=None, content_type=None, parameters=None): if request_object is not None: @@ -50,7 +61,7 @@ class Endpoint(object): return server_response def _check_status(self, server_response): - logger.debug(server_response.content) + logger.debug(self._safe_to_log(server_response)) if server_response.status_code not in Success_codes: raise ServerResponseError.from_response(server_response.content, self.parent_srv.namespace)
This log line is overly chatty https://github.com/tableau/server-client-python/blob/608aa7694d0560ea3c8c37b10127b11207e56e8d/tableauserverclient/server/endpoint/endpoint.py#L53 When using server client python to download workbooks or data sources and you've got log_level=Debug, this log line ends up blowing up your logs. It outputs the hexadecimal representation of the entire file you're downloading, which is not very helpful and explodes your log size. Can we remove this line, or only log out the response contents when you're not using the endpoint to download a file?
tableau/server-client-python
diff --git a/test/test_regression_tests.py b/test/test_regression_tests.py index 95bdcea..8958c3c 100644 --- a/test/test_regression_tests.py +++ b/test/test_regression_tests.py @@ -1,8 +1,23 @@ import unittest import tableauserverclient.server.request_factory as factory +from tableauserverclient.server.endpoint import Endpoint class BugFix257(unittest.TestCase): def test_empty_request_works(self): result = factory.EmptyRequest().empty_req() self.assertEqual(b'<tsRequest />', result) + + +class BugFix273(unittest.TestCase): + def test_binary_log_truncated(self): + + class FakeResponse(object): + + headers = {'Content-Type': 'application/octet-stream'} + content = b'\x1337' * 1000 + status_code = 200 + + server_response = FakeResponse() + + self.assertEqual(Endpoint._safe_to_log(server_response), '[Truncated File Contents]')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2021.5.30 charset-normalizer==2.0.12 idna==3.10 requests==2.27.1 -e git+https://github.com/tableau/server-client-python.git@86e463810be80c2b562845f7c14b775d604f2a86#egg=tableauserverclient urllib3==1.26.20
name: server-client-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - idna==3.10 - requests==2.27.1 - urllib3==1.26.20 prefix: /opt/conda/envs/server-client-python
[ "test/test_regression_tests.py::BugFix273::test_binary_log_truncated" ]
[]
[ "test/test_regression_tests.py::BugFix257::test_empty_request_works" ]
[]
MIT License
2,277
[ "samples/download_view_image.py", "tableauserverclient/server/endpoint/endpoint.py" ]
[ "samples/download_view_image.py", "tableauserverclient/server/endpoint/endpoint.py" ]
elastic__rally-433
fabefdd6464d996e830806bd5725afe49af12ae4
2018-03-09 07:22:26
a5408e0d0d07b271b509df8057a7c73303604c10
diff --git a/docs/metrics.rst b/docs/metrics.rst index 99f804b3..9f952e04 100644 --- a/docs/metrics.rst +++ b/docs/metrics.rst @@ -11,6 +11,10 @@ Here is a typical metrics record:: { "environment": "nightly", + "trial-timestamp": "20160421T042749Z", + "trial-id": "6ebc6e53-ee20-4b0c-99b4-09697987e9f4", + "@timestamp": 1461213093093, + "relative-time": 10507328, "track": "geonames", "track-params": { "shard-count": 3 @@ -18,9 +22,6 @@ Here is a typical metrics record:: "challenge": "append-no-conflicts", "car": "defaults", "sample-type": "normal", - "trial-timestamp": "20160421T042749Z", - "@timestamp": 1461213093093, - "relative-time": 10507328, "name": "throughput", "value": 27385, "unit": "docs/s", @@ -38,7 +39,7 @@ Here is a typical metrics record:: "node_name": "rally-node0", "source_revision": "a6c0a81", "distribution_version": "5.0.0-SNAPSHOT", - "tag_reference": "Github ticket 1234", + "tag_reference": "Github ticket 1234" } } @@ -66,7 +67,12 @@ Rally runs warmup trials but records all samples. Normally, we are just interest trial-timestamp ~~~~~~~~~~~~~~~ -A constant timestamp (always in UTC) that is determined when Rally is invoked. It is intended to group all samples of a benchmark trial. +A constant timestamp (always in UTC) that is determined when Rally is invoked. + +trial-id +~~~~~~~~ + +A UUID that changes on every invocation of Rally. It is intended to group all samples of a benchmark trial. @timestamp ~~~~~~~~~~ diff --git a/esrally/driver/driver.py b/esrally/driver/driver.py index 9b9defbb..cfb50622 100644 --- a/esrally/driver/driver.py +++ b/esrally/driver/driver.py @@ -326,9 +326,12 @@ class Driver: self.challenge = select_challenge(self.config, self.track) self.quiet = self.config.opts("system", "quiet.mode", mandatory=False, default_value=False) self.throughput_calculator = ThroughputCalculator() - # create - but do not yet open - the metrics store as an internal timer starts when we open it. - cls = metrics.metrics_store_class(self.config) - self.metrics_store = cls(cfg=self.config, meta_info=metrics_meta_info, lap=lap) + self.metrics_store = metrics.metrics_store(cfg=self.config, + track=self.track.name, + challenge=self.challenge.name, + meta_info=metrics_meta_info, + lap=lap, + read_only=False) for host in self.config.opts("driver", "load_driver_hosts"): if host != "localhost": self.load_driver_hosts.append(net.resolve(host)) @@ -339,13 +342,9 @@ class Driver: self.target.on_prepare_track(preps, self.config, self.track) def after_track_prepared(self): - track_name = self.track.name - challenge_name = self.challenge.name - car_name = self.config.opts("mechanic", "car.names") - - logger.info("Benchmark for track [%s], challenge [%s] and car %s is about to start." % (track_name, challenge_name, car_name)) - invocation = self.config.opts("system", "time.start") - self.metrics_store.open(invocation, track_name, challenge_name, car_name) + logger.info("Benchmark is about to start.") + # ensure relative time starts when the benchmark starts. 
+ self.reset_relative_time() allocator = Allocator(self.challenge.schedule) self.allocations = allocator.allocations diff --git a/esrally/metrics.py b/esrally/metrics.py index 4c95cf66..c3d23e5a 100644 --- a/esrally/metrics.py +++ b/esrally/metrics.py @@ -190,7 +190,7 @@ class MetaInfoScope(Enum): """ -def metrics_store(cfg, read_only=True, invocation=None, track=None, challenge=None, car=None): +def metrics_store(cfg, read_only=True, track=None, challenge=None, car=None, meta_info=None, lap=None): """ Creates a proper metrics store based on the current configuration. @@ -199,13 +199,14 @@ def metrics_store(cfg, read_only=True, invocation=None, track=None, challenge=No :return: A metrics store implementation. """ cls = metrics_store_class(cfg) - store = cls(cfg) + store = cls(cfg=cfg, meta_info=meta_info, lap=lap) logger.info("Creating %s" % str(store)) - selected_invocation = cfg.opts("system", "time.start") if invocation is None else invocation + trial_id = cfg.opts("system", "trial.id") + trial_timestamp = cfg.opts("system", "time.start") selected_car = cfg.opts("mechanic", "car.names") if car is None else car - store.open(selected_invocation, track, challenge, selected_car, create=not read_only) + store.open(trial_id, trial_timestamp, track, challenge, selected_car, create=not read_only) return store @@ -267,7 +268,8 @@ class MetricsStore: :param lap: This parameter is optional and intended for creating a metrics store with a previously serialized lap. """ self._config = cfg - self._invocation = None + self._trial_id = None + self._trial_timestamp = None self._track = None self._track_params = cfg.opts("track", "params") self._challenge = None @@ -286,11 +288,12 @@ class MetricsStore: self._clock = clock self._stop_watch = self._clock.stop_watch() - def open(self, invocation=None, track_name=None, challenge_name=None, car_name=None, ctx=None, create=False): + def open(self, trial_id=None, trial_timestamp=None, track_name=None, challenge_name=None, car_name=None, ctx=None, create=False): """ - Opens a metrics store for a specific invocation, track, challenge and car. + Opens a metrics store for a specific trial, track, challenge and car. - :param invocation: The invocation (timestamp). + :param trial_id: The trial id. This attribute is sufficient to uniquely identify a challenge. + :param trial_timestamp: The trial timestamp as a datetime. :param track_name: Track name. :param challenge_name: Challenge name. :param car_name: Car name. @@ -299,24 +302,27 @@ class MetricsStore: False when it is just opened for reading (as we can assume all necessary indices exist at this point). 
""" if ctx: - self._invocation = ctx["invocation"] + self._trial_id = ctx["trial-id"] + self._trial_timestamp = ctx["trial-timestamp"] self._track = ctx["track"] self._challenge = ctx["challenge"] self._car = ctx["car"] else: - self._invocation = time.to_iso8601(invocation) + self._trial_id = trial_id + self._trial_timestamp = time.to_iso8601(trial_timestamp) self._track = track_name self._challenge = challenge_name self._car = car_name - assert self._invocation is not None, "Attempting to open metrics store without an invocation" + assert self._trial_id is not None, "Attempting to open metrics store without a trial id" + assert self._trial_timestamp is not None, "Attempting to open metrics store without a trial timestamp" assert self._track is not None, "Attempting to open metrics store without a track" assert self._challenge is not None, "Attempting to open metrics store without a challenge" assert self._car is not None, "Attempting to open metrics store without a car" self._car_name = "+".join(self._car) if isinstance(self._car, list) else self._car - logger.info("Opening metrics store for invocation=[%s], track=[%s], challenge=[%s], car=[%s]" % - (self._invocation, self._track, self._challenge, self._car)) + logger.info("Opening metrics store for trial timestamp=[%s], track=[%s], challenge=[%s], car=[%s]" % + (self._trial_timestamp, self._track, self._challenge, self._car)) user_tags = extract_user_tags_from_config(self._config) for k, v in user_tags.items(): @@ -412,7 +418,8 @@ class MetricsStore: @property def open_context(self): return { - "invocation": self._invocation, + "trial-id": self._trial_id, + "trial-timestamp": self._trial_timestamp, "track": self._track, "challenge": self._challenge, "car": self._car @@ -526,7 +533,8 @@ class MetricsStore: doc = { "@timestamp": time.to_epoch_millis(absolute_time), "relative-time": int(relative_time * 1000 * 1000), - "trial-timestamp": self._invocation, + "trial-id": self._trial_id, + "trial-timestamp": self._trial_timestamp, "environment": self._environment_name, "track": self._track, "lap": self._lap, @@ -728,9 +736,9 @@ class EsMetricsStore(MetricsStore): self._index_template_provider = index_template_provider_class(cfg) self._docs = None - def open(self, invocation=None, track_name=None, challenge_name=None, car_name=None, ctx=None, create=False): + def open(self, trial_id=None, trial_timestamp=None, track_name=None, challenge_name=None, car_name=None, ctx=None, create=False): self._docs = [] - MetricsStore.open(self, invocation, track_name, challenge_name, car_name, ctx, create) + MetricsStore.open(self, trial_id, trial_timestamp, track_name, challenge_name, car_name, ctx, create) self._index = self.index_name() # reduce a bit of noise in the metrics cluster log if create: @@ -742,7 +750,7 @@ class EsMetricsStore(MetricsStore): self._client.refresh(index=self._index) def index_name(self): - ts = time.from_is8601(self._invocation) + ts = time.from_is8601(self._trial_timestamp) return "rally-metrics-%04d-%02d" % (ts.year, ts.month) def _get_template(self): @@ -751,8 +759,8 @@ class EsMetricsStore(MetricsStore): def flush(self, refresh=True): if self._docs: self._client.bulk_index(index=self._index, doc_type=EsMetricsStore.METRICS_DOC_TYPE, items=self._docs) - logger.info("Successfully added %d metrics documents for invocation=[%s], track=[%s], challenge=[%s], car=[%s]." 
% - (len(self._docs), self._invocation, self._track, self._challenge, self._car)) + logger.info("Successfully added %d metrics documents for trial timestamp=[%s], track=[%s], challenge=[%s], car=[%s]." % + (len(self._docs), self._trial_timestamp, self._track, self._challenge, self._car)) self._docs = [] # ensure we can search immediately after flushing if refresh: @@ -862,27 +870,7 @@ class EsMetricsStore(MetricsStore): "filter": [ { "term": { - "trial-timestamp": self._invocation - } - }, - { - "term": { - "environment": self._environment_name - } - }, - { - "term": { - "track": self._track - } - }, - { - "term": { - "challenge": self._challenge - } - }, - { - "term": { - "car": self._car_name + "trial-id": self._trial_id } }, { @@ -1076,7 +1064,8 @@ def list_races(cfg): def create_race(cfg, track, challenge): car = cfg.opts("mechanic", "car.names") - environment_name = cfg.opts("system", "env.name") + environment = cfg.opts("system", "env.name") + trial_id = cfg.opts("system", "trial.id") trial_timestamp = cfg.opts("system", "time.start") total_laps = cfg.opts("race", "laps") user_tags = extract_user_tags_from_config(cfg) @@ -1084,11 +1073,11 @@ def create_race(cfg, track, challenge): track_params = cfg.opts("track", "params") rally_version = version.version() - return Race(rally_version, environment_name, trial_timestamp, pipeline, user_tags, track, track_params, challenge, car, total_laps) + return Race(rally_version, environment, trial_id, trial_timestamp, pipeline, user_tags, track, track_params, challenge, car, total_laps) class Race: - def __init__(self, rally_version, environment_name, trial_timestamp, pipeline, user_tags, track, track_params, challenge, car, + def __init__(self, rally_version, environment_name, trial_id, trial_timestamp, pipeline, user_tags, track, track_params, challenge, car, total_laps, cluster=None, lap_results=None, results=None): if results is None: results = {} @@ -1096,6 +1085,7 @@ class Race: lap_results = [] self.rally_version = rally_version self.environment_name = environment_name + self.trial_id = trial_id self.trial_timestamp = trial_timestamp self.pipeline = pipeline self.user_tags = user_tags @@ -1104,7 +1094,7 @@ class Race: self.challenge = challenge self.car = car self.total_laps = total_laps - # will be set later - contains hosts, revision, distribution_version, ...s + # will be set later - contains hosts, revision, distribution_version, ... self.cluster = cluster self.lap_results = lap_results self.results = results @@ -1143,6 +1133,7 @@ class Race: d = { "rally-version": self.rally_version, "environment": self.environment_name, + "trial-id": self.trial_id, "trial-timestamp": time.to_iso8601(self.trial_timestamp), "pipeline": self.pipeline, "user-tags": self.user_tags, @@ -1164,6 +1155,7 @@ class Race: result_template = { "rally-version": self.rally_version, "environment": self.environment_name, + "trial-id": self.trial_id, "trial-timestamp": time.to_iso8601(self.trial_timestamp), "distribution-version": self.cluster.distribution_version, "distribution-major-version": versions.major_version(self.cluster.distribution_version), @@ -1206,7 +1198,7 @@ class Race: # Don't restore a few properties like cluster because they (a) cannot be reconstructed easily without knowledge of other modules # and (b) it is not necessary for this use case. 
- return Race(d["rally-version"], d["environment"], time.from_is8601(d["trial-timestamp"]), d["pipeline"], user_tags, + return Race(d["rally-version"], d["environment"], d["trial-id"], time.from_is8601(d["trial-timestamp"]), d["pipeline"], user_tags, d["track"], d.get("track-params"), d["challenge"], d["car"], d["total-laps"], results=d["results"]) @@ -1359,10 +1351,10 @@ class EsRaceStore(RaceStore): def find_by_timestamp(self, timestamp): filters = [{ - "term": { - "environment": self.environment_name - } - }, + "term": { + "environment": self.environment_name + } + }, { "term": { "trial-timestamp": timestamp diff --git a/esrally/rally.py b/esrally/rally.py index 2fe5c91f..483a86ec 100644 --- a/esrally/rally.py +++ b/esrally/rally.py @@ -5,6 +5,7 @@ import logging.handlers import os import sys import time +import uuid from esrally import version, actor, config, paths, racecontrol, reporter, metrics, track, chart_generator, exceptions, time as rtime from esrally import PROGRAM_NAME, DOC_LINK, BANNER, SKULL, check_python_version @@ -662,6 +663,7 @@ def main(): cfg.add(config.Scope.application, "system", "time.start.user_provided", False) cfg.add(config.Scope.applicationOverride, "system", "quiet.mode", args.quiet) + cfg.add(config.Scope.applicationOverride, "system", "trial.id", str(uuid.uuid4())) # per node? cfg.add(config.Scope.applicationOverride, "system", "offline.mode", args.offline) diff --git a/esrally/reporter.py b/esrally/reporter.py index 9e089a5a..ee0b997a 100644 --- a/esrally/reporter.py +++ b/esrally/reporter.py @@ -543,8 +543,8 @@ class ComparisonReporter: self.plain = False def report(self, r1, r2): - logger.info("Generating comparison report for baseline (invocation=[%s], track=[%s], challenge=[%s], car=[%s]) and " - "contender (invocation=[%s], track=[%s], challenge=[%s], car=[%s])" % + logger.info("Generating comparison report for baseline (trial timestamp=[%s], track=[%s], challenge=[%s], car=[%s]) and " + "contender (trial timestamp=[%s], track=[%s], challenge=[%s], car=[%s])" % (r1.trial_timestamp, r1.track, r1.challenge, r1.car, r2.trial_timestamp, r2.track, r2.challenge, r2.car)) # we don't verify anything about the races as it is possible that the user benchmarks two different tracks intentionally diff --git a/esrally/resources/metrics-template.json b/esrally/resources/metrics-template.json index 20155504..ea7f62d6 100644 --- a/esrally/resources/metrics-template.json +++ b/esrally/resources/metrics-template.json @@ -32,6 +32,9 @@ "relative-time": { "type": "long" }, + "trial-id": { + "type": "keyword" + }, "trial-timestamp": { "type": "date", "format": "basic_date_time_no_millis", diff --git a/esrally/resources/races-template.json b/esrally/resources/races-template.json index cc158693..67eea774 100644 --- a/esrally/resources/races-template.json +++ b/esrally/resources/races-template.json @@ -25,6 +25,9 @@ "enabled": true }, "properties": { + "trial-id": { + "type": "keyword" + }, "trial-timestamp": { "type": "date", "format": "basic_date_time_no_millis", diff --git a/esrally/resources/results-template.json b/esrally/resources/results-template.json index 9239c817..ac5896b0 100644 --- a/esrally/resources/results-template.json +++ b/esrally/resources/results-template.json @@ -25,6 +25,9 @@ "enabled": true }, "properties": { + "trial-id": { + "type": "keyword" + }, "trial-timestamp": { "type": "date", "format": "basic_date_time_no_millis",
Add a unique race id Currently we use a combination of track, challenge, car and race timestamp to select documents in reporting. We can simplify the code (and also make it more robust) by generating a unique id per race and using that instead.
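In practice such an id is just a UUID generated once when the race starts and copied onto everything the race writes. The sketch below illustrates that idea under stated assumptions: `start_race()` and `metric_doc()` are hypothetical helpers, not Rally's API; only `uuid.uuid4()` and the `trial-id`/`trial-timestamp` field names come from the patch.

```python
# Rough sketch of "one unique id per race": generate a UUID once at startup and
# copy it onto every document the race produces. start_race() and metric_doc()
# are hypothetical helpers, not Rally's API.
import datetime
import uuid


def start_race(track, challenge, car):
    """Build the per-race metadata exactly once."""
    return {
        "trial-id": str(uuid.uuid4()),
        "trial-timestamp": datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%dT%H%M%SZ"),
        "track": track,
        "challenge": challenge,
        "car": car,
    }


def metric_doc(race, name, value, unit):
    """Stamp the race metadata on a metrics document so that reporting can
    later select the whole race with a single trial-id term."""
    doc = {"name": name, "value": value, "unit": unit}
    doc.update(race)
    return doc


if __name__ == "__main__":
    race = start_race("geonames", "append-no-conflicts", "defaults")
    print(metric_doc(race, "indexing_throughput", 5000, "docs/s"))
```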
elastic/rally
diff --git a/tests/driver/driver_test.py b/tests/driver/driver_test.py index c2ccfdc2..fbfcc08a 100644 --- a/tests/driver/driver_test.py +++ b/tests/driver/driver_test.py @@ -57,6 +57,7 @@ class DriverTests(TestCase): self.cfg = config.Config() self.cfg.add(config.Scope.application, "system", "env.name", "unittest") self.cfg.add(config.Scope.application, "system", "time.start", datetime(year=2017, month=8, day=20, hour=1, minute=0, second=0)) + self.cfg.add(config.Scope.application, "system", "trial.id", "6ebc6e53-ee20-4b0c-99b4-09697987e9f4") self.cfg.add(config.Scope.application, "track", "challenge.name", "default") self.cfg.add(config.Scope.application, "track", "params", {}) self.cfg.add(config.Scope.application, "track", "test.mode.enabled", True) diff --git a/tests/metrics_test.py b/tests/metrics_test.py index fab49911..5c754bfe 100644 --- a/tests/metrics_test.py +++ b/tests/metrics_test.py @@ -167,6 +167,7 @@ class EsClientTests(TestCase): class EsMetricsTests(TestCase): TRIAL_TIMESTAMP = datetime.datetime(2016, 1, 31) + TRIAL_ID = "6ebc6e53-ee20-4b0c-99b4-09697987e9f4" def setUp(self): self.cfg = config.Config() @@ -182,12 +183,13 @@ class EsMetricsTests(TestCase): def test_put_value_without_meta_info(self): throughput = 5000 - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(EsMetricsTests.TRIAL_ID, EsMetricsTests.TRIAL_TIMESTAMP, "test", "append", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.put_count_cluster_level("indexing_throughput", throughput, "docs/s") expected_doc = { "@timestamp": StaticClock.NOW * 1000, + "trial-id": EsMetricsTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "relative-time": 0, "environment": "unittest", @@ -197,7 +199,7 @@ class EsMetricsTests(TestCase): "shard-count": 3 }, "lap": 1, - "challenge": "append-no-conflicts", + "challenge": "append", "car": "defaults", "name": "indexing_throughput", "value": throughput, @@ -211,13 +213,14 @@ class EsMetricsTests(TestCase): def test_put_value_with_explicit_timestamps(self): throughput = 5000 - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(EsMetricsTests.TRIAL_ID, EsMetricsTests.TRIAL_TIMESTAMP, "test", "append", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.put_count_cluster_level(name="indexing_throughput", count=throughput, unit="docs/s", absolute_time=0, relative_time=10) expected_doc = { "@timestamp": 0, + "trial-id": EsMetricsTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "relative-time": 10000000, "environment": "unittest", @@ -227,7 +230,7 @@ class EsMetricsTests(TestCase): "shard-count": 3 }, "lap": 1, - "challenge": "append-no-conflicts", + "challenge": "append", "car": "defaults", "name": "indexing_throughput", "value": throughput, @@ -243,7 +246,7 @@ class EsMetricsTests(TestCase): throughput = 5000 # add a user-defined tag self.cfg.add(config.Scope.application, "race", "user.tag", "intention:testing,disk_type:hdd") - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(EsMetricsTests.TRIAL_ID, EsMetricsTests.TRIAL_TIMESTAMP, "test", "append", "defaults", create=True) self.metrics_store.lap = 1 # Ensure we also merge in cluster level meta info @@ -257,6 +260,7 @@ class EsMetricsTests(TestCase): self.metrics_store.put_value_node_level("node0", "indexing_throughput", 
throughput, "docs/s") expected_doc = { "@timestamp": StaticClock.NOW * 1000, + "trial-id": EsMetricsTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "relative-time": 0, "environment": "unittest", @@ -266,7 +270,7 @@ class EsMetricsTests(TestCase): "shard-count": 3 }, "lap": 1, - "challenge": "append-no-conflicts", + "challenge": "append", "car": "defaults", "name": "indexing_throughput", "value": throughput, @@ -301,7 +305,7 @@ class EsMetricsTests(TestCase): } self.es_mock.search = mock.MagicMock(return_value=search_result) - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(EsMetricsTests.TRIAL_ID, EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") expected_query = { "query": { @@ -309,27 +313,7 @@ class EsMetricsTests(TestCase): "filter": [ { "term": { - "trial-timestamp": "20160131T000000Z" - } - }, - { - "term": { - "environment": "unittest" - } - }, - { - "term": { - "track": "test" - } - }, - { - "term": { - "challenge": "append-no-conflicts" - } - }, - { - "term": { - "car": "defaults" + "trial-id": EsMetricsTests.TRIAL_ID } }, { @@ -369,7 +353,7 @@ class EsMetricsTests(TestCase): } self.es_mock.search = mock.MagicMock(return_value=search_result) - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(EsMetricsTests.TRIAL_ID, EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") expected_query = { "query": { @@ -377,27 +361,7 @@ class EsMetricsTests(TestCase): "filter": [ { "term": { - "trial-timestamp": "20160131T000000Z" - } - }, - { - "term": { - "environment": "unittest" - } - }, - { - "term": { - "track": "test" - } - }, - { - "term": { - "challenge": "append-no-conflicts" - } - }, - { - "term": { - "car": "defaults" + "trial-id": EsMetricsTests.TRIAL_ID } }, { @@ -523,7 +487,7 @@ class EsMetricsTests(TestCase): } self.es_mock.search = mock.MagicMock(return_value=search_result) - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(EsMetricsTests.TRIAL_ID, EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") expected_query = { "query": { @@ -531,27 +495,7 @@ class EsMetricsTests(TestCase): "filter": [ { "term": { - "trial-timestamp": "20160131T000000Z" - } - }, - { - "term": { - "environment": "unittest" - } - }, - { - "term": { - "track": "test" - } - }, - { - "term": { - "challenge": "append-no-conflicts" - } - }, - { - "term": { - "car": "defaults" + "trial-id": EsMetricsTests.TRIAL_ID } }, { @@ -589,6 +533,7 @@ class EsMetricsTests(TestCase): class EsRaceStoreTests(TestCase): TRIAL_TIMESTAMP = datetime.datetime(2016, 1, 31) + TRIAL_ID = "6ebc6e53-ee20-4b0c-99b4-09697987e9f4" class DictHolder: def __init__(self, d): @@ -617,7 +562,8 @@ class EsRaceStoreTests(TestCase): indices=[track.Index(name="tests", types=["test-type"])], challenges=[track.Challenge(name="index", default=True, schedule=schedule)]) - race = metrics.Race(rally_version="0.4.4", environment_name="unittest", trial_timestamp=EsRaceStoreTests.TRIAL_TIMESTAMP, + race = metrics.Race(rally_version="0.4.4", environment_name="unittest", trial_id=EsRaceStoreTests.TRIAL_ID, + trial_timestamp=EsRaceStoreTests.TRIAL_TIMESTAMP, pipeline="from-sources", user_tags={"os": "Linux"}, track=t, track_params={"shard-count": 3}, challenge=t.default_challenge, car="4gheap", total_laps=12, @@ -653,6 +599,7 @@ class EsRaceStoreTests(TestCase): 
expected_doc = { "rally-version": "0.4.4", "environment": "unittest", + "trial-id": EsRaceStoreTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "pipeline": "from-sources", "user-tags": { @@ -697,6 +644,7 @@ class EsRaceStoreTests(TestCase): class EsResultsStoreTests(TestCase): TRIAL_TIMESTAMP = datetime.datetime(2016, 1, 31) + TRIAL_ID = "6ebc6e53-ee20-4b0c-99b4-09697987e9f4" def setUp(self): self.cfg = config.Config() @@ -727,7 +675,8 @@ class EsResultsStoreTests(TestCase): node = c.add_node("localhost", "rally-node-0") node.plugins.append("x-pack") - race = metrics.Race(rally_version="0.4.4", environment_name="unittest", trial_timestamp=EsResultsStoreTests.TRIAL_TIMESTAMP, + race = metrics.Race(rally_version="0.4.4", environment_name="unittest", trial_id=EsResultsStoreTests.TRIAL_ID, + trial_timestamp=EsResultsStoreTests.TRIAL_TIMESTAMP, pipeline="from-sources", user_tags={"os": "Linux"}, track=t, track_params=None, challenge=t.default_challenge, car="4gheap", total_laps=12, @@ -764,6 +713,7 @@ class EsResultsStoreTests(TestCase): { "rally-version": "0.4.4", "environment": "unittest", + "trial-id": EsResultsStoreTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "distribution-version": "5.0.0", "distribution-major-version": 5, @@ -784,6 +734,7 @@ class EsResultsStoreTests(TestCase): { "rally-version": "0.4.4", "environment": "unittest", + "trial-id": EsResultsStoreTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "distribution-version": "5.0.0", "distribution-major-version": 5, @@ -805,6 +756,7 @@ class EsResultsStoreTests(TestCase): { "rally-version": "0.4.4", "environment": "unittest", + "trial-id": EsResultsStoreTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "distribution-version": "5.0.0", "distribution-major-version": 5, @@ -830,6 +782,7 @@ class EsResultsStoreTests(TestCase): { "rally-version": "0.4.4", "environment": "unittest", + "trial-id": EsResultsStoreTests.TRIAL_ID, "trial-timestamp": "20160131T000000Z", "distribution-version": "5.0.0", "distribution-major-version": 5, @@ -852,6 +805,9 @@ class EsResultsStoreTests(TestCase): class InMemoryMetricsStoreTests(TestCase): + TRIAL_TIMESTAMP = datetime.datetime(2016, 1, 31) + TRIAL_ID = "6ebc6e53-ee20-4b0c-99b4-09697987e9f4" + def setUp(self): self.cfg = config.Config() self.cfg.add(config.Scope.application, "system", "env.name", "unittest") @@ -864,7 +820,8 @@ class InMemoryMetricsStoreTests(TestCase): def test_get_value(self): throughput = 5000 - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.put_count_cluster_level("indexing_throughput", 1, "docs/s", sample_type=metrics.SampleType.Warmup) self.metrics_store.put_count_cluster_level("indexing_throughput", throughput, "docs/s") @@ -872,20 +829,23 @@ class InMemoryMetricsStoreTests(TestCase): self.metrics_store.close() - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults") self.assertEqual(1, self.metrics_store.get_one("indexing_throughput", sample_type=metrics.SampleType.Warmup)) self.assertEqual(throughput, self.metrics_store.get_one("indexing_throughput", sample_type=metrics.SampleType.Normal)) 
def test_get_percentile(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 for i in range(1, 1001): self.metrics_store.put_value_cluster_level("query_latency", float(i), "ms") self.metrics_store.close() - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults") self.assert_equal_percentiles("query_latency", [100.0], {100.0: 1000.0}) self.assert_equal_percentiles("query_latency", [99.0], {99.0: 990.0}) @@ -895,14 +855,16 @@ class InMemoryMetricsStoreTests(TestCase): self.assert_equal_percentiles("query_latency", [99, 99.9, 100], {99: 990.0, 99.9: 999.0, 100: 1000.0}) def test_get_median(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 for i in range(1, 1001): self.metrics_store.put_value_cluster_level("query_latency", float(i), "ms") self.metrics_store.close() - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults") self.assertAlmostEqual(500.5, self.metrics_store.get_median("query_latency", lap=1)) @@ -914,7 +876,8 @@ class InMemoryMetricsStoreTests(TestCase): msg=str(percentile) + "th percentile differs") def test_externalize_and_bulk_add(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.put_count_cluster_level("final_index_size", 1000, "GB") @@ -932,7 +895,8 @@ class InMemoryMetricsStoreTests(TestCase): self.assertEqual(1000, self.metrics_store.get_one("final_index_size")) def test_meta_data_per_document(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.add_meta_info(metrics.MetaInfoScope.cluster, None, "cluster-name", "test") @@ -955,16 +919,19 @@ class InMemoryMetricsStoreTests(TestCase): }, self.metrics_store.docs[1]["meta"]) def test_get_error_rate_zero_without_samples(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.close() - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + 
self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults") self.assertEqual(0.0, self.metrics_store.get_error_rate("term-query", sample_type=metrics.SampleType.Normal)) def test_get_error_rate_by_sample_type(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.put_value_cluster_level("service_time", 3.0, "ms", task="term-query", sample_type=metrics.SampleType.Warmup, meta_data={"success": False}) @@ -973,13 +940,15 @@ class InMemoryMetricsStoreTests(TestCase): self.metrics_store.close() - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults") self.assertEqual(1.0, self.metrics_store.get_error_rate("term-query", sample_type=metrics.SampleType.Warmup)) self.assertEqual(0.0, self.metrics_store.get_error_rate("term-query", sample_type=metrics.SampleType.Normal)) def test_get_error_rate_mixed(self): - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults", create=True) + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults", create=True) self.metrics_store.lap = 1 self.metrics_store.put_value_cluster_level("service_time", 3.0, "ms", task="term-query", sample_type=metrics.SampleType.Normal, meta_data={"success": True}) @@ -994,7 +963,8 @@ class InMemoryMetricsStoreTests(TestCase): self.metrics_store.close() - self.metrics_store.open(EsMetricsTests.TRIAL_TIMESTAMP, "test", "append-no-conflicts", "defaults") + self.metrics_store.open(InMemoryMetricsStoreTests.TRIAL_ID, InMemoryMetricsStoreTests.TRIAL_TIMESTAMP, + "test", "append-no-conflicts", "defaults") self.assertEqual(0.0, self.metrics_store.get_error_rate("term-query", sample_type=metrics.SampleType.Warmup)) self.assertEqual(0.2, self.metrics_store.get_error_rate("term-query", sample_type=metrics.SampleType.Normal)) @@ -1002,6 +972,7 @@ class InMemoryMetricsStoreTests(TestCase): class FileRaceStoreTests(TestCase): TRIAL_TIMESTAMP = datetime.datetime(2016, 1, 31) + TRIAL_ID = "6ebc6e53-ee20-4b0c-99b4-09697987e9f4" class DictHolder: def __init__(self, d): @@ -1030,7 +1001,8 @@ class FileRaceStoreTests(TestCase): indices=[track.Index(name="tests", types=["test-type"])], challenges=[track.Challenge(name="index", default=True, schedule=schedule)]) - race = metrics.Race(rally_version="0.4.4", environment_name="unittest", trial_timestamp=FileRaceStoreTests.TRIAL_TIMESTAMP, + race = metrics.Race(rally_version="0.4.4", environment_name="unittest", trial_id=FileRaceStoreTests.TRIAL_ID, + trial_timestamp=FileRaceStoreTests.TRIAL_TIMESTAMP, pipeline="from-sources", user_tags={"os": "Linux"}, track=t, track_params={"clients": 12}, challenge=t.default_challenge, car="4gheap", total_laps=12, diff --git a/tests/reporter_test.py b/tests/reporter_test.py index 7750f7c6..7dc623dd 100644 --- a/tests/reporter_test.py +++ b/tests/reporter_test.py @@ -10,6 +10,7 @@ class StatsCalculatorTests(TestCase): cfg = config.Config() cfg.add(config.Scope.application, "system", "env.name", 
"unittest") cfg.add(config.Scope.application, "system", "time.start", datetime.datetime.now()) + cfg.add(config.Scope.application, "system", "trial.id", "6ebc6e53-ee20-4b0c-99b4-09697987e9f4") cfg.add(config.Scope.application, "reporting", "datastore.type", "in-memory") cfg.add(config.Scope.application, "mechanic", "car.names", ["unittest_car"]) cfg.add(config.Scope.application, "race", "laps", 1)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 8 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-benchmark" ], "pre_install": [ "apt-get update", "apt-get install -y gcc python3-pip python3-dev" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 elasticsearch==6.0.0 -e git+https://github.com/elastic/rally.git@fabefdd6464d996e830806bd5725afe49af12ae4#egg=esrally importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==2.9.5 jsonschema==2.5.1 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==5.4.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work py-cpuinfo==3.2.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-benchmark==3.4.1 tabulate==0.8.1 thespian==3.9.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.22 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: rally channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - elasticsearch==6.0.0 - jinja2==2.9.5 - jsonschema==2.5.1 - markupsafe==2.0.1 - psutil==5.4.0 - py-cpuinfo==3.2.0 - pytest-benchmark==3.4.1 - tabulate==0.8.1 - thespian==3.9.2 - urllib3==1.22 prefix: /opt/conda/envs/rally
[ "tests/metrics_test.py::EsMetricsTests::test_get_error_rate_additional_unknown_key", "tests/metrics_test.py::EsMetricsTests::test_get_error_rate_explicit_one", "tests/metrics_test.py::EsMetricsTests::test_get_error_rate_explicit_zero", "tests/metrics_test.py::EsMetricsTests::test_get_error_rate_implicit_one", "tests/metrics_test.py::EsMetricsTests::test_get_error_rate_implicit_zero", "tests/metrics_test.py::EsMetricsTests::test_get_error_rate_mixed", "tests/metrics_test.py::EsMetricsTests::test_get_median", "tests/metrics_test.py::EsMetricsTests::test_get_value", "tests/metrics_test.py::EsMetricsTests::test_put_value_with_explicit_timestamps", "tests/metrics_test.py::EsMetricsTests::test_put_value_with_meta_info", "tests/metrics_test.py::EsMetricsTests::test_put_value_without_meta_info", "tests/metrics_test.py::EsRaceStoreTests::test_store_race", "tests/metrics_test.py::EsResultsStoreTests::test_store_results", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_externalize_and_bulk_add", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_get_error_rate_by_sample_type", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_get_error_rate_mixed", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_get_error_rate_zero_without_samples", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_get_median", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_get_percentile", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_get_value", "tests/metrics_test.py::InMemoryMetricsStoreTests::test_meta_data_per_document", "tests/metrics_test.py::FileRaceStoreTests::test_store_race" ]
[]
[ "tests/driver/driver_test.py::DriverTests::test_assign_drivers_round_robin", "tests/driver/driver_test.py::DriverTests::test_client_reaches_join_point_others_still_executing", "tests/driver/driver_test.py::DriverTests::test_client_reaches_join_point_which_completes_parent", "tests/driver/driver_test.py::DriverTests::test_start_benchmark_and_prepare_track", "tests/driver/driver_test.py::AllocatorTests::test_a_task_completes_the_parallel_structure", "tests/driver/driver_test.py::AllocatorTests::test_allocates_mixed_tasks", "tests/driver/driver_test.py::AllocatorTests::test_allocates_more_tasks_than_clients", "tests/driver/driver_test.py::AllocatorTests::test_allocates_one_task", "tests/driver/driver_test.py::AllocatorTests::test_allocates_two_parallel_tasks", "tests/driver/driver_test.py::AllocatorTests::test_allocates_two_serial_tasks", "tests/driver/driver_test.py::AllocatorTests::test_considers_number_of_clients_per_subtask", "tests/driver/driver_test.py::MetricsAggregationTests::test_different_sample_types", "tests/driver/driver_test.py::MetricsAggregationTests::test_single_metrics_aggregation", "tests/driver/driver_test.py::SchedulerTests::test_eternal_schedule_with_progress_indication", "tests/driver/driver_test.py::SchedulerTests::test_eternal_schedule_without_progress_indication", "tests/driver/driver_test.py::SchedulerTests::test_schedule_for_time_based", "tests/driver/driver_test.py::SchedulerTests::test_schedule_for_warmup_time_based", "tests/driver/driver_test.py::SchedulerTests::test_search_task_one_client", "tests/driver/driver_test.py::SchedulerTests::test_search_task_two_clients", "tests/driver/driver_test.py::ExecutorTests::test_cancel_execute_schedule", "tests/driver/driver_test.py::ExecutorTests::test_execute_schedule_aborts_on_error", "tests/driver/driver_test.py::ExecutorTests::test_execute_schedule_in_throughput_mode", "tests/driver/driver_test.py::ExecutorTests::test_execute_schedule_throughput_throttled", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_dict", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_no_return_value", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_tuple", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_with_connection_error", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_with_http_400", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_with_key_error", "tests/driver/driver_test.py::ProfilerTests::test_profiler_is_a_transparent_wrapper", "tests/metrics_test.py::ExtractUserTagsTests::test_extracts_proper_user_tags", "tests/metrics_test.py::ExtractUserTagsTests::test_missing_comma_raises_error", "tests/metrics_test.py::ExtractUserTagsTests::test_missing_value_raises_error", "tests/metrics_test.py::ExtractUserTagsTests::test_no_tags_returns_empty_dict", "tests/metrics_test.py::EsClientTests::test_fails_after_too_many_timeouts", "tests/metrics_test.py::EsClientTests::test_raises_rally_error_on_unknown_problems", "tests/metrics_test.py::EsClientTests::test_raises_sytem_setup_error_on_authentication_problems", "tests/metrics_test.py::EsClientTests::test_raises_sytem_setup_error_on_authorization_problems", "tests/metrics_test.py::EsClientTests::test_raises_sytem_setup_error_on_connection_problems", "tests/metrics_test.py::EsClientTests::test_retries_on_timeouts", "tests/reporter_test.py::StatsCalculatorTests::test_calculate_simple_index_stats", "tests/reporter_test.py::StatsTests::test_as_flat_list", 
"tests/reporter_test.py::FormatterTests::test_formats_as_csv", "tests/reporter_test.py::FormatterTests::test_formats_as_markdown" ]
[]
Apache License 2.0
2,278
[ "esrally/resources/races-template.json", "esrally/driver/driver.py", "esrally/metrics.py", "docs/metrics.rst", "esrally/resources/metrics-template.json", "esrally/reporter.py", "esrally/resources/results-template.json", "esrally/rally.py" ]
[ "esrally/resources/races-template.json", "esrally/driver/driver.py", "esrally/metrics.py", "docs/metrics.rst", "esrally/resources/metrics-template.json", "esrally/reporter.py", "esrally/resources/results-template.json", "esrally/rally.py" ]
elastic__rally-436
1ca746a1ef3cbf7c2441e9d6d2bb279795853991
2018-03-09 11:38:31
a5408e0d0d07b271b509df8057a7c73303604c10
diff --git a/esrally/driver/driver.py b/esrally/driver/driver.py index cfb50622..112471b4 100644 --- a/esrally/driver/driver.py +++ b/esrally/driver/driver.py @@ -1230,9 +1230,16 @@ def schedule_for(current_track, task, client_index): "time period of [%s] seconds." % (task.schedule, task, str(warmup_time_period), str(task.time_period))) return time_period_based(sched, warmup_time_period, task.time_period, runner_for_op, params_for_op) else: + warmup_iterations = task.warmup_iterations if task.warmup_iterations else 0 + if task.iterations: + iterations = task.iterations + elif params_for_op.size(): + iterations = params_for_op.size() - warmup_iterations + else: + iterations = 1 logger.info("Creating iteration-count based schedule with [%s] distribution for [%s] with [%d] warmup iterations and " - "[%d] iterations." % (task.schedule, op, task.warmup_iterations, task.iterations)) - return iteration_count_based(sched, task.warmup_iterations, task.iterations, runner_for_op, params_for_op) + "[%d] iterations." % (task.schedule, op, warmup_iterations, iterations)) + return iteration_count_based(sched, warmup_iterations, iterations, runner_for_op, params_for_op) def time_period_based(sched, warmup_time_period, time_period, runner, params): diff --git a/esrally/track/loader.py b/esrally/track/loader.py index 013a9875..c8e7b401 100644 --- a/esrally/track/loader.py +++ b/esrally/track/loader.py @@ -625,11 +625,11 @@ def post_process_for_test_mode(t): # we need iterate over leaf tasks and await iterating over possible intermediate 'parallel' elements for leaf_task in task: # iteration-based schedules are divided among all clients and we should provide at least one iteration for each client. - if leaf_task.warmup_iterations > leaf_task.clients: + if leaf_task.warmup_iterations is not None and leaf_task.warmup_iterations > leaf_task.clients: count = leaf_task.clients logger.info("Resetting warmup iterations to %d for [%s]" % (count, str(leaf_task))) leaf_task.warmup_iterations = count - if leaf_task.iterations > leaf_task.clients: + if leaf_task.iterations is not None and leaf_task.iterations > leaf_task.clients: count = leaf_task.clients logger.info("Resetting measurement iterations to %d for [%s]" % (count, str(leaf_task))) leaf_task.iterations = count @@ -1054,8 +1054,8 @@ class TrackSpecificationReader: def parse_parallel(self, ops_spec, ops, challenge_name): # use same default values as #parseTask() in case the 'parallel' element did not specify anything - default_warmup_iterations = self._r(ops_spec, "warmup-iterations", error_ctx="parallel", mandatory=False, default_value=0) - default_iterations = self._r(ops_spec, "iterations", error_ctx="parallel", mandatory=False, default_value=1) + default_warmup_iterations = self._r(ops_spec, "warmup-iterations", error_ctx="parallel", mandatory=False) + default_iterations = self._r(ops_spec, "iterations", error_ctx="parallel", mandatory=False) default_warmup_time_period = self._r(ops_spec, "warmup-time-period", error_ctx="parallel", mandatory=False) default_time_period = self._r(ops_spec, "time-period", error_ctx="parallel", mandatory=False) clients = self._r(ops_spec, "clients", error_ctx="parallel", mandatory=False) @@ -1079,7 +1079,7 @@ class TrackSpecificationReader: "this name exists." 
% (challenge_name, completed_by)) return track.Parallel(tasks, clients) - def parse_task(self, task_spec, ops, challenge_name, default_warmup_iterations=0, default_iterations=1, + def parse_task(self, task_spec, ops, challenge_name, default_warmup_iterations=None, default_iterations=None, default_warmup_time_period=None, default_time_period=None, completed_by_name=None): op_spec = task_spec["operation"] @@ -1106,10 +1106,10 @@ class TrackSpecificationReader: schedule=schedule, # this is to provide scheduler-specific parameters for custom schedulers. params=task_spec) - if task.warmup_iterations != default_warmup_iterations and task.time_period is not None: + if task.warmup_iterations is not None and task.time_period is not None: self._error("Operation '%s' in challenge '%s' defines '%d' warmup iterations and a time period of '%d' seconds. Please do not " "mix time periods and iterations." % (op.name, challenge_name, task.warmup_iterations, task.time_period)) - elif task.warmup_time_period is not None and task.iterations != default_iterations: + elif task.warmup_time_period is not None and task.iterations is not None: self._error("Operation '%s' in challenge '%s' defines a warmup time period of '%d' seconds and '%d' iterations. Please do not " "mix time periods and iterations." % (op.name, challenge_name, task.warmup_time_period, task.iterations)) diff --git a/esrally/track/params.py b/esrally/track/params.py index 65257f38..c8b662c2 100644 --- a/esrally/track/params.py +++ b/esrally/track/params.py @@ -113,8 +113,8 @@ class ParamSource: * It will either run an operation for a pre-determined number of times or * It can run until the parameter source is exhausted. - In the former case, return just 1. In the latter case, you should determine the number of times that `#params()` will be invoked. - With that number, Rally can show the progress made so far to the user. + In the former case, you should determine the number of times that `#params()` will be invoked. With that number, Rally can show + the progress made so far to the user. In the latter case, return ``None``. :return: The "size" of this parameter source or ``None`` if should run eternally. """ diff --git a/esrally/track/track.py b/esrally/track/track.py index 9aafbf8c..cfd1ba0a 100644 --- a/esrally/track/track.py +++ b/esrally/track/track.py @@ -588,7 +588,7 @@ class Parallel: class Task: - def __init__(self, name, operation, meta_data=None, warmup_iterations=0, iterations=1, warmup_time_period=None, time_period=None, + def __init__(self, name, operation, meta_data=None, warmup_iterations=None, iterations=None, warmup_time_period=None, time_period=None, clients=1, completes_parent=False, schedule="deterministic", params=None): self.name = name
Index-append operation only indexing bulk-size * clients documents

**Rally version** (get with `esrally --version`): Latest from master, 425d8f6e8828f944607807d192aa7a98a45672f3

**Invoked command**:

```
./rally --track-path=/home/hinmanm/es/mytrack --target-hosts=127.0.0.1:9200 --pipeline=benchmark-only
```

**Configuration file (located in `~/.rally/rally.ini`)**:

```
[meta]
config.version = 12

[system]
env.name = local

[node]
root.dir = /home/hinmanm/.rally/benchmarks
src.root.dir = /home/hinmanm/es

[source]
remote.repo.url = https://github.com/elastic/elasticsearch.git
elasticsearch.src.subdir = elasticsearch

[build]
gradle.bin = /home/hinmanm/.sdkman/candidates/gradle/current/bin/gradle

[runtime]
java.home = /usr/lib/jvm/java-1.8.0-openjdk-1.8.0.151-1.b12.fc26.x86_64

[benchmarks]
local.dataset.cache = ${node:root.dir}/data

[reporting]
datastore.type = elasticsearch
datastore.host = localhost
datastore.port = 9900
datastore.secure = False
datastore.user =
datastore.password =

[tracks]
default.url = https://github.com/elastic/rally-tracks

[teams]
default.url = https://github.com/elastic/rally-teams

[defaults]
preserve_benchmark_candidate = False

[distributions]
release.1.url = https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-{{VERSION}}.tar.gz
release.2.url = https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/{{VERSION}}/elasticsearch-{{VERSION}}.tar.gz
release.url = https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz
release.cache = true
```

**JVM version**: JDK 8

**OS version**: Fedora 26

**Description of the problem including expected versus actual behavior**:

I have a track with an index-append operation defined inline in the challenge like so:

```json
"schedule": [
    {
      "operation": {
        "name": "index-append",
        "operation-type": "bulk",
        "bulk-size": {{bulk_size | default(100)}}
      },
      "clients": 4
    },
```

The documents.json contains 1967 documents; however, only 400 are actually indexed.

**Steps to reproduce**:

1. Using a track with many documents, add a challenge schedule with a low bulk-size and multiple clients
2. Run the track
3. Observe that only `bulk-size x clients` documents are indexed; in my case, `100 x 4 = 400` documents are actually indexed.

I've noticed that this didn't affect me when the indexing was defined in a separate operation; it only started affecting me when I defined it inline in the challenge.

**Provide logs (if relevant)**:

The data is from a private repo, so I cannot provide it here.
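The numbers in this report follow directly from the old defaults: a task without an explicit iteration count ran exactly one measurement iteration per client, so 4 clients × 1 bulk × 100 documents = 400 documents regardless of corpus size. The fix in the patch above lets the parameter source dictate the iteration count when neither a time period nor an iteration count is configured. The sketch below is a simplified model of that decision, not the real `schedule_for()` (which also handles time-based schedules and throughput throttling), and it assumes the corpus is split roughly evenly across clients.

```python
# Simplified model of the scheduling decision changed by the patch above.
# The numbers reproduce the report: 1967 documents, bulk-size 100, 4 clients.
import math


def choose_iterations(task_iterations, warmup_iterations, param_source_size):
    """New fallback chain: explicit count > parameter-source size > 1."""
    warmup = warmup_iterations or 0
    if task_iterations:
        measured = task_iterations
    elif param_source_size:
        measured = param_source_size - warmup
    else:
        measured = 1
    return warmup, measured


if __name__ == "__main__":
    docs, bulk_size, clients = 1967, 100, 4
    # each client gets roughly a quarter of the corpus; its parameter source
    # knows how many bulk requests that amounts to
    bulks_per_client = math.ceil(math.ceil(docs / clients) / bulk_size)  # 5

    # old behaviour: iterations silently defaulted to 1 per client
    print("old:", clients * 1 * bulk_size, "documents at most")  # 400

    # new behaviour: the parameter source size drives the iteration count
    _, iterations = choose_iterations(None, None, bulks_per_client)
    print("new:", clients, "clients x", iterations, "bulks each -> whole corpus")
```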
elastic/rally
diff --git a/tests/driver/driver_test.py b/tests/driver/driver_test.py index fbfcc08a..981546e7 100644 --- a/tests/driver/driver_test.py +++ b/tests/driver/driver_test.py @@ -452,6 +452,47 @@ class SchedulerTests(ScheduleTestCase): ] self.assert_schedule(expected_schedule, schedule) + def test_schedule_param_source_determines_iterations_no_warmup(self): + # we neither define any time-period nor any iteration count on the task. + task = track.Task("bulk-index", track.Operation("bulk-index", track.OperationType.Bulk.name, params={"body": ["a"], "size": 3}, + param_source="driver-test-param-source"), + clients=1, params={"target-throughput": 4, "clients": 4}) + + invocations = driver.schedule_for(self.test_track, task, 0) + + self.assert_schedule([ + (0.0, metrics.SampleType.Normal, 1 / 3, {"body": ["a"], "size": 3}), + (1.0, metrics.SampleType.Normal, 2 / 3, {"body": ["a"], "size": 3}), + (2.0, metrics.SampleType.Normal, 3 / 3, {"body": ["a"], "size": 3}), + ], list(invocations)) + + def test_schedule_param_source_determines_iterations_including_warmup(self): + task = track.Task("bulk-index", track.Operation("bulk-index", track.OperationType.Bulk.name, params={"body": ["a"], "size": 5}, + param_source="driver-test-param-source"), + warmup_iterations=2, clients=1, params={"target-throughput": 4, "clients": 4}) + + invocations = driver.schedule_for(self.test_track, task, 0) + + self.assert_schedule([ + (0.0, metrics.SampleType.Warmup, 1 / 5, {"body": ["a"], "size": 5}), + (1.0, metrics.SampleType.Warmup, 2 / 5, {"body": ["a"], "size": 5}), + (2.0, metrics.SampleType.Normal, 3 / 5, {"body": ["a"], "size": 5}), + (3.0, metrics.SampleType.Normal, 4 / 5, {"body": ["a"], "size": 5}), + (4.0, metrics.SampleType.Normal, 5 / 5, {"body": ["a"], "size": 5}), + ], list(invocations)) + + def test_schedule_defaults_to_iteration_based(self): + # no time-period and no iterations specified on the task. Also, the parameter source does not define a size. + task = track.Task("bulk-index", track.Operation("bulk-index", track.OperationType.Bulk.name, params={"body": ["a"]}, + param_source="driver-test-param-source"), + clients=1, params={"target-throughput": 4, "clients": 4}) + + invocations = driver.schedule_for(self.test_track, task, 0) + + self.assert_schedule([ + (0.0, metrics.SampleType.Normal, 1 / 1, {"body": ["a"]}), + ], list(invocations)) + def test_schedule_for_warmup_time_based(self): task = track.Task("time-based", track.Operation("time-based", track.OperationType.Bulk.name, params={"body": ["a"], "size": 11}, param_source="driver-test-param-source"),
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_git_commit_hash", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 4 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-benchmark" ], "pre_install": [ "apt-get update", "apt-get install -y gcc python3-pip python3-dev" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 elasticsearch==6.0.0 -e git+https://github.com/elastic/rally.git@1ca746a1ef3cbf7c2441e9d6d2bb279795853991#egg=esrally importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==2.9.5 jsonschema==2.5.1 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==5.4.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work py-cpuinfo==3.2.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-benchmark==3.4.1 tabulate==0.8.1 thespian==3.9.2 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.22 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: rally channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - elasticsearch==6.0.0 - jinja2==2.9.5 - jsonschema==2.5.1 - markupsafe==2.0.1 - psutil==5.4.0 - py-cpuinfo==3.2.0 - pytest-benchmark==3.4.1 - tabulate==0.8.1 - thespian==3.9.2 - urllib3==1.22 prefix: /opt/conda/envs/rally
[ "tests/driver/driver_test.py::SchedulerTests::test_schedule_param_source_determines_iterations_including_warmup", "tests/driver/driver_test.py::SchedulerTests::test_schedule_param_source_determines_iterations_no_warmup" ]
[]
[ "tests/driver/driver_test.py::DriverTests::test_assign_drivers_round_robin", "tests/driver/driver_test.py::DriverTests::test_client_reaches_join_point_others_still_executing", "tests/driver/driver_test.py::DriverTests::test_client_reaches_join_point_which_completes_parent", "tests/driver/driver_test.py::DriverTests::test_start_benchmark_and_prepare_track", "tests/driver/driver_test.py::AllocatorTests::test_a_task_completes_the_parallel_structure", "tests/driver/driver_test.py::AllocatorTests::test_allocates_mixed_tasks", "tests/driver/driver_test.py::AllocatorTests::test_allocates_more_tasks_than_clients", "tests/driver/driver_test.py::AllocatorTests::test_allocates_one_task", "tests/driver/driver_test.py::AllocatorTests::test_allocates_two_parallel_tasks", "tests/driver/driver_test.py::AllocatorTests::test_allocates_two_serial_tasks", "tests/driver/driver_test.py::AllocatorTests::test_considers_number_of_clients_per_subtask", "tests/driver/driver_test.py::MetricsAggregationTests::test_different_sample_types", "tests/driver/driver_test.py::MetricsAggregationTests::test_single_metrics_aggregation", "tests/driver/driver_test.py::SchedulerTests::test_eternal_schedule_with_progress_indication", "tests/driver/driver_test.py::SchedulerTests::test_eternal_schedule_without_progress_indication", "tests/driver/driver_test.py::SchedulerTests::test_schedule_defaults_to_iteration_based", "tests/driver/driver_test.py::SchedulerTests::test_schedule_for_time_based", "tests/driver/driver_test.py::SchedulerTests::test_schedule_for_warmup_time_based", "tests/driver/driver_test.py::SchedulerTests::test_search_task_one_client", "tests/driver/driver_test.py::SchedulerTests::test_search_task_two_clients", "tests/driver/driver_test.py::ExecutorTests::test_cancel_execute_schedule", "tests/driver/driver_test.py::ExecutorTests::test_execute_schedule_aborts_on_error", "tests/driver/driver_test.py::ExecutorTests::test_execute_schedule_in_throughput_mode", "tests/driver/driver_test.py::ExecutorTests::test_execute_schedule_throughput_throttled", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_dict", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_no_return_value", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_tuple", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_with_connection_error", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_with_http_400", "tests/driver/driver_test.py::ExecutorTests::test_execute_single_with_key_error", "tests/driver/driver_test.py::ProfilerTests::test_profiler_is_a_transparent_wrapper" ]
[]
Apache License 2.0
2,279
[ "esrally/driver/driver.py", "esrally/track/params.py", "esrally/track/track.py", "esrally/track/loader.py" ]
[ "esrally/driver/driver.py", "esrally/track/params.py", "esrally/track/track.py", "esrally/track/loader.py" ]
chaostoolkit__chaostoolkit-lib-41
80fb16cd50d4f6d761cf2c0c5919574d2972932c
2018-03-09 16:44:37
80fb16cd50d4f6d761cf2c0c5919574d2972932c
diff --git a/CHANGELOG.md b/CHANGELOG.md index 54b1a3c..ff3263c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,20 @@ ## [Unreleased][] -[Unreleased]: https://github.com/chaostoolkit/chaostoolkit-lib/compare/0.15.0...HEAD +[Unreleased]: https://github.com/chaostoolkit/chaostoolkit-lib/compare/0.15.1...HEAD + +## [0.15.1][] - 2018-03-09 + +[0.15.1]: https://github.com/chaostoolkit/chaostoolkit-lib/compare/0.15.0...0.15.1 + +### Changed + +- Log a message wgen loding the configuration +- Raise `InvalidExperiment` when a configuration or secret references a key + in the environment and that key does not exist (it may not be set however) + [#40][40]. This bails the experiment at validation time so before it runs. + +[40]: https://github.com/chaostoolkit/chaostoolkit-lib/issues/40 ## [0.15.0][] - 2018-02-20 diff --git a/chaoslib/__init__.py b/chaoslib/__init__.py index ebb5445..e2065c9 100644 --- a/chaoslib/__init__.py +++ b/chaoslib/__init__.py @@ -6,7 +6,7 @@ from typing import Any, Dict, List, Mapping, Union from chaoslib.types import Configuration, Secrets __all__ = ["__version__", "substitute"] -__version__ = '0.15.0' +__version__ = '0.15.1' def substitute(data: Union[str, Dict[str, Any], List], diff --git a/chaoslib/configuration.py b/chaoslib/configuration.py index cb34bd4..4d97032 100644 --- a/chaoslib/configuration.py +++ b/chaoslib/configuration.py @@ -4,6 +4,7 @@ from typing import Dict from logzero import logger +from chaoslib.exceptions import InvalidExperiment from chaoslib.types import Configuration __all__ = ["load_configuration"] @@ -32,13 +33,19 @@ def load_configuration(config_info: Dict[str, str]) -> Configuration: configuration key is dynamically fetched from the `MY_TOKEN` environment variable. """ + logger.debug("Loading configuration...") env = os.environ conf = {} for (key, value) in config_info.items(): if isinstance(value, dict) and "type" in value: if value["type"] == "env": - conf[key] = env.get(value["key"]) + env_key = value["key"] + if env_key not in env: + raise InvalidExperiment( + "Configuration makes reference to an environment key" + " that does not exist: {}".format(env_key)) + conf[key] = env.get(env_key) else: conf[key] = value diff --git a/chaoslib/experiment.py b/chaoslib/experiment.py index 4bcd6f6..b909733 100644 --- a/chaoslib/experiment.py +++ b/chaoslib/experiment.py @@ -73,6 +73,9 @@ def ensure_experiment_is_valid(experiment: Experiment): raise InvalidExperiment( "experiment tags must be a non-empty string") + config = load_configuration(experiment.get("configuration", {})) + secrets = load_secrets(experiment.get("secrets", {}), config) + ensure_hypothesis_is_valid(experiment) method = experiment.get("method") diff --git a/chaoslib/secret.py b/chaoslib/secret.py index dfe1dd2..a272151 100644 --- a/chaoslib/secret.py +++ b/chaoslib/secret.py @@ -10,6 +10,7 @@ try: except ImportError: HAS_HVAC = False +from chaoslib.exceptions import InvalidExperiment from chaoslib.types import Configuration, Secrets __all__ = ["load_secrets"] @@ -123,7 +124,12 @@ def load_secrets_from_env(secrets_info: Dict[str, Dict[str, str]], for (key, value) in keys.items(): if isinstance(value, dict) and value.get("type") == "env": - secrets[target][key] = env.get(value["key"]) + env_key = value["key"] + if env_key not in env: + raise InvalidExperiment( + "Secrets make reference to an environment key " + "that does not exist: {}".format(env_key)) + secrets[target][key] = env.get(env_key) if not secrets[target]: secrets.pop(target)
Bail cleanly when environment key was not found It appears the toolkit doesn't tell you when a key couldn't be found in the environment.
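After the fix, an `"env"`-typed configuration entry whose key is absent from the environment raises `InvalidExperiment` instead of silently resolving to `None`, and validation loads configuration and secrets up front so the experiment is rejected before anything runs. Below is a stripped-down standalone version of that check, mirroring the patch above; the exception class is defined locally only so the snippet runs on its own (in chaoslib it lives in `chaoslib.exceptions`).

```python
# Standalone sketch of the fail-fast check added by the patch above: an
# "env"-typed configuration entry must reference an existing environment key.
import os


class InvalidExperiment(Exception):
    """Local stand-in for chaoslib.exceptions.InvalidExperiment."""


def load_configuration(config_info):
    conf = {}
    for key, value in config_info.items():
        if isinstance(value, dict) and value.get("type") == "env":
            env_key = value["key"]
            if env_key not in os.environ:
                # bail out at validation time instead of silently storing None
                raise InvalidExperiment(
                    "Configuration makes reference to an environment key "
                    "that does not exist: {}".format(env_key))
            conf[key] = os.environ[env_key]
        else:
            conf[key] = value
    return conf


if __name__ == "__main__":
    os.environ["MY_TOKEN"] = "s3cr3t"
    print(load_configuration({"token": {"type": "env", "key": "MY_TOKEN"}}))
    try:
        load_configuration({"token": {"type": "env", "key": "DOES_NOT_EXIST"}})
    except InvalidExperiment as err:
        print("rejected:", err)
```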
chaostoolkit/chaostoolkit-lib
diff --git a/tests/fixtures/experiments.py b/tests/fixtures/experiments.py index 627ce70..757f6f4 100644 --- a/tests/fixtures/experiments.py +++ b/tests/fixtures/experiments.py @@ -160,3 +160,11 @@ Experiment = { } ] } + +ExperimentWithConfigurationCallingMissingEnvKey = Experiment.copy() +ExperimentWithConfigurationCallingMissingEnvKey["configuration"] = { + "mykey": { + "type": "env", + "key": "DOES_NOT_EXIST" + } +} \ No newline at end of file diff --git a/tests/test_experiment.py b/tests/test_experiment.py index 28046b4..444746f 100644 --- a/tests/test_experiment.py +++ b/tests/test_experiment.py @@ -152,3 +152,12 @@ def test_experiment_may_run_without_steady_state(): journal = run_experiment(experiment) assert journal is not None + + +def test_should_bail_experiment_when_env_was_not_found(): + experiment = experiments.ExperimentWithConfigurationCallingMissingEnvKey + + with pytest.raises(InvalidExperiment) as x: + run_experiment(experiment) + assert "Configuration makes reference to an environment key that does " \ + "not exist" in str(x) \ No newline at end of file
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 5 }
0.15
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-sugar" ], "pre_install": null, "python": "3.7", "reqs_path": [ "requirements.txt", "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi -e git+https://github.com/chaostoolkit/chaostoolkit-lib.git@80fb16cd50d4f6d761cf2c0c5919574d2972932c#egg=chaostoolkit_lib charset-normalizer==3.4.1 coverage==7.2.7 exceptiongroup==1.2.2 hvac==0.11.2 idna==3.10 importlib-metadata==6.7.0 iniconfig==2.0.0 logzero==1.7.0 packaging==24.0 pluggy==1.2.0 ply==3.4 pycodestyle==2.10.0 pyhcl==0.2.3 pytest==7.4.4 pytest-cov==4.1.0 pytest-sugar==1.0.0 PyYAML==6.0.1 requests==2.31.0 requests-mock==1.12.1 six==1.17.0 termcolor==2.3.0 tomli==2.0.1 typing_extensions==4.7.1 urllib3==2.0.7 zipp==3.15.0
name: chaostoolkit-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==3.4.1 - coverage==7.2.7 - exceptiongroup==1.2.2 - hvac==0.11.2 - idna==3.10 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - logzero==1.7.0 - packaging==24.0 - pluggy==1.2.0 - ply==3.4 - pycodestyle==2.10.0 - pyhcl==0.2.3 - pytest==7.4.4 - pytest-cov==4.1.0 - pytest-sugar==1.0.0 - pyyaml==6.0.1 - requests==2.31.0 - requests-mock==1.12.1 - six==1.17.0 - termcolor==2.3.0 - tomli==2.0.1 - typing-extensions==4.7.1 - urllib3==2.0.7 - zipp==3.15.0 prefix: /opt/conda/envs/chaostoolkit-lib
[ "tests/test_experiment.py::test_should_bail_experiment_when_env_was_not_found" ]
[]
[ "tests/test_experiment.py::test_empty_experiment_is_invalid", "tests/test_experiment.py::test_experiment_must_have_a_method", "tests/test_experiment.py::test_experiment_must_have_at_least_one_step", "tests/test_experiment.py::test_experiment_must_have_a_title", "tests/test_experiment.py::test_experiment_must_have_a_description", "tests/test_experiment.py::test_experiment_may_not_have_a_hypothesis", "tests/test_experiment.py::test_experiment_hypothesis_must_have_a_title", "tests/test_experiment.py::test_experiment_hypothesis_must_have_a_valid_probe", "tests/test_experiment.py::test_valid_experiment", "tests/test_experiment.py::test_can_run_experiment_in_dry_mode", "tests/test_experiment.py::test_can_iterate_over_activities", "tests/test_experiment.py::test_no_rollback_even_on_SIGINT", "tests/test_experiment.py::test_no_rollback_even_on_SystemExit", "tests/test_experiment.py::test_probes_can_reference_each_other", "tests/test_experiment.py::test_probes_missing_ref_should_fail_the_experiment", "tests/test_experiment.py::test_experiment_with_steady_state", "tests/test_experiment.py::test_experiment_may_run_without_steady_state" ]
[]
Apache License 2.0
2,280
[ "chaoslib/configuration.py", "chaoslib/experiment.py", "CHANGELOG.md", "chaoslib/secret.py", "chaoslib/__init__.py" ]
[ "chaoslib/configuration.py", "chaoslib/experiment.py", "CHANGELOG.md", "chaoslib/secret.py", "chaoslib/__init__.py" ]
pydicom__pydicom-595
06e2352e8dc5e3ad0801ffa52339b469ae2b7b10
2018-03-10 20:24:57
fcc63f0b96fb370b0eb60b2c765b469ce62e597c
darcymason: I had a quick look at this and it looked fine, but then I tried commenting out the 'continue' lines to make the test fail, and got some strange warnings from values.py about unexpected length. I've been travelling and haven't had a chance to try again or investigate further. Will look into it when I can, or perhaps someone else can give it a try -- I may have set something up incorrectly. mrbean-bremen: Hm, if I do the same (e.g. replace `continue` with `pass`) I get: ``` def test_write_removes_grouplength(self): ds = dcmread(color_pl_name) assert 0x00080000 in ds ds.save_as(self.file_out, write_like_original=True) self.file_out.seek(0) ds = dcmread(self.file_out) # group length has been removed > assert 0x00080000 not in ds E AssertionError: assert 524288 not in (0008, 0000) Group Length UL: 480\n(0008, 0008) Image Type CS: ['ORIGIN...Group Length UL: 92172\n(7fe0, 0010) Pixel Data OW: Array of 92160 bytes ``` which looks ok to me. darcymason: Okay, I finally cracked this - it had nothing to do with this latest code, but I couldn't leave the mystery. Well, it is still a little bit of a mystery. It went away if I removed the other classes from test_filewriter.py. The second clue was that the error and warnings appeared twice as often as expected. Finally I noticed there were coming both from class WriteFileTests and from ScratchWriteDateTimeTests. Then finally noticed that the latter is *derived* from the former. Changed that to subclass from unittest.TestCase, and my issue went away, and the tests run normally with the `continue` line put back also. So I haven't tried to figure out exactly why it causes trouble in both classes when the one is derived from the other, but it certainly is the source. I suspect it is something to do with the temp file not being reset, and maybe the order of events depends on platform (I was testing on Windows, python 3.6.4). So... @mrbean-bremen, if you don't mind updating that one line, then I'd be happy to merge this. scaramallion: It looks like the change was made so that the `WriteFileTests` tests are run again but with `config.datetime_conversion = True` darcymason: Yes, I think you are right, I remember that now that you bring it up. But somehow that is not working out when the new test fails. Well, it shouldn't fail, of course, so maybe it is a moot point, but it does seem the code is a bit fragile somehow. Personally I like the 'test first' philosophy where you write the new unit test (so that it fails) before fixing the main code. darcymason: ... I'll merge this and add a separate issue for the test code problem.
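For context on the "twice as often" observation above: a `unittest.TestCase` subclass re-collects every inherited `test_*` method, which is why `ScratchWriteDateTimeTests(WriteFileTests)` runs the whole write suite a second time (deliberately, with `config.datetime_conversion = True`, as scaramallion notes). A tiny self-contained illustration of that collection behaviour:

```python
import unittest


class WriteFileTests(unittest.TestCase):
    def test_example(self):
        self.assertTrue(True)


class ScratchWriteDateTimeTests(WriteFileTests):
    # Inherits (and therefore re-runs) every test_* method defined on
    # WriteFileTests; any new test added to the base class runs twice.
    pass


if __name__ == "__main__":
    unittest.main(verbosity=2)  # reports test_example once per class
```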
diff --git a/pydicom/filewriter.py b/pydicom/filewriter.py index 01d9a4911..e283da86f 100644 --- a/pydicom/filewriter.py +++ b/pydicom/filewriter.py @@ -458,6 +458,9 @@ def write_dataset(fp, dataset, parent_encoding=default_encoding): tags = sorted(dataset.keys()) for tag in tags: + # do not write retired Group Length (see PS3.5, 7.2) + if tag.element == 0 and tag.group > 6: + continue with tag_in_exception(tag): # write_data_element(fp, dataset.get_item(tag), dataset_encoding) # XXX for writing raw tags without converting to DataElement
Update filewriter group length fields _From [[email protected]](https://code.google.com/u/[email protected]/) on December 10, 2008 22:48:30_ It looks like filewriter module does not recalc group length fields except for file meta info section. Should make this the case for all groups. _Original issue: http://code.google.com/p/pydicom/issues/detail?id=30_
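The merged patch takes a simpler route than recalculating: when writing a dataset, retired Group Length elements (`gggg,0000` for any group above `0006`, per PS3.5 Section 7.2) are skipped entirely, while the still-mandatory File Meta group length (`0002,0000`) is untouched. A toy sketch of that skip rule, using plain `(group, element)` tuples in place of pydicom's `Tag` objects:

```python
def tags_to_write(tags):
    # Mirror of the condition added to write_dataset(): element 0x0000 of any
    # group above 0x0006 is a retired Group Length and is not written out.
    for group, element in sorted(tags):
        if element == 0 and group > 6:
            continue
        yield (group, element)


dataset_tags = [(0x0008, 0x0000), (0x0008, 0x0008), (0x7FE0, 0x0010)]
print([(hex(g), hex(e)) for g, e in tags_to_write(dataset_tags)])
# [('0x8', '0x8'), ('0x7fe0', '0x10')] -- (0008,0000) Group Length is dropped
```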
pydicom/pydicom
diff --git a/pydicom/tests/test_filewriter.py b/pydicom/tests/test_filewriter.py index f2bac9a81..362704140 100644 --- a/pydicom/tests/test_filewriter.py +++ b/pydicom/tests/test_filewriter.py @@ -39,6 +39,7 @@ ct_name = get_testdata_files("CT_small.dcm")[0] mr_name = get_testdata_files("MR_small.dcm")[0] jpeg_name = get_testdata_files("JPEG2000.dcm")[0] no_ts = get_testdata_files("meta_missing_tsyntax.dcm")[0] +color_pl_name = get_testdata_files("color-pl.dcm")[0] datetime_name = mr_name unicode_name = get_charset_files("chrH31.dcm")[0] @@ -194,6 +195,15 @@ class WriteFileTests(unittest.TestCase): ds = dcmread(fp, force=True) assert ds[0xFFFFFFFF].value == b'123456' + def test_write_removes_grouplength(self): + ds = dcmread(color_pl_name) + assert 0x00080000 in ds + ds.save_as(self.file_out, write_like_original=True) + self.file_out.seek(0) + ds = dcmread(self.file_out) + # group length has been removed + assert 0x00080000 not in ds + class ScratchWriteDateTimeTests(WriteFileTests): """Write and reread simple or multi-value DA/DT/TM data elements"""
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/pydicom/pydicom.git@06e2352e8dc5e3ad0801ffa52339b469ae2b7b10#egg=pydicom pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pydicom channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/pydicom
[ "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_removes_grouplength", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_removes_grouplength" ]
[]
[ "pydicom/tests/test_filewriter.py::WriteFileTests::testCT", "pydicom/tests/test_filewriter.py::WriteFileTests::testJPEG2000", "pydicom/tests/test_filewriter.py::WriteFileTests::testListItemWriteBack", "pydicom/tests/test_filewriter.py::WriteFileTests::testMR", "pydicom/tests/test_filewriter.py::WriteFileTests::testMultiPN", "pydicom/tests/test_filewriter.py::WriteFileTests::testRTDose", "pydicom/tests/test_filewriter.py::WriteFileTests::testRTPlan", "pydicom/tests/test_filewriter.py::WriteFileTests::testUnicode", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_double_filemeta", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_ffff_ffff", "pydicom/tests/test_filewriter.py::WriteFileTests::test_write_no_ts", "pydicom/tests/test_filewriter.py::WriteFileTests::testwrite_short_uid", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testCT", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testJPEG2000", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testListItemWriteBack", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testMR", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testMultiPN", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testRTDose", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testRTPlan", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testUnicode", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_multivalue_DA", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_double_filemeta", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_ffff_ffff", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::test_write_no_ts", "pydicom/tests/test_filewriter.py::ScratchWriteDateTimeTests::testwrite_short_uid", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_empty_AT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_DA", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_DT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OD_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OD_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OL_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_OL_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_TM", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UC_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UC_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UN_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UR_explicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_UR_implicit_little", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_empty_LO", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_DA", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_DT", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_multi_TM", "pydicom/tests/test_filewriter.py::WriteDataElementTests::test_write_unknown_vr_raises", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_lut_descriptor", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_overlay", 
"pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_data", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_representation_vm_one", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_pixel_representation_vm_three", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_sequence", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVR::test_waveform_bits_allocated", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVRElement::test_not_ambiguous", "pydicom/tests/test_filewriter.py::TestCorrectAmbiguousVRElement::test_pixel_data_not_ow_or_ob", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_big_endian", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_little_endian", "pydicom/tests/test_filewriter.py::WriteAmbiguousVRTests::test_write_explicit_vr_raises", "pydicom/tests/test_filewriter.py::ScratchWriteTests::testImpl_LE_deflen_write", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_preamble_default", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_preamble_custom", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_no_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_none_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_bad_preamble", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_prefix", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_prefix_none", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_ds_changed", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_not_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_transfer_syntax_raises", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_media_storage_sop_class_uid_added", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_write_no_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_raise_no_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_add_file_meta", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_standard", "pydicom/tests/test_filewriter.py::TestWriteToStandard::test_commandset_no_written", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_bad_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_missing_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_group_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_group_length_updated", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_version", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_implementation_version_name_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_implementation_class_uid_length", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoToStandard::test_filelike_position", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_filemeta", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_commandset_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_dataset", 
"pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_ds_unchanged", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_file_meta_unchanged", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_no_preamble", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_filemeta", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_commandset_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_custom", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_default", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_preamble_filemeta_dataset", "pydicom/tests/test_filewriter.py::TestWriteNonStandard::test_read_write_identical", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_bad_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_filelike_position", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_group_length_updated", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_meta_unchanged", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_missing_elements", "pydicom/tests/test_filewriter.py::TestWriteFileMetaInfoNonStandard::test_transfer_syntax_not_added", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_empty_value", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_list", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_singleton", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_exception", "pydicom/tests/test_filewriter.py::TestWriteNumbers::test_write_big_endian", "pydicom/tests/test_filewriter.py::TestWritePN::test_no_encoding_unicode", "pydicom/tests/test_filewriter.py::TestWritePN::test_no_encoding", "pydicom/tests/test_filewriter.py::TestWriteDT::test_format_dt", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_big_endian_correct_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_big_endian_incorrect_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_little_endian_correct_data", "pydicom/tests/test_filewriter.py::TestWriteUndefinedLengthPixelData::test_little_endian_incorrect_data" ]
[]
MIT License
2,285
[ "pydicom/filewriter.py" ]
[ "pydicom/filewriter.py" ]
tox-dev__tox-772
9a9ea31922c75c275fc2e28ff842b3aad5664257
2018-03-10 20:34:41
a52e3519cb3333d5b53e9741a7d63efa0709d184
codecov[bot]: # [Codecov](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=h1) Report > Merging [#772](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=desc) into [master](https://codecov.io/gh/tox-dev/tox/commit/9a9ea31922c75c275fc2e28ff842b3aad5664257?src=pr&el=desc) will **increase** coverage by `<.01%`. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/tox-dev/tox/pull/772/graphs/tree.svg?height=150&width=650&token=DYodAwDCZ5&src=pr)](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #772 +/- ## ========================================== + Coverage 94.83% 94.84% +<.01% ========================================== Files 11 11 Lines 2402 2406 +4 ========================================== + Hits 2278 2282 +4 Misses 124 124 ``` | [Impacted Files](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [tox/config.py](https://codecov.io/gh/tox-dev/tox/pull/772/diff?src=pr&el=tree#diff-dG94L2NvbmZpZy5weQ==) | `97.76% <100%> (+0.01%)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=footer). Last update [9a9ea31...1fc5b93](https://codecov.io/gh/tox-dev/tox/pull/772?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/changelog/706.bugfix.rst b/changelog/706.bugfix.rst new file mode 100644 index 00000000..b0827db6 --- /dev/null +++ b/changelog/706.bugfix.rst @@ -0,0 +1,1 @@ +Fix bug with incorrectly defactorized dependencies - by @bartsanchez diff --git a/doc/config.rst b/doc/config.rst index 2e12fe50..cf1d9ec4 100644 --- a/doc/config.rst +++ b/doc/config.rst @@ -674,6 +674,32 @@ the following: - but not ``py2``, ``py36-sql`` or ``py36-mysql-dev``. +Factors and values substitution are compatible +++++++++++++++++++++++++++++++++++++++++++++++ + +It is possible to mix both values substitution and factor expressions. +For example:: + + [tox] + envlist = py27,py36,coverage + + [testenv] + deps = + flake8 + coverage: coverage + + [testenv:py27] + deps = + {{[testenv]deps}} + pytest + +With the previous configuration, it will install: + +- ``flake8`` and ``pytest`` packages for ``py27`` environment. +- ``flake8`` package for ``py36`` environment. +- ``flake8`` and ``coverage`` packages for ``coverage`` environment. + + Other Rules and notes ===================== diff --git a/tox/config.py b/tox/config.py index ad72bdf3..f8cb9ff4 100755 --- a/tox/config.py +++ b/tox/config.py @@ -1076,10 +1076,10 @@ class SectionReader: if x is None: x = default else: + x = self._replace_if_needed(x, name, replace, crossonly) x = self._apply_factors(x) - if replace and x and hasattr(x, 'replace'): - x = self._replace(x, name=name, crossonly=crossonly) + x = self._replace_if_needed(x, name, replace, crossonly) # print "getstring", self.section_name, name, "returned", repr(x) return x @@ -1115,6 +1115,11 @@ class SectionReader: raise return replaced + def _replace_if_needed(self, x, name, replace, crossonly): + if replace and x and hasattr(x, 'replace'): + x = self._replace(x, name=name, crossonly=crossonly) + return x + class Replacer: RE_ITEM_REF = re.compile(
deps passed to pip are not de-factorized Given this `tox.ini`: ```ini [testenv] deps = foo coverage: coverage [testenv:sub] deps = {[testenv]deps} ``` Running `tox -e sub` results in: ``` sub create: /tmp/t1/.tox/sub sub installdeps: foo, coverage: coverage ERROR: invocation failed (exit code 1), logfile: /tmp/t1/.tox/sub/log/sub-1.log ERROR: actionid: sub msg: getenv cmdargs: ['/tmp/t1/.tox/sub/bin/pip', 'install', 'foo', 'coverage: coverage'] Invalid requirement: 'coverage: coverage' Traceback (most recent call last): File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/packaging/requirements.py", line 92, in __init__ req = REQUIREMENT.parseString(requirement_string) File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/pyparsing.py", line 1617, in parseString raise exc File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/pyparsing.py", line 1607, in parseString loc, tokens = self._parse( instring, 0 ) File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/pyparsing.py", line 1379, in _parseNoCache loc,tokens = self.parseImpl( instring, preloc, doActions ) File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/pyparsing.py", line 3376, in parseImpl loc, exprtokens = e._parse( instring, loc, doActions ) File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/pyparsing.py", line 1383, in _parseNoCache loc,tokens = self.parseImpl( instring, preloc, doActions ) File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/pyparsing.py", line 3164, in parseImpl raise ParseException(instring, loc, self.errmsg, self) pip._vendor.pyparsing.ParseException: Expected stringEnd (at char 8), (line:1, col:9) During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/req/req_install.py", line 82, in __init__ req = Requirement(req) File "/tmp/tox/tmp/t1/sub/lib/python3.6/site-packages/pip/_vendor/packaging/requirements.py", line 96, in __init__ requirement_string[e.loc:e.loc + 8])) pip._vendor.packaging.requirements.InvalidRequirement: Invalid requirement, parse error at "': covera'" ERROR: could not install deps [foo, coverage: coverage]; v = InvocationError('/tmp/t1/.tox/sub/bin/pip install foo coverage: coverage (see /tmp/t1/.tox/sub/log/sub-1.log)', 1) __________________________________________ summary __________________________________________ ERROR: sub: could not install deps [foo, coverage: coverage]; v = InvocationError('/tmp/t1/.tox/sub/bin/pip install foo coverage: coverage (see /tmp/t1/.tox/sub/log/sub-1.log)', 1) ``` `tox 2.9.2.dev35` (current master). 1. it should only install 'coverage' and not 'coverage: coverage' 2. it should only install it for when the "coverage" factor is used.
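The `config.py` change above fixes this by running value substitution both before and after the factor pass, so a line pulled in via `{[testenv]deps}` is still filtered by its `coverage:` prefix. The toy below is not tox's real parser — factor expressions are reduced to a bare `name:` prefix and a set-membership check — but it shows why filtering has to see the already-substituted lines:

```python
import re

FACTOR_LINE = re.compile(r"^(\w+):\s*(.+)$")


def apply_factors(lines, active_factors):
    # Keep unprefixed lines; keep a prefixed line only if its factor is active.
    kept = []
    for line in lines:
        match = FACTOR_LINE.match(line)
        if match is None:
            kept.append(line)
        elif match.group(1) in active_factors:
            kept.append(match.group(2))
    return kept


# "{[testenv]deps}" expanded into the "sub" env before filtering:
expanded_deps = ["foo", "coverage: coverage"]
print(apply_factors(expanded_deps, {"sub"}))       # ['foo']
print(apply_factors(expanded_deps, {"coverage"}))  # ['foo', 'coverage']
```

Before the fix the expansion happened only after the factor pass, so the prefixed line was never filtered and leaked to pip as the literal requirement `coverage: coverage`.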
tox-dev/tox
diff --git a/tests/test_config.py b/tests/test_config.py index 26421fa5..61d106bf 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1294,7 +1294,36 @@ class TestConfigTestEnv: ) conf = newconfig([], inisource).envconfigs['py27'] packages = [dep.name for dep in conf.deps] - assert packages == list(deps) + ['fun', 'frob>1.0,<2.0'] + assert packages == ['pytest', 'pytest-cov', 'fun', 'frob>1.0,<2.0'] + + # https://github.com/tox-dev/tox/issues/706 + @pytest.mark.parametrize('envlist', [['py27', 'coverage', 'other']]) + def test_regression_test_issue_706(self, newconfig, envlist): + inisource = """ + [tox] + envlist = {envlist} + [testenv] + deps= + flake8 + coverage: coverage + [testenv:py27] + deps= + {{[testenv]deps}} + fun + """.format( + envlist=','.join(envlist), + ) + conf = newconfig([], inisource).envconfigs['coverage'] + packages = [dep.name for dep in conf.deps] + assert packages == ['flake8', 'coverage'] + + conf = newconfig([], inisource).envconfigs['other'] + packages = [dep.name for dep in conf.deps] + assert packages == ['flake8'] + + conf = newconfig([], inisource).envconfigs['py27'] + packages = [dep.name for dep in conf.deps] + assert packages == ['flake8', 'fun'] def test_take_dependencies_from_other_section(self, newconfig): inisource = """
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 2 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[testing]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-mock", "pytest-timeout", "pytest-xdist" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 execnet==2.1.1 filelock==3.18.0 iniconfig==2.1.0 packaging==24.2 platformdirs==4.3.7 pluggy==0.13.1 py==1.11.0 pytest==7.4.4 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-timeout==2.3.1 pytest-xdist==3.6.1 six==1.17.0 tomli==2.2.1 -e git+https://github.com/tox-dev/tox.git@9a9ea31922c75c275fc2e28ff842b3aad5664257#egg=tox virtualenv==20.29.3
name: tox channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - execnet==2.1.1 - filelock==3.18.0 - iniconfig==2.1.0 - packaging==24.2 - platformdirs==4.3.7 - pluggy==0.13.1 - py==1.11.0 - pytest==7.4.4 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-timeout==2.3.1 - pytest-xdist==3.6.1 - six==1.17.0 - tomli==2.2.1 - tox==3.0.0rc3.dev1+g9a9ea319 - virtualenv==20.29.3 prefix: /opt/conda/envs/tox
[ "tests/test_config.py::TestConfigTestEnv::test_take_dependencies_from_other_testenv[envlist1-deps1]", "tests/test_config.py::TestConfigTestEnv::test_regression_test_issue_706[envlist0]" ]
[ "tests/test_config.py::TestVenvConfig::test_force_dep_with_url", "tests/test_config.py::TestIniParser::test_getbool", "tests/test_config.py::TestCmdInvocation::test_listenvs", "tests/test_config.py::TestCmdInvocation::test_listenvs_verbose_description", "tests/test_config.py::TestCmdInvocation::test_listenvs_all", "tests/test_config.py::TestCmdInvocation::test_no_tox_ini" ]
[ "tests/test_config.py::TestVenvConfig::test_config_parsing_minimal", "tests/test_config.py::TestVenvConfig::test_config_parsing_multienv", "tests/test_config.py::TestVenvConfig::test_envdir_set_manually", "tests/test_config.py::TestVenvConfig::test_envdir_set_manually_with_substitutions", "tests/test_config.py::TestVenvConfig::test_force_dep_version", "tests/test_config.py::TestVenvConfig::test_process_deps", "tests/test_config.py::TestVenvConfig::test_is_same_dep", "tests/test_config.py::TestConfigPlatform::test_config_parse_platform", "tests/test_config.py::TestConfigPlatform::test_config_parse_platform_rex", "tests/test_config.py::TestConfigPlatform::test_config_parse_platform_with_factors[win]", "tests/test_config.py::TestConfigPlatform::test_config_parse_platform_with_factors[lin]", "tests/test_config.py::TestConfigPlatform::test_config_parse_platform_with_factors[osx]", "tests/test_config.py::TestConfigPackage::test_defaults", "tests/test_config.py::TestConfigPackage::test_defaults_distshare", "tests/test_config.py::TestConfigPackage::test_defaults_changed_dir", "tests/test_config.py::TestConfigPackage::test_project_paths", "tests/test_config.py::TestParseconfig::test_search_parents", "tests/test_config.py::TestParseconfig::test_explicit_config_path", "tests/test_config.py::test_get_homedir", "tests/test_config.py::TestGetcontextname::test_blank", "tests/test_config.py::TestGetcontextname::test_jenkins", "tests/test_config.py::TestGetcontextname::test_hudson_legacy", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_substitution_from_other_section", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_substitution_from_other_section_multiline", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_substitution_from_other_section_posargs", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_section_and_posargs_substitution", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_env_substitution", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_env_substitution_global", "tests/test_config.py::TestIniParserAgainstCommandsKey::test_regression_issue595", "tests/test_config.py::TestIniParser::test_getstring_single", "tests/test_config.py::TestIniParser::test_missing_substitution", "tests/test_config.py::TestIniParser::test_getstring_fallback_sections", "tests/test_config.py::TestIniParser::test_getstring_substitution", "tests/test_config.py::TestIniParser::test_getlist", "tests/test_config.py::TestIniParser::test_getdict", "tests/test_config.py::TestIniParser::test_normal_env_sub_works", "tests/test_config.py::TestIniParser::test_missing_env_sub_raises_config_error_in_non_testenv", "tests/test_config.py::TestIniParser::test_missing_env_sub_populates_missing_subs", "tests/test_config.py::TestIniParser::test_getstring_environment_substitution_with_default", "tests/test_config.py::TestIniParser::test_value_matches_section_substitution", "tests/test_config.py::TestIniParser::test_value_doesn_match_section_substitution", "tests/test_config.py::TestIniParser::test_getstring_other_section_substitution", "tests/test_config.py::TestIniParser::test_argvlist", "tests/test_config.py::TestIniParser::test_argvlist_windows_escaping", "tests/test_config.py::TestIniParser::test_argvlist_multiline", "tests/test_config.py::TestIniParser::test_argvlist_quoting_in_command", "tests/test_config.py::TestIniParser::test_argvlist_comment_after_command", 
"tests/test_config.py::TestIniParser::test_argvlist_command_contains_hash", "tests/test_config.py::TestIniParser::test_argvlist_positional_substitution", "tests/test_config.py::TestIniParser::test_argvlist_quoted_posargs", "tests/test_config.py::TestIniParser::test_argvlist_posargs_with_quotes", "tests/test_config.py::TestIniParser::test_positional_arguments_are_only_replaced_when_standing_alone", "tests/test_config.py::TestIniParser::test_posargs_are_added_escaped_issue310", "tests/test_config.py::TestIniParser::test_substitution_with_multiple_words", "tests/test_config.py::TestIniParser::test_getargv", "tests/test_config.py::TestIniParser::test_getpath", "tests/test_config.py::TestIniParserPrefix::test_basic_section_access", "tests/test_config.py::TestIniParserPrefix::test_fallback_sections", "tests/test_config.py::TestIniParserPrefix::test_value_matches_prefixed_section_substitution", "tests/test_config.py::TestIniParserPrefix::test_value_doesn_match_prefixed_section_substitution", "tests/test_config.py::TestIniParserPrefix::test_other_section_substitution", "tests/test_config.py::TestConfigTestEnv::test_commentchars_issue33", "tests/test_config.py::TestConfigTestEnv::test_defaults", "tests/test_config.py::TestConfigTestEnv::test_sitepackages_switch", "tests/test_config.py::TestConfigTestEnv::test_installpkg_tops_develop", "tests/test_config.py::TestConfigTestEnv::test_specific_command_overrides", "tests/test_config.py::TestConfigTestEnv::test_whitelist_externals", "tests/test_config.py::TestConfigTestEnv::test_changedir", "tests/test_config.py::TestConfigTestEnv::test_ignore_errors", "tests/test_config.py::TestConfigTestEnv::test_envbindir", "tests/test_config.py::TestConfigTestEnv::test_envbindir_jython[jython]", "tests/test_config.py::TestConfigTestEnv::test_envbindir_jython[pypy]", "tests/test_config.py::TestConfigTestEnv::test_envbindir_jython[pypy3]", "tests/test_config.py::TestConfigTestEnv::test_passenv_as_multiline_list[win32]", "tests/test_config.py::TestConfigTestEnv::test_passenv_as_multiline_list[linux2]", "tests/test_config.py::TestConfigTestEnv::test_passenv_as_space_separated_list[win32]", "tests/test_config.py::TestConfigTestEnv::test_passenv_as_space_separated_list[linux2]", "tests/test_config.py::TestConfigTestEnv::test_passenv_with_factor", "tests/test_config.py::TestConfigTestEnv::test_passenv_from_global_env", "tests/test_config.py::TestConfigTestEnv::test_passenv_glob_from_global_env", "tests/test_config.py::TestConfigTestEnv::test_changedir_override", "tests/test_config.py::TestConfigTestEnv::test_install_command_setting", "tests/test_config.py::TestConfigTestEnv::test_install_command_must_contain_packages", "tests/test_config.py::TestConfigTestEnv::test_install_command_substitutions", "tests/test_config.py::TestConfigTestEnv::test_pip_pre", "tests/test_config.py::TestConfigTestEnv::test_pip_pre_cmdline_override", "tests/test_config.py::TestConfigTestEnv::test_simple", "tests/test_config.py::TestConfigTestEnv::test_substitution_error", "tests/test_config.py::TestConfigTestEnv::test_substitution_defaults", "tests/test_config.py::TestConfigTestEnv::test_substitution_notfound_issue246", "tests/test_config.py::TestConfigTestEnv::test_substitution_notfound_issue515", "tests/test_config.py::TestConfigTestEnv::test_substitution_nested_env_defaults", "tests/test_config.py::TestConfigTestEnv::test_substitution_positional", "tests/test_config.py::TestConfigTestEnv::test_substitution_noargs_issue240", "tests/test_config.py::TestConfigTestEnv::test_substitution_double", 
"tests/test_config.py::TestConfigTestEnv::test_posargs_backslashed_or_quoted", "tests/test_config.py::TestConfigTestEnv::test_rewrite_posargs", "tests/test_config.py::TestConfigTestEnv::test_rewrite_simple_posargs", "tests/test_config.py::TestConfigTestEnv::test_take_dependencies_from_other_testenv[envlist0-deps0]", "tests/test_config.py::TestConfigTestEnv::test_take_dependencies_from_other_section", "tests/test_config.py::TestConfigTestEnv::test_multilevel_substitution", "tests/test_config.py::TestConfigTestEnv::test_recursive_substitution_cycle_fails", "tests/test_config.py::TestConfigTestEnv::test_single_value_from_other_secton", "tests/test_config.py::TestConfigTestEnv::test_factors", "tests/test_config.py::TestConfigTestEnv::test_factor_ops", "tests/test_config.py::TestConfigTestEnv::test_envconfigs_based_on_factors", "tests/test_config.py::TestConfigTestEnv::test_default_factors", "tests/test_config.py::TestConfigTestEnv::test_factors_in_boolean", "tests/test_config.py::TestConfigTestEnv::test_factors_in_setenv", "tests/test_config.py::TestConfigTestEnv::test_factor_use_not_checked", "tests/test_config.py::TestConfigTestEnv::test_factors_groups_touch", "tests/test_config.py::TestConfigTestEnv::test_period_in_factor", "tests/test_config.py::TestConfigTestEnv::test_ignore_outcome", "tests/test_config.py::TestGlobalOptions::test_notest", "tests/test_config.py::TestGlobalOptions::test_verbosity", "tests/test_config.py::TestGlobalOptions::test_quiet[args0-0]", "tests/test_config.py::TestGlobalOptions::test_quiet[args1-1]", "tests/test_config.py::TestGlobalOptions::test_quiet[args2-2]", "tests/test_config.py::TestGlobalOptions::test_quiet[args3-3]", "tests/test_config.py::TestGlobalOptions::test_substitution_jenkins_default", "tests/test_config.py::TestGlobalOptions::test_substitution_jenkins_context", "tests/test_config.py::TestGlobalOptions::test_sdist_specification", "tests/test_config.py::TestGlobalOptions::test_env_selection", "tests/test_config.py::TestGlobalOptions::test_py_venv", "tests/test_config.py::TestGlobalOptions::test_default_environments", "tests/test_config.py::TestGlobalOptions::test_envlist_expansion", "tests/test_config.py::TestGlobalOptions::test_envlist_cross_product", "tests/test_config.py::TestGlobalOptions::test_envlist_multiline", "tests/test_config.py::TestGlobalOptions::test_minversion", "tests/test_config.py::TestGlobalOptions::test_skip_missing_interpreters_true", "tests/test_config.py::TestGlobalOptions::test_skip_missing_interpreters_false", "tests/test_config.py::TestGlobalOptions::test_defaultenv_commandline", "tests/test_config.py::TestGlobalOptions::test_defaultenv_partial_override", "tests/test_config.py::TestHashseedOption::test_default", "tests/test_config.py::TestHashseedOption::test_passing_integer", "tests/test_config.py::TestHashseedOption::test_passing_string", "tests/test_config.py::TestHashseedOption::test_passing_empty_string", "tests/test_config.py::TestHashseedOption::test_passing_no_argument", "tests/test_config.py::TestHashseedOption::test_setenv", "tests/test_config.py::TestHashseedOption::test_noset", "tests/test_config.py::TestHashseedOption::test_noset_with_setenv", "tests/test_config.py::TestHashseedOption::test_one_random_hashseed", "tests/test_config.py::TestHashseedOption::test_setenv_in_one_testenv", "tests/test_config.py::TestSetenv::test_getdict_lazy", "tests/test_config.py::TestSetenv::test_getdict_lazy_update", "tests/test_config.py::TestSetenv::test_setenv_uses_os_environ", 
"tests/test_config.py::TestSetenv::test_setenv_default_os_environ", "tests/test_config.py::TestSetenv::test_setenv_uses_other_setenv", "tests/test_config.py::TestSetenv::test_setenv_recursive_direct", "tests/test_config.py::TestSetenv::test_setenv_overrides", "tests/test_config.py::TestSetenv::test_setenv_with_envdir_and_basepython", "tests/test_config.py::TestSetenv::test_setenv_ordering_1", "tests/test_config.py::TestSetenv::test_setenv_cross_section_subst_issue294", "tests/test_config.py::TestSetenv::test_setenv_cross_section_subst_twice", "tests/test_config.py::TestSetenv::test_setenv_cross_section_mixed", "tests/test_config.py::TestIndexServer::test_indexserver", "tests/test_config.py::TestIndexServer::test_parse_indexserver", "tests/test_config.py::TestIndexServer::test_multiple_homedir_relative_local_indexservers", "tests/test_config.py::TestConfigConstSubstitutions::test_replace_pathsep_unix[:]", "tests/test_config.py::TestConfigConstSubstitutions::test_replace_pathsep_unix[;]", "tests/test_config.py::TestConfigConstSubstitutions::test_pathsep_regex", "tests/test_config.py::TestParseEnv::test_parse_recreate", "tests/test_config.py::TestCmdInvocation::test_help", "tests/test_config.py::TestCmdInvocation::test_version_simple", "tests/test_config.py::TestCmdInvocation::test_version_no_plugins", "tests/test_config.py::TestCmdInvocation::test_version_with_normal_plugin", "tests/test_config.py::TestCmdInvocation::test_version_with_fileless_module", "tests/test_config.py::TestCmdInvocation::test_listenvs_all_verbose_description", "tests/test_config.py::TestCmdInvocation::test_listenvs_all_verbose_description_no_additional_environments", "tests/test_config.py::TestCmdInvocation::test_config_specific_ini", "tests/test_config.py::TestCmdInvocation::test_override_workdir", "tests/test_config.py::TestCmdInvocation::test_showconfig_with_force_dep_version", "tests/test_config.py::test_env_spec[-e", "tests/test_config.py::TestCommandParser::test_command_parser_for_word", "tests/test_config.py::TestCommandParser::test_command_parser_for_posargs", "tests/test_config.py::TestCommandParser::test_command_parser_for_multiple_words", "tests/test_config.py::TestCommandParser::test_command_parser_for_substitution_with_spaces", "tests/test_config.py::TestCommandParser::test_command_parser_with_complex_word_set", "tests/test_config.py::TestCommandParser::test_command_with_runs_of_whitespace", "tests/test_config.py::TestCommandParser::test_command_with_split_line_in_subst_arguments", "tests/test_config.py::TestCommandParser::test_command_parsing_for_issue_10" ]
[]
MIT License
2,286
[ "doc/config.rst", "tox/config.py", "changelog/706.bugfix.rst" ]
[ "doc/config.rst", "tox/config.py", "changelog/706.bugfix.rst" ]
ofek__bit-32
89dcd0a8d05b785691b34a9b28dee157ee419bea
2018-03-11 19:22:19
89dcd0a8d05b785691b34a9b28dee157ee419bea
teran-mckinney: Thank you for reviewing! Could you consider making a new version and pushing it to pip if this looks good to you now?
diff --git a/.gitignore b/.gitignore index 9367500..ba62003 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *.log *.pyc +*.orig /.cache /.idea /.coverage diff --git a/bit/network/fees.py b/bit/network/fees.py index 58a783a..4204400 100644 --- a/bit/network/fees.py +++ b/bit/network/fees.py @@ -1,3 +1,4 @@ +import logging from functools import wraps from time import time @@ -54,7 +55,12 @@ def get_fee_local_cache(f): cached_fee_fast = request.json()['fastestFee'] fast_last_update = now except (ConnectionError, HTTPError, Timeout): # pragma: no cover - return cached_fee_fast or DEFAULT_FEE_FAST + if cached_fee_fast is None: + logging.warning('Connection to fee API failed, returning default fee (fast) of {}'.format(DEFAULT_FEE_FAST)) + return DEFAULT_FEE_FAST + else: + logging.warning('Connection to fee API failed, returning cached fee (fast).') + return cached_fee_fast return cached_fee_fast @@ -71,7 +77,12 @@ def get_fee_local_cache(f): cached_fee_hour = request.json()['hourFee'] hour_last_update = now except (ConnectionError, HTTPError, Timeout): # pragma: no cover - return cached_fee_hour or DEFAULT_FEE_HOUR + if cached_fee_hour is None: + logging.warning('Connection to fee API failed, returning default fee (hour) of {}'.format(DEFAULT_FEE_HOUR)) + return DEFAULT_FEE_HOUR + else: + logging.warning('Connection to fee API failed, returning cached fee (hour).') + return cached_fee_hour return cached_fee_hour diff --git a/bit/transaction.py b/bit/transaction.py index 7ef723c..4f38bdd 100644 --- a/bit/transaction.py +++ b/bit/transaction.py @@ -1,3 +1,4 @@ +import logging from collections import namedtuple from itertools import islice @@ -70,10 +71,19 @@ def estimate_tx_fee(n_in, n_out, satoshis, compressed): + 8 ) - return estimated_size * satoshis + estimated_fee = estimated_size * satoshis + + logging.debug('Estimated fee: {} satoshis for {} bytes'.format(estimated_fee, estimated_size)) + + return estimated_fee def sanitize_tx_data(unspents, outputs, fee, leftover, combine=True, message=None, compressed=True): + """ + sanitize_tx_data() + + fee is in satoshis per byte. + """ outputs = outputs.copy() @@ -94,12 +104,15 @@ def sanitize_tx_data(unspents, outputs, fee, leftover, combine=True, message=Non messages.append((message, 0)) # Include return address in fee estimate. - fee = estimate_tx_fee(len(unspents), len(outputs) + len(messages) + 1, fee, compressed) - total_out = sum(out[1] for out in outputs) + fee total_in = 0 + num_outputs = len(outputs) + len(messages) + 1 + sum_outputs = sum(out[1] for out in outputs) if combine: + # calculated_fee is in total satoshis. + calculated_fee = estimate_tx_fee(len(unspents), num_outputs, fee, compressed) + total_out = sum_outputs + calculated_fee unspents = unspents.copy() total_in += sum(unspent.amount for unspent in unspents) @@ -110,6 +123,8 @@ def sanitize_tx_data(unspents, outputs, fee, leftover, combine=True, message=Non for index, unspent in enumerate(unspents): total_in += unspent.amount + calculated_fee = estimate_tx_fee(len(unspents[:index + 1]), num_outputs, fee, compressed) + total_out = sum_outputs + calculated_fee if total_in >= total_out: break
Bug with send(combine=False) I think there's some weird behavior here. https://blockchain.info/tx/6519c1977bf0863aed1b70caeaeb521c32fab43e5fd663c310f09cdee63d0ad6?show_adv=true The fee was super high but I think that was a side effect of the main bug.
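The `transaction.py` change above makes the `combine=False` path re-estimate the fee for only the unspents actually consumed, instead of pricing in every unspent the wallet owns. A rough sketch of that selection loop — `estimate_fee()` here uses illustrative, roughly-P2PKH byte sizes, not bit's real `estimate_tx_fee()`:

```python
def estimate_fee(n_in, n_out, satoshis_per_byte):
    # Illustrative size model only (approximate P2PKH input/output sizes).
    return (n_in * 148 + n_out * 34 + 10) * satoshis_per_byte


def select_unspents(unspents, sum_outputs, n_out, fee_rate):
    total_in = 0
    for index, amount in enumerate(unspents):
        total_in += amount
        # Fee is recomputed for the inputs used so far, as in the patch.
        needed = sum_outputs + estimate_fee(index + 1, n_out, fee_rate)
        if total_in >= needed:
            return unspents[:index + 1], total_in - needed
    raise ValueError("insufficient funds")


# One 5000-satoshi unspent already covers a 1000-satoshi output plus its own
# fee, so the second unspent is neither consumed nor billed for:
print(select_unspents([5000, 5000], sum_outputs=1000, n_out=2, fee_rate=1))
# ([5000], 3774)
```

That matches the intent of `test_no_combine_with_fee` in the test patch below: a wallet holding one or two 5000-satoshi unspents now pays the same fee for the same spend.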
ofek/bit
diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 9f792ec..fc1a881 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -172,6 +172,48 @@ class TestSanitizeTxData: assert outputs[1][0] == RETURN_ADDRESS assert outputs[1][1] == 1000 + def test_no_combine_remaining_small_inputs(self): + unspents_original = [Unspent(1500, 0, '', '', 0), + Unspent(1600, 0, '', '', 0), + Unspent(1700, 0, '', '', 0)] + outputs_original = [(RETURN_ADDRESS, 2000, 'satoshi')] + + unspents, outputs = sanitize_tx_data( + unspents_original, outputs_original, fee=0, leftover=RETURN_ADDRESS, + combine=False, message=None + ) + assert unspents == [Unspent(1500, 0, '', '', 0), Unspent(1600, 0, '', '', 0)] + assert len(outputs) == 2 + assert outputs[1][0] == RETURN_ADDRESS + assert outputs[1][1] == 1100 + + def test_no_combine_with_fee(self): + """ + Verify that unused unspents do not increase fee. + """ + unspents_single = [Unspent(5000, 0, '', '', 0)] + unspents_original = [Unspent(5000, 0, '', '', 0), + Unspent(5000, 0, '', '', 0)] + outputs_original = [(RETURN_ADDRESS, 1000, 'satoshi')] + + unspents, outputs = sanitize_tx_data( + unspents_original, outputs_original, fee=1, leftover=RETURN_ADDRESS, + combine=False, message=None + ) + + unspents_single, outputs_single = sanitize_tx_data( + unspents_single, outputs_original, fee=1, leftover=RETURN_ADDRESS, + combine=False, message=None + ) + + assert unspents == [Unspent(5000, 0, '', '', 0)] + assert unspents_single == [Unspent(5000, 0, '', '', 0)] + assert len(outputs) == 2 + assert len(outputs_single) == 2 + assert outputs[1][0] == RETURN_ADDRESS + assert outputs_single[1][0] == RETURN_ADDRESS + assert outputs[1][1] == outputs_single[1][1] + def test_no_combine_insufficient_funds(self): unspents_original = [Unspent(1000, 0, '', '', 0), Unspent(1000, 0, '', '', 0)]
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 3, "test_score": 0 }, "num_modified_files": 3 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/ofek/bit.git@89dcd0a8d05b785691b34a9b28dee157ee419bea#egg=bit certifi==2025.1.31 charset-normalizer==3.4.1 coincurve==21.0.0 coverage==7.8.0 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 tomli==2.2.1 urllib3==2.3.0
name: bit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - coincurve==21.0.0 - coverage==7.8.0 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/bit
[ "tests/test_transaction.py::TestSanitizeTxData::test_no_combine_with_fee" ]
[]
[ "tests/test_transaction.py::TestTxIn::test_init", "tests/test_transaction.py::TestTxIn::test_equality", "tests/test_transaction.py::TestTxIn::test_repr", "tests/test_transaction.py::TestSanitizeTxData::test_no_input", "tests/test_transaction.py::TestSanitizeTxData::test_message", "tests/test_transaction.py::TestSanitizeTxData::test_fee_applied", "tests/test_transaction.py::TestSanitizeTxData::test_zero_remaining", "tests/test_transaction.py::TestSanitizeTxData::test_combine_remaining", "tests/test_transaction.py::TestSanitizeTxData::test_combine_insufficient_funds", "tests/test_transaction.py::TestSanitizeTxData::test_no_combine_remaining", "tests/test_transaction.py::TestSanitizeTxData::test_no_combine_remaining_small_inputs", "tests/test_transaction.py::TestSanitizeTxData::test_no_combine_insufficient_funds", "tests/test_transaction.py::TestCreateSignedTransaction::test_matching", "tests/test_transaction.py::TestEstimateTxFee::test_accurate_compressed", "tests/test_transaction.py::TestEstimateTxFee::test_accurate_uncompressed", "tests/test_transaction.py::TestEstimateTxFee::test_none", "tests/test_transaction.py::TestConstructOutputBlock::test_no_message", "tests/test_transaction.py::TestConstructOutputBlock::test_message", "tests/test_transaction.py::TestConstructOutputBlock::test_long_message", "tests/test_transaction.py::test_construct_input_block", "tests/test_transaction.py::test_calc_txid" ]
[]
MIT License
2,287
[ ".gitignore", "bit/network/fees.py", "bit/transaction.py" ]
[ ".gitignore", "bit/network/fees.py", "bit/transaction.py" ]
lig__pyventory-18
67d6a2a607b6731ecc83ee65ed90f78dbc2c73aa
2018-03-11 23:28:00
67d6a2a607b6731ecc83ee65ed90f78dbc2c73aa
diff --git a/pyventory/asset.py b/pyventory/asset.py index 1d58aa6..2b9a962 100644 --- a/pyventory/asset.py +++ b/pyventory/asset.py @@ -7,71 +7,119 @@ from pyventory import errors __all__ = ['Asset'] -class Asset: +class SKIP_ATTR: + pass + + +class AssetAttr: + _value = None + _name = None + + def __init__(self, value): + self._value = value + + def __get__(self, instance, owner): + if instance: + return self._value + + if not isinstance(self._value, (str, Mapping, Sequence)): + return self._value + + def get_attr(value): + return owner._get_attr(owner, self._name, strict=True) + + value_type = type(self._value) + return type( + value_type.__name__, + (value_type,), + {'__call__': get_attr} + )(self._value) + + def __set_name__(self, owner, name): + self._name = name + + +class AssetMeta(type): + + def __new__(cls, name, bases, namespace, **kwds): + new_namespace = { + '_name': f'{namespace["__module__"]}.{name}', + } + + for key, value in namespace.items(): + if not key.startswith('_'): + value = AssetAttr(value) + new_namespace[key] = value + + return super().__new__(cls, name, bases, new_namespace, **kwds) + + +class Asset(metaclass=AssetMeta): + _string_format_regex = re.compile(r'{([\w_]+)}') - def __init__(self, **kwargs): - for name, value in kwargs.items(): - setattr(self, name, value) - - def _vars(self): - return self.__build_vars(self, strict_format=True) - - @classmethod - def _cls_vars(cls): - return cls.__build_vars(cls) - - @classmethod - def _name(cls): - return '{module}.{name}'.format( - module=cls.__module__, name=cls.__name__) - - @classmethod - def __build_vars(cls, obj, strict_format=False): - _vars = { - attr_name: getattr(obj, attr_name) - for attr_name in dir(obj) - if not attr_name.startswith('_')} - - for name, value in _vars.copy().items(): - try: - _vars[name] = cls.__format_value(value, _vars, name) - except NotImplementedError: - if strict_format: - raise errors.PropertyIsNotImplementedError( - f'Var "{name}" is not implemented in "{obj._name()}"' - ' asset instance') - else: - del _vars[name] - except KeyError as e: - if strict_format: - raise errors.ValueSubstitutionError( - f'Attribute "{e.args[0]}" must be available for' - ' "{obj._name()}" asset instance') - else: - del _vars[name] - except errors.ValueSubstitutionInfiniteLoopError: - raise errors.ValueSubstitutionInfiniteLoopError( - f'Attribute "{name}" has an infinite string substitution' - f' loop for "{obj._name()}" asset instance') - - return _vars - - @classmethod - def __format_value(cls, value, context, start_key): + def __new__(cls, **kwargs): + self = super().__new__(cls) + self.__dict__.update(kwargs) + self.__dict__.update(self._vars(self, strict=True)) + return self + + @staticmethod + def _attrs(obj): + return [name for name in dir(obj) if not name.startswith('_')] + + @staticmethod + def _context(obj): + return {name: getattr(obj, name) for name in obj._attrs(obj)} + + @staticmethod + def _vars(obj, strict=False): + return { + name: value + for name, value in ( + (name, obj._get_attr(obj, name, strict=strict)) + for name in obj._attrs(obj)) + if value is not SKIP_ATTR} + + @staticmethod + def _get_attr(obj, name, strict=False): + try: + context = obj._context(obj).copy() + return obj._format_value(obj, context, context[name], name) + except NotImplementedError: + if strict: + raise errors.PropertyIsNotImplementedError( + f'Var "{name}" is not implemented in "{obj._name}" asset') + else: + return SKIP_ATTR + except KeyError as e: + if strict: + raise errors.ValueSubstitutionError( + 
f'Attribute "{e.args[0]}" must be available for' + f' "{obj._name}" asset instance') + else: + return SKIP_ATTR + except errors.ValueSubstitutionInfiniteLoopError: + raise errors.ValueSubstitutionInfiniteLoopError( + f'Attribute "{name}" has an infinite string substitution' + f' loop in "{obj._name}" asset instance') + + @staticmethod + def _format_value(obj, context, value, start_key): if value is NotImplemented: raise NotImplementedError if isinstance(value, str): - for key in cls._string_format_regex.findall(value): + for key in obj._string_format_regex.findall(value): if key == start_key: raise errors.ValueSubstitutionInfiniteLoopError - context[key] = cls.__format_value( - context[key], context, start_key) + context[key] = obj._format_value( + obj, context, context[key], start_key) return value.format(**context) if isinstance(value, Mapping): return { - k: cls.__format_value(v, context, start_key) + k: obj._format_value(obj, context, v, start_key) for k, v in value.items()} if isinstance(value, Sequence): - return [cls.__format_value(v, context, start_key) for v in value] + return [ + obj._format_value(obj, context, v, start_key) for v in value] return value diff --git a/pyventory/export.py b/pyventory/export.py index 1ea2c70..dddf750 100644 --- a/pyventory/export.py +++ b/pyventory/export.py @@ -11,48 +11,54 @@ from pyventory.inventory import Inventory __all__ = ['pyventory_data', 'ansible_inventory', 'terraform_vars'] -def pyventory_data(hosts): +def pyventory_data(instances): """Provides raw inventory data as Python `dict` containing Asset data in - `assets` key and hosts data in `hosts` key. + `assets` key and instances data in `instances` key. """ - inventory = Inventory(hosts) + inventory = Inventory(instances) assets = { name: attr.asdict(asset) for name, asset in inventory.assets.items()} for asset in assets.values(): - for attr_name in ('hosts', 'vars', 'children',): + for attr_name in ('instances', 'vars', 'children',): if not asset[attr_name]: del asset[attr_name] - hosts = inventory.hosts.copy() + instances = inventory.instances.copy() - return {'assets': assets, 'hosts': hosts} + return {'assets': assets, 'instances': instances} -def ansible_inventory(hosts, out=sys.stdout, indent=None): +def ansible_inventory(instances, out=sys.stdout, indent=None): """Dumps inventory in the Ansible's Dynamic Inventory JSON format to `out`. """ - raw_data = pyventory_data(hosts) + raw_data = pyventory_data(instances) - data = raw_data['assets'] - data['_meta'] = {'hostvars': raw_data['hosts']} + data = {} + + for key, value in raw_data['assets'].items(): + if 'instances' in value: + value['hosts'] = value.pop('instances') + data[key] = value + + data['_meta'] = {'hostvars': raw_data['instances']} json.dump(data, out, indent=indent, default=list) -def terraform_vars(hosts, filename_base='pyventory', indent=None): +def terraform_vars(instances, filename_base='pyventory', indent=None): """Dumps inventory in the Terraform's JSON format to `<filename_base>.tf` setting their values as defaults. 
""" tf_config_path = pathlib.Path(filename_base).with_suffix('.tf') - raw_data = pyventory_data(hosts) + raw_data = pyventory_data(instances) tf_config = {} - for asset_name, asset_data in raw_data['hosts'].items(): + for asset_name, asset_data in raw_data['instances'].items(): for name, value in asset_data.items(): diff --git a/pyventory/inventory.py b/pyventory/inventory.py index b789166..d8a43c5 100644 --- a/pyventory/inventory.py +++ b/pyventory/inventory.py @@ -1,5 +1,5 @@ -from ordered_set import OrderedSet import attr +from ordered_set import OrderedSet from pyventory.asset import Asset @@ -11,28 +11,28 @@ __all__ = [] class AssetData: vars = attr.ib(default=attr.Factory(dict)) children = attr.ib(default=attr.Factory(OrderedSet)) - hosts = attr.ib(default=attr.Factory(OrderedSet)) + instances = attr.ib(default=attr.Factory(OrderedSet)) class Inventory: - def __init__(self, hosts): + def __init__(self, instances): self.assets = {} - self.hosts = {} + self.instances = {} - for name, host in sorted(hosts.items()): - self.add_host(name, host) + for name, instance in sorted(instances.items()): + self.add_instance(name, instance) - def add_host(self, name, host): - if not isinstance(host, Asset): + def add_instance(self, name, instance): + if not isinstance(instance, Asset): return - self.hosts[name] = host._vars() - self.add_asset(host.__class__) - self.assets[host._name()].hosts.add(name) + self.instances[name] = instance._vars(instance, strict=True) + self.add_asset(instance.__class__) + self.assets[instance._name].instances.add(name) def add_asset(self, asset): - if asset._name() in self.assets: + if asset._name in self.assets: return for parent_asset in asset.__bases__: @@ -44,6 +44,6 @@ class Inventory: continue self.add_asset(parent_asset) - self.assets[parent_asset._name()].children.add(asset._name()) + self.assets[parent_asset._name].children.add(asset._name) - self.assets[asset._name()] = AssetData(vars=asset._cls_vars()) + self.assets[asset._name] = AssetData(vars=asset._vars(asset))
Allow to use calculated value of an asset class Given the following code ```python class MyAsset(Asset): foo = 'value is {bar}' bar = 'my value' ``` Accessing `MyAsset.foo` will return `'value is {bar}'`. It will be useful to be able to get `'value is my value'` instead.
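A minimal standalone sketch of the behaviour the issue asks for, assuming plain Python classes and `str.format`-style references. This is not pyventory's implementation (the patch above realises it with a metaclass and a descriptor); `resolve` and `_REF` are hypothetical names and there is no cycle detection:

```python
import re

# Hypothetical helper, for illustration only: resolve "{name}" references
# between the public class attributes of a plain class.
_REF = re.compile(r"{(\w+)}")

def resolve(cls, name):
    context = {k: v for k, v in vars(cls).items() if not k.startswith("_")}
    value = context[name]
    # Keep formatting until no "{...}" references remain (no cycle detection).
    while isinstance(value, str) and _REF.search(value):
        value = value.format(**context)
    return value

class MyAsset:
    foo = 'value is {bar}'
    bar = 'my value'

print(resolve(MyAsset, 'foo'))  # value is my value
```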
lig/pyventory
diff --git a/tests/test_asset.py b/tests/test_asset.py new file mode 100644 index 0000000..2489c87 --- /dev/null +++ b/tests/test_asset.py @@ -0,0 +1,31 @@ +import pytest + +from pyventory import Asset, errors + + +def test_calculate_asset_class_atribute_value_on_call(): + + class TestAsset(Asset): + foo = '{bar}' + bar = 'bar' + + assert TestAsset.foo() == 'bar' + + +def test_use_raw_asset_class_atribute_value(): + + class TestAsset(Asset): + foo = '{bar}-{baz}' + bar = 'bar' + + assert TestAsset.foo == '{bar}-{baz}' + + +def test_asset_class_atribute_value_calculation_is_strict(): + + class TestAsset(Asset): + foo = '{bar}-{baz}' + bar = 'bar' + + with pytest.raises(errors.ValueSubstitutionError): + TestAsset.foo() diff --git a/tests/test_inventory.py b/tests/test_inventory.py index ac7ebf4..541189f 100644 --- a/tests/test_inventory.py +++ b/tests/test_inventory.py @@ -26,12 +26,12 @@ def test_allow_mixins_for_inventory_items(): ], }, "test_inventory.TestAsset": { - "hosts": [ + "instances": [ "test_asset", ], }, }, - "hosts": { + "instances": { "test_asset": {} }, } @@ -49,12 +49,12 @@ def test_allow_host_specific_vars(): assert result == { 'assets': { "test_inventory.TestAsset": { - "hosts": [ + "instances": [ "test_asset", ], }, }, - "hosts": { + "instances": { "test_asset": { "foo": "bar" }, @@ -74,12 +74,12 @@ def test_allow_format_strings_as_values(): assert result == { 'assets': { "test_inventory.TestAsset": { - "hosts": [ + "instances": [ "test_asset" ] }, }, - "hosts": { + "instances": { "test_asset": { "bar": "ham", "foo": "test_ham" @@ -102,12 +102,12 @@ def test_allow_mapping_of_format_strings_as_values(): assert result == { 'assets': { "test_inventory.TestAsset": { - "hosts": [ + "instances": [ "test_asset" ] }, }, - "hosts": { + "instances": { "test_asset": { "bar": "ham", "foo": { @@ -130,12 +130,12 @@ def test_allow_sequence_of_format_strings_as_values(): assert result == { 'assets': { "test_inventory.TestAsset": { - "hosts": [ + "instances": [ "test_asset" ] }, }, - "hosts": { + "instances": { "test_asset": { "bar": "ham", "foo": [ @@ -162,12 +162,12 @@ def test_strings_formatting_do_not_conflict_with_numbers(): "vars": { "foo": 42 }, - "hosts": [ + "instances": [ "test_asset" ] }, }, - "hosts": { + "instances": { "test_asset": { "bar": "ham", "foo": 42 @@ -181,10 +181,8 @@ def test_require_arguments_for_format_strings(): class TestAsset(Asset): foo = '{bar}' - test_asset = TestAsset() - with pytest.raises(errors.ValueSubstitutionError): - pyventory_data(locals()) + test_asset = TestAsset() def test_inheritance_with_format(): @@ -207,12 +205,12 @@ def test_inheritance_with_format(): ] }, "test_inventory.ChildAsset": { - "hosts": [ + "instances": [ "child_asset" ] }, }, - "hosts": { + "instances": { "child_asset": { "bar": "ham", "foo": "ham" @@ -274,12 +272,12 @@ def test_deep_multiple_inheritance_propagation(): "baz": "Level3Asset4 baz value", "foo": "Level1Asset1 foo value" }, - "hosts": [ + "instances": [ "level3_asset4" ] }, }, - "hosts": { + "instances": { "level3_asset4": { "bar": "Level1Asset2 bar value", "baz": "Level3Asset4 baz value", @@ -305,12 +303,12 @@ def test_skip_non_asset_locals(): assert result == { 'assets': { "test_inventory.TestAsset": { - "hosts": [ + "instances": [ "test_asset" ] }, }, - "hosts": { + "instances": { "test_asset": {} } } @@ -341,17 +339,17 @@ def test_multiple_children(): ] }, "test_inventory.TestAsset1": { - "hosts": [ + "instances": [ "test_asset1" ] }, "test_inventory.TestAsset2": { - "hosts": [ + "instances": [ "test_asset2" ] 
}, }, - "hosts": { + "instances": { "test_asset1": {}, "test_asset2": {} } @@ -381,12 +379,12 @@ def test_allow_notimplemented_value(): "vars": { "foo": "bar" }, - "hosts": [ + "instances": [ "test_asset" ] }, }, - "hosts": { + "instances": { "test_asset": { "foo": "bar" } @@ -402,10 +400,8 @@ def test_raise_notimplemented_value_in_final_asset(): class TestAsset(BaseTestAsset): pass - test_asset = TestAsset() - with pytest.raises(errors.PropertyIsNotImplementedError): - pyventory_data(locals()) + test_asset = TestAsset() def test_string_format_does_not_miss_values(): @@ -443,7 +439,7 @@ def test_string_format_does_not_miss_values(): "baz": "baz-value", "foo": "baz-value" }, - "hosts": [ + "instances": [ "test_asset_1" ] }, @@ -453,12 +449,12 @@ def test_string_format_does_not_miss_values(): "baz": "baz-value", "foo": "baz-value" }, - "hosts": [ + "instances": [ "test_asset_2" ] }, }, - "hosts": { + "instances": { "test_asset_1": { "bar": "baz-value", "baz": "baz-value", @@ -479,7 +475,5 @@ def test_string_format_detects_infinite_loop(): bar = '{foo}' foo = '{bar}' - test_asset = TestAsset() - with pytest.raises(errors.ValueSubstitutionInfiniteLoopError): - pyventory_data(locals()) + test_asset = TestAsset()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 3 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "tox" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 distlib==0.3.9 filelock==3.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work ordered-set==4.0.2 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work platformdirs==2.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 -e git+https://github.com/lig/pyventory.git@67d6a2a607b6731ecc83ee65ed90f78dbc2c73aa#egg=Pyventory six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tox==3.28.0 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work virtualenv==20.17.1 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pyventory channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - distlib==0.3.9 - filelock==3.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - ordered-set==4.0.2 - platformdirs==2.4.0 - six==1.17.0 - tox==3.28.0 - virtualenv==20.17.1 prefix: /opt/conda/envs/pyventory
[ "tests/test_asset.py::test_calculate_asset_class_atribute_value_on_call", "tests/test_asset.py::test_asset_class_atribute_value_calculation_is_strict", "tests/test_inventory.py::test_allow_mixins_for_inventory_items", "tests/test_inventory.py::test_allow_host_specific_vars", "tests/test_inventory.py::test_allow_format_strings_as_values", "tests/test_inventory.py::test_allow_mapping_of_format_strings_as_values", "tests/test_inventory.py::test_allow_sequence_of_format_strings_as_values", "tests/test_inventory.py::test_strings_formatting_do_not_conflict_with_numbers", "tests/test_inventory.py::test_require_arguments_for_format_strings", "tests/test_inventory.py::test_inheritance_with_format", "tests/test_inventory.py::test_deep_multiple_inheritance_propagation", "tests/test_inventory.py::test_skip_non_asset_locals", "tests/test_inventory.py::test_multiple_children", "tests/test_inventory.py::test_allow_notimplemented_value", "tests/test_inventory.py::test_raise_notimplemented_value_in_final_asset", "tests/test_inventory.py::test_string_format_does_not_miss_values", "tests/test_inventory.py::test_string_format_detects_infinite_loop" ]
[]
[ "tests/test_asset.py::test_use_raw_asset_class_atribute_value" ]
[]
MIT License
2,288
[ "pyventory/inventory.py", "pyventory/export.py", "pyventory/asset.py" ]
[ "pyventory/inventory.py", "pyventory/export.py", "pyventory/asset.py" ]
awslabs__aws-cfn-template-flip-43
168476fed202b08221f163de22adb9cb859d937e
2018-03-12 14:21:52
168476fed202b08221f163de22adb9cb859d937e
diff --git a/cfn_flip/yaml_dumper.py b/cfn_flip/yaml_dumper.py index 85b287d..2a3a764 100644 --- a/cfn_flip/yaml_dumper.py +++ b/cfn_flip/yaml_dumper.py @@ -15,7 +15,9 @@ See the License for the specific language governing permissions and limitations from cfn_clean.yaml_dumper import CleanCfnYamlDumper from cfn_tools.odict import ODict from cfn_tools.yaml_dumper import CfnYamlDumper +import six +TAG_STR = "tag:yaml.org,2002:str" TAG_MAP = "tag:yaml.org,2002:map" CONVERTED_SUFFIXES = ["Ref", "Condition"] @@ -46,6 +48,13 @@ class LongCleanDumper(CleanCfnYamlDumper): """ +def string_representer(dumper, value): + if value.startswith("0"): + return dumper.represent_scalar(TAG_STR, value, style="'") + + return dumper.represent_scalar(TAG_STR, value) + + def fn_representer(dumper, fn_name, value): tag = "!{}".format(fn_name) @@ -82,6 +91,7 @@ def map_representer(dumper, value): # Customise our dumpers Dumper.add_representer(ODict, map_representer) +Dumper.add_representer(six.text_type, string_representer) CleanDumper.add_representer(ODict, map_representer)
Inconsistent conversion of strings from json to yaml I am converting a document from json to yaml as part of a CloudFormation Template, and am noticing an odd error where some Id's that are marked as strings are being converted to strings, and other times not. Here's a json snippet I'm working with right now which are the mappings for some of the Generic Elastic Load Balancer ID's for AWS: ``` "Mappings": { "Regions": { "us-east-1": { "ELBID": "127311923021", "Name": "ue1" }, "us-east-2": { "ELBID": "033677994240", "Name": "ue2" }, "us-west-1": { "ELBID": "027434742980", "Name": "uw1" }, "us-west-2": { "ELBID": "797873946194", "Name": "uw2" } } } ``` And This is the resulting yaml I'm getting after calling to_yaml: ``` Mappings: Regions: us-east-1: ELBID: '127311923021' Name: ue1 us-east-2: ELBID: 033677994240 Name: ue2 us-west-1: ELBID: 027434742980 Name: uw1 us-west-2: ELBID: '797873946194' Name: uw2 ``` Strangely enough, any number beginning with 0 is converted, but the ones beginning with other numbers do not. I'm not sure what the expected behavior should be in this case, (either fully converted or not) but having it half and half is inconsistent, and I would believe is a bug. Currently I'm having errors with using this yaml with sceptre/CloudFormation due to some of the Elastic Load Balancer ID's not being strings.
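The patch above special-cases strings with a leading zero; the underlying PyYAML mechanism, a custom representer registered for `str`, can be sketched in isolation as below. This is illustrative only and not cfn_flip's code; `quoted_string_representer` is a made-up name, and here every all-digit string is quoted:

```python
import yaml

# Illustrative representer: single-quote any all-digit string so a value such
# as "033677994240" cannot be re-read as a number when the YAML is parsed again.
def quoted_string_representer(dumper, value):
    if value.isdigit():
        return dumper.represent_scalar('tag:yaml.org,2002:str', value, style="'")
    return dumper.represent_scalar('tag:yaml.org,2002:str', value)

yaml.add_representer(str, quoted_string_representer)

print(yaml.dump({'ELBID': '033677994240', 'Name': 'ue2'}, default_flow_style=False))
# ELBID: '033677994240'
# Name: ue2
```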
awslabs/aws-cfn-template-flip
diff --git a/tests/test_flip.py b/tests/test_flip.py index c479a20..5ac0cee 100644 --- a/tests/test_flip.py +++ b/tests/test_flip.py @@ -502,5 +502,39 @@ def test_get_dumper(): When invoking get_dumper use clean_up & long_form :return: LongCleanDumper """ + resp = cfn_flip.get_dumper(clean_up=True, long_form=True) assert resp == cfn_flip.yaml_dumper.LongCleanDumper + + +def test_quoted_digits(): + """ + Any value that is composed entirely of digits + should be quoted for safety. + CloudFormation is happy for numbers to appear as strings. + But the opposite (e.g. account numbers as numbers) can cause issues + See https://github.com/awslabs/aws-cfn-template-flip/issues/41 + """ + + value = dump_json(ODict(( + ("int", 123456), + ("float", 123.456), + ("oct", "0123456"), + ("bad-oct", "012345678"), + ("safe-oct", "0o123456"), + ("string", "abcdef"), + ))) + + expected = "\n".join(( + "int: 123456", + "float: 123.456", + "oct: '0123456'", + "bad-oct: '012345678'", + "safe-oct: '0o123456'", + "string: abcdef", + "" + )) + + actual = cfn_flip.to_yaml(value) + + assert actual == expected
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-sugar" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 -e git+https://github.com/awslabs/aws-cfn-template-flip.git@168476fed202b08221f163de22adb9cb859d937e#egg=cfn_flip click==8.0.4 coverage==6.2 distlib==0.3.9 filelock==3.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-sugar==0.9.6 PyYAML==6.0.1 six==1.17.0 termcolor==1.1.0 toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 virtualenv==20.17.1 zipp==3.6.0
name: aws-cfn-template-flip channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==8.0.4 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-sugar==0.9.6 - pyyaml==6.0.1 - six==1.17.0 - termcolor==1.1.0 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - virtualenv==20.17.1 - zipp==3.6.0 prefix: /opt/conda/envs/aws-cfn-template-flip
[ "tests/test_flip.py::test_quoted_digits" ]
[ "tests/test_flip.py::test_to_yaml_with_yaml", "tests/test_flip.py::test_flip_with_bad_data", "tests/test_flip.py::test_explicit_json_rejects_yaml", "tests/test_flip.py::test_explicit_yaml_rejects_bad_yaml" ]
[ "tests/test_flip.py::test_flip_to_json_with_datetimes", "tests/test_flip.py::test_flip_to_yaml_with_clean_getatt", "tests/test_flip.py::test_flip_to_yaml_with_multi_level_getatt", "tests/test_flip.py::test_flip_to_yaml_with_dotted_getatt", "tests/test_flip.py::test_flip_to_json_with_multi_level_getatt", "tests/test_flip.py::test_getatt_from_yaml", "tests/test_flip.py::test_flip_to_json_with_condition", "tests/test_flip.py::test_flip_to_yaml_with_newlines", "tests/test_flip.py::test_clean_flip_to_yaml_with_newlines", "tests/test_flip.py::test_unconverted_types", "tests/test_flip.py::test_get_dumper" ]
[]
Apache License 2.0
2,289
[ "cfn_flip/yaml_dumper.py" ]
[ "cfn_flip/yaml_dumper.py" ]
datosgobar__pydatajson-124
70ed537996200a79c3d3f16d4862160731314dac
2018-03-12 17:31:24
adb85a7de7dfa073ddf9817a5fe2d125f9ce4e54
diff --git a/docs/MANUAL.md b/docs/MANUAL.md index 556645c..2191b01 100644 --- a/docs/MANUAL.md +++ b/docs/MANUAL.md @@ -94,6 +94,8 @@ Toma los siguientes parámetros: - **portal_url**: URL del portal de CKAN de destino. - **apikey**: La apikey de un usuario del portal de destino con los permisos para crear el dataset bajo la organización pasada como parámetro. + - **demote_themes** (opcional, default: True): Si está en true, los labels de los themes del dataset, se escriben como + tags de CKAN; sino,se pasan como grupo. Retorna el id en el nodo de destino del dataset federado. diff --git a/pydatajson/ckan_utils.py b/pydatajson/ckan_utils.py index 65c4cb2..31f6737 100644 --- a/pydatajson/ckan_utils.py +++ b/pydatajson/ckan_utils.py @@ -14,7 +14,7 @@ def append_attribute_to_extra(package, dataset, attribute, serialize=False): package['extras'].append({'key': attribute, 'value': value}) -def map_dataset_to_package(dataset, catalog_id): +def map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, demote_themes=True): package = dict() package['extras'] = [] # Obligatorios @@ -24,6 +24,7 @@ def map_dataset_to_package(dataset, catalog_id): package['private'] = False package['notes'] = dataset['description'] package['author'] = dataset['publisher']['name'] + package['owner_org'] = owner_org append_attribute_to_extra(package, dataset, 'issued') append_attribute_to_extra(package, dataset, 'accrualPeriodicity') @@ -44,7 +45,6 @@ def map_dataset_to_package(dataset, catalog_id): append_attribute_to_extra(package, dataset, 'language', serialize=True) spatial = dataset.get('spatial') - if spatial: serializable = type(spatial) is list append_attribute_to_extra(package, dataset, 'spatial', serializable) @@ -58,6 +58,16 @@ def map_dataset_to_package(dataset, catalog_id): if keywords: package['tags'] = [{'name': keyword} for keyword in keywords] + # Move themes to keywords + themes = dataset.get('theme', []) + if themes and demote_themes: + package['tags'] = package.get('tags', []) + for theme in themes: + label = next(x['label'] for x in theme_taxonomy if x['id'] == theme) + package['tags'].append({'name': label}) + else: + package['groups'] += [{'name': title_to_name(theme, decode=False)} for theme in themes] + return package diff --git a/pydatajson/federation.py b/pydatajson/federation.py index a8f137f..f9a4f6b 100644 --- a/pydatajson/federation.py +++ b/pydatajson/federation.py @@ -9,7 +9,8 @@ from .ckan_utils import map_dataset_to_package from .search import get_datasets -def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifier, portal_url, apikey): +def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifier, portal_url, apikey, + demote_themes=True): """Escribe la metadata de un dataset en el portal pasado por parámetro. Args: @@ -19,15 +20,17 @@ def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifi dataset_origin_identifier (str): El id del dataset que se va a federar. portal_url (str): La URL del portal CKAN de destino. apikey (str): La apikey de un usuario con los permisos que le permitan crear o actualizar el dataset. + demote_themes(bool): Si está en true, los labels de los themes del dataset, pasan a ser tags. Sino, + se pasan como grupo. Returns: str: El id del dataset en el catálogo de destino. 
""" dataset = catalog.get_dataset(dataset_origin_identifier) ckan_portal = RemoteCKAN(portal_url, apikey=apikey) + theme_taxonomy = catalog.themes - package = map_dataset_to_package(dataset, catalog_id) - package['owner_org'] = owner_org + package = map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, demote_themes=demote_themes) # Get license id if dataset.get('license'): @@ -42,15 +45,6 @@ def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifi else: package['license_id'] = 'notspecified' - # Move themes to keywords - themes = dataset.get('theme') - if themes: - package['tags'] = package.get('tags') or [] - theme_taxonomy = catalog.themes - for theme in themes: - label = next(x['label'] for x in theme_taxonomy if x['id'] == theme) - package['tags'].append({'name': label}) - try: pushed_package = ckan_portal.call_action( 'package_update', data_dict=package)
Add an option to move `theme` to `keyword` in `push_dataset_to_ckan` **Context** Specific themes (`theme`) are topics that make sense within the context and subject domain in which each original node operates. They serve to classify its datasets into thematic categories specific to the problem space that node addresses, but whose granularity is too fine for the generality of the data of the whole public administration. The `theme` of a dataset in its original node is an extension of the `keyword` of that dataset in the aggregator node (they are labels). **Implement** `pydatajson` has to _downgrade_ the original `theme` values to the role of labels (`keyword`) in the aggregator node. Add to the `push_dataset_to_ckan` function an optional argument `theme_to_keyword` defaulting to `True`. This flag moves (it _removes_ them from one place and writes them in the other) all the original elements of the `theme` array in a dataset's original metadata and adds them to the `keyword` of the dataset that is going to be pushed to a CKAN. **Notes** This is a transformation of the original metadata whose purpose is to preserve the exclusive use of cross-cutting themes (avoiding the use of specific themes) for a dataset in a different portal context (one with a cross-cutting audience and thematic scope, whereas the original node has a specific audience and thematic scope). **Deliverable** The new option in `push_dataset_to_ckan`, together with its associated tests and the corresponding changes to the documentation in the RTD docs.
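A standalone sketch of the transformation described above, outside pydatajson's API (the actual patch threads this through `map_dataset_to_package`); the function name and the sample data are hypothetical:

```python
# Hypothetical sketch: move a dataset's specific themes into its keywords,
# using the catalog's theme taxonomy to map theme ids to labels.
def demote_themes_to_keywords(dataset, theme_taxonomy):
    labels = {t['id']: t['label'] for t in theme_taxonomy}
    keywords = list(dataset.get('keyword', []))
    keywords += [labels[theme] for theme in dataset.pop('theme', [])]
    dataset['keyword'] = keywords
    return dataset

dataset = {'keyword': ['salud'], 'theme': ['turismo']}
taxonomy = [{'id': 'turismo', 'label': 'Turismo'}]
print(demote_themes_to_keywords(dataset, taxonomy))
# {'keyword': ['salud', 'Turismo']}
```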
datosgobar/pydatajson
diff --git a/tests/test_ckan_utils.py b/tests/test_ckan_utils.py index 942bd39..3884251 100644 --- a/tests/test_ckan_utils.py +++ b/tests/test_ckan_utils.py @@ -23,16 +23,17 @@ class DatasetConversionTestCase(unittest.TestCase): cls.distributions = cls.dataset['distribution'] def test_replicated_plain_attributes_are_corrext(self): - package = map_dataset_to_package(self.dataset, self.catalog_id) + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) plain_replicated_attributes = [('title', 'title'), ('notes', 'description'), ('url', 'landingPage')] for fst, snd in plain_replicated_attributes: self.assertEqual(self.dataset.get(snd), package.get(fst)) + self.assertEqual('owner', package['owner_org']) self.assertEqual(self.catalog_id+'_'+self.dataset_id, package['id']) def test_dataset_nested_replicated_attributes_stay_the_same(self): - package = map_dataset_to_package(self.dataset, self.catalog_id) + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) contact_point_nested = [('maintainer', 'fn'), ('maintainer_email', 'hasEmail')] for fst, snd in contact_point_nested: @@ -43,7 +44,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(self.dataset.get('publisher').get(snd), package.get(fst)) def test_dataset_array_attributes_are_correct(self): - package = map_dataset_to_package(self.dataset, self.catalog_id) + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) groups = [group['name'] for group in package.get('groups', [])] super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] try: @@ -53,13 +54,38 @@ class DatasetConversionTestCase(unittest.TestCase): tags = [tag['name'] for tag in package['tags']] keywords = self.dataset.get('keyword', []) + + themes = self.dataset.get('theme', []) + theme_labels = [] + for theme in themes: + label = next(x['label'] for x in self.catalog.themes if x['id'] == theme) + theme_labels.append(label) + + try: + self.assertItemsEqual(keywords + theme_labels, tags) + except AttributeError: + self.assertCountEqual(keywords + theme_labels, tags) + + def test_themes_are_preserved_if_not_demoted(self): + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes, + demote_themes=False) + groups = [group['name'] for group in package.get('groups', [])] + super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] + themes = self.dataset.get('theme', []) + tags = [tag['name'] for tag in package['tags']] + keywords = self.dataset.get('keyword', []) + + try: + self.assertItemsEqual(super_themes + themes, groups) + except AttributeError: + self.assertCountEqual(super_themes + themes, groups) try: self.assertItemsEqual(keywords, tags) except AttributeError: self.assertCountEqual(keywords, tags) def test_dataset_extra_attributes_are_correct(self): - package = map_dataset_to_package(self.dataset, self.catalog_id) + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) # extras are included in dataset if package['extras']: for extra in package['extras']: @@ -78,7 +104,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(dataset_value, extra_value) def test_dataset_extra_attributes_are_complete(self): - package = map_dataset_to_package(self.dataset, self.catalog_id) + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) # dataset 
attributes are included in extras extra_attrs = ['issued', 'modified', 'accrualPeriodicity', 'temporal', 'language', 'spatial'] for key in extra_attrs:
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 3 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "coverage", "python-coveralls", "pytest" ], "pre_install": [ "mkdir tests/temp" ], "python": "3.6", "reqs_path": [ "requirements.txt", "requirements_dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 argh==0.27.2 asn1crypto==1.5.1 attrs==22.2.0 Babel==2.11.0 bumpversion==0.5.3 certifi==2021.5.30 cffi==1.15.1 chardet==3.0.4 ckanapi==4.0 CommonMark==0.5.4 coverage==4.1 cryptography==2.1.4 distlib==0.3.9 docopt==0.6.2 docutils==0.18.1 et-xmlfile==1.1.0 filelock==3.4.1 flake8==2.6.0 idna==2.6 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 isodate==0.6.0 jdcal==1.4.1 Jinja2==3.0.3 jsonschema==2.6.0 MarkupSafe==2.0.1 mccabe==0.5.3 multidict==5.2.0 nose==1.3.7 openpyxl==2.4.11 packaging==21.3 pathtools==0.1.2 pkginfo==1.10.0 platformdirs==2.4.0 pluggy==0.13.1 pockets==0.9.1 py==1.11.0 pycodestyle==2.0.0 pycparser==2.21 -e git+https://github.com/datosgobar/pydatajson.git@70ed537996200a79c3d3f16d4862160731314dac#egg=pydatajson pyflakes==1.2.3 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 python-coveralls==2.9.3 python-dateutil==2.6.1 pytz==2025.2 PyYAML==3.11 recommonmark==0.4.0 requests==2.18.4 requests-toolbelt==1.0.0 rfc3987==1.3.7 six==1.11.0 snowballstemmer==2.2.0 Sphinx==1.5.2 sphinx-rtd-theme==0.2.4 sphinxcontrib-napoleon==0.6.1 tomli==1.2.3 tox==2.9.1 tqdm==4.64.1 twine==1.9.1 typing_extensions==4.1.1 unicodecsv==0.14.1 Unidecode==0.4.21 urllib3==1.22 vcrpy==1.11.1 virtualenv==20.17.1 watchdog==0.8.3 wrapt==1.16.0 yarl==1.7.2 zipp==3.6.0
name: pydatajson channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - argh==0.27.2 - asn1crypto==1.5.1 - attrs==22.2.0 - babel==2.11.0 - bumpversion==0.5.3 - cffi==1.15.1 - chardet==3.0.4 - ckanapi==4.0 - commonmark==0.5.4 - coverage==4.1 - cryptography==2.1.4 - distlib==0.3.9 - docopt==0.6.2 - docutils==0.18.1 - et-xmlfile==1.1.0 - filelock==3.4.1 - flake8==2.6.0 - idna==2.6 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - isodate==0.6.0 - jdcal==1.4.1 - jinja2==3.0.3 - jsonschema==2.6.0 - markupsafe==2.0.1 - mccabe==0.5.3 - multidict==5.2.0 - nose==1.3.7 - openpyxl==2.4.11 - packaging==21.3 - pathtools==0.1.2 - pkginfo==1.10.0 - platformdirs==2.4.0 - pluggy==0.13.1 - pockets==0.9.1 - py==1.11.0 - pycodestyle==2.0.0 - pycparser==2.21 - pyflakes==1.2.3 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-coveralls==2.9.3 - python-dateutil==2.6.1 - pytz==2025.2 - pyyaml==3.11 - recommonmark==0.4.0 - requests==2.18.4 - requests-toolbelt==1.0.0 - rfc3987==1.3.7 - six==1.11.0 - snowballstemmer==2.2.0 - sphinx==1.5.2 - sphinx-rtd-theme==0.2.4 - sphinxcontrib-napoleon==0.6.1 - tomli==1.2.3 - tox==2.9.1 - tqdm==4.64.1 - twine==1.9.1 - typing-extensions==4.1.1 - unicodecsv==0.14.1 - unidecode==0.04.21 - urllib3==1.22 - vcrpy==1.11.1 - virtualenv==20.17.1 - watchdog==0.8.3 - wrapt==1.16.0 - yarl==1.7.2 - zipp==3.6.0 prefix: /opt/conda/envs/pydatajson
[ "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_array_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_complete", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_nested_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_replicated_plain_attributes_are_corrext", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_themes_are_preserved_if_not_demoted" ]
[]
[ "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_extra_attributes_are_created_correctly", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_transformed_attributes_are_correct", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_change_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_microseconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_seconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_timezones_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_timezones_are_handled_correctly" ]
[]
MIT License
2,290
[ "pydatajson/ckan_utils.py", "pydatajson/federation.py", "docs/MANUAL.md" ]
[ "pydatajson/ckan_utils.py", "pydatajson/federation.py", "docs/MANUAL.md" ]
datosgobar__pydatajson-125
3c428354f3f1b48b9b70815ba370e8cd1b11b07b
2018-03-12 20:48:48
adb85a7de7dfa073ddf9817a5fe2d125f9ce4e54
diff --git a/docs/MANUAL.md b/docs/MANUAL.md index 2191b01..39d9678 100644 --- a/docs/MANUAL.md +++ b/docs/MANUAL.md @@ -94,8 +94,11 @@ Toma los siguientes parámetros: - **portal_url**: URL del portal de CKAN de destino. - **apikey**: La apikey de un usuario del portal de destino con los permisos para crear el dataset bajo la organización pasada como parámetro. + - **demote_superThemes** (opcional, default: True):Si está en true, los ids de los themes del dataset, se escriben + como groups de CKAN. - **demote_themes** (opcional, default: True): Si está en true, los labels de los themes del dataset, se escriben como tags de CKAN; sino,se pasan como grupo. + Retorna el id en el nodo de destino del dataset federado. diff --git a/pydatajson/ckan_utils.py b/pydatajson/ckan_utils.py index 31f6737..b915bde 100644 --- a/pydatajson/ckan_utils.py +++ b/pydatajson/ckan_utils.py @@ -14,7 +14,8 @@ def append_attribute_to_extra(package, dataset, attribute, serialize=False): package['extras'].append({'key': attribute, 'value': value}) -def map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, demote_themes=True): +def map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, + demote_superThemes=True, demote_themes=True): package = dict() package['extras'] = [] # Obligatorios @@ -33,8 +34,10 @@ def map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, demot package['resources'] = map_distributions_to_resources(distributions, catalog_id) super_themes = dataset['superTheme'] - package['groups'] = [{'name': title_to_name(super_theme, decode=False)} for super_theme in super_themes] append_attribute_to_extra(package, dataset, 'superTheme', serialize=True) + if demote_superThemes: + package['groups'] = [{'name': title_to_name(super_theme, decode=False)} for super_theme in super_themes] + # Recomendados y opcionales package['url'] = dataset.get('landingPage') @@ -66,7 +69,8 @@ def map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, demot label = next(x['label'] for x in theme_taxonomy if x['id'] == theme) package['tags'].append({'name': label}) else: - package['groups'] += [{'name': title_to_name(theme, decode=False)} for theme in themes] + package['groups'] = package.get('groups', []) + [{'name': title_to_name(theme, decode=False)} + for theme in themes] return package diff --git a/pydatajson/federation.py b/pydatajson/federation.py index f9a4f6b..7807dfe 100644 --- a/pydatajson/federation.py +++ b/pydatajson/federation.py @@ -10,7 +10,7 @@ from .search import get_datasets def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifier, portal_url, apikey, - demote_themes=True): + demote_superThemes=True, demote_themes=True): """Escribe la metadata de un dataset en el portal pasado por parámetro. Args: @@ -20,6 +20,7 @@ def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifi dataset_origin_identifier (str): El id del dataset que se va a federar. portal_url (str): La URL del portal CKAN de destino. apikey (str): La apikey de un usuario con los permisos que le permitan crear o actualizar el dataset. + demote_superThemes(bool): Si está en true, los ids de los super themes del dataset, se propagan como grupo. demote_themes(bool): Si está en true, los labels de los themes del dataset, pasan a ser tags. Sino, se pasan como grupo. 
@@ -30,7 +31,8 @@ def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifi ckan_portal = RemoteCKAN(portal_url, apikey=apikey) theme_taxonomy = catalog.themes - package = map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, demote_themes=demote_themes) + package = map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, + demote_superThemes, demote_themes) # Get license id if dataset.get('license'):
Add an option to copy `superTheme` to `theme` in `push_dataset_to_ckan` **Context** Global themes (`superTheme`) are cross-cutting topics under which original nodes classify their datasets in order to index them in an aggregator node. The `superTheme` of a dataset in its original node is the `theme` of that dataset in the aggregator node (these are the themes CKAN uses for display on the Portal's landing page). **Implement** Add to the `push_dataset_to_ckan` function an optional argument `superTheme_to_theme` defaulting to `True`. This _flag_ copies all the elements of the `superTheme` array in a dataset's original metadata and adds them to the `theme` array of the dataset that is going to be pushed to a CKAN. **Notes** This is a transformation of the original metadata whose purpose is to correctly federate a dataset's metadata in a different portal context (one with a _cross-cutting_ audience and thematic scope, whereas the original node has a _specific_ audience and thematic scope). **Deliverable** The new option in `push_dataset_to_ckan`, together with its associated tests and the corresponding changes to the documentation in the RTD docs.
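Similarly, a standalone sketch of the copy described above, independent of pydatajson's actual function signature; `promote_super_themes` is a hypothetical name:

```python
# Hypothetical sketch: copy the dataset's global themes (superTheme) into its
# theme array before the dataset is pushed to an aggregator CKAN portal.
def promote_super_themes(dataset):
    themes = list(dataset.get('theme', []))
    themes += [st for st in dataset.get('superTheme', []) if st not in themes]
    dataset['theme'] = themes
    return dataset

print(promote_super_themes({'superTheme': ['ECON'], 'theme': ['turismo']}))
# {'superTheme': ['ECON'], 'theme': ['turismo', 'ECON']}
```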
datosgobar/pydatajson
diff --git a/tests/test_ckan_utils.py b/tests/test_ckan_utils.py index 3884251..dca112c 100644 --- a/tests/test_ckan_utils.py +++ b/tests/test_ckan_utils.py @@ -67,8 +67,8 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords + theme_labels, tags) def test_themes_are_preserved_if_not_demoted(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes, - demote_themes=False) + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', + self.catalog.themes, demote_themes=False) groups = [group['name'] for group in package.get('groups', [])] super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] themes = self.dataset.get('theme', []) @@ -84,15 +84,48 @@ class DatasetConversionTestCase(unittest.TestCase): except AttributeError: self.assertCountEqual(keywords, tags) + def test_superThemes_dont_impact_groups_if_not_demoted(self): + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', + self.catalog.themes, demote_superThemes=False) + groups = [group['name'] for group in package.get('groups', [])] + tags = [tag['name'] for tag in package['tags']] + keywords = self.dataset.get('keyword', []) + themes = self.dataset.get('theme', []) + theme_labels = [] + for theme in themes: + label = next(x['label'] for x in self.catalog.themes if x['id'] == theme) + theme_labels.append(label) + try: + self.assertItemsEqual([], groups) + except AttributeError: + self.assertCountEqual([], groups) + try: + self.assertItemsEqual(keywords + theme_labels, tags) + except AttributeError: + self.assertCountEqual(keywords + theme_labels, tags) + + def test_preserve_themes_and_superThemes(self): + package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', + self.catalog.themes, False, False) + groups = [group['name'] for group in package.get('groups', [])] + tags = [tag['name'] for tag in package['tags']] + keywords = self.dataset.get('keyword', []) + themes = self.dataset.get('theme', []) + try: + self.assertItemsEqual(themes, groups) + except AttributeError: + self.assertCountEqual(themes, groups) + try: + self.assertItemsEqual(keywords, tags) + except AttributeError: + self.assertCountEqual(keywords, tags) + def test_dataset_extra_attributes_are_correct(self): package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) # extras are included in dataset if package['extras']: for extra in package['extras']: - if extra['key'] == 'super_theme': - dataset_value = self.dataset['superTheme'] - else: - dataset_value = self.dataset[extra['key']] + dataset_value = self.dataset[extra['key']] if type(dataset_value) is list: extra_value = json.loads(extra['value']) try: @@ -106,7 +139,7 @@ class DatasetConversionTestCase(unittest.TestCase): def test_dataset_extra_attributes_are_complete(self): package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) # dataset attributes are included in extras - extra_attrs = ['issued', 'modified', 'accrualPeriodicity', 'temporal', 'language', 'spatial'] + extra_attrs = ['issued', 'modified', 'accrualPeriodicity', 'temporal', 'language', 'spatial', 'superTheme'] for key in extra_attrs: value = self.dataset.get(key) if value: @@ -115,8 +148,6 @@ class DatasetConversionTestCase(unittest.TestCase): resulting_dict = {'key': key, 'value': value} self.assertTrue(resulting_dict in package['extras']) - self.assertTrue({'key': 'super_theme', 'value': 
json.dumps(self.dataset['superTheme'])}) - def test_resources_replicated_attributes_stay_the_same(self): resources = map_distributions_to_resources(self.distributions, self.catalog_id+'_'+self.dataset_id) for resource in resources:
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 3 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "flake8", "pytest" ], "pre_install": [ "mkdir tests/temp" ], "python": "3.6", "reqs_path": [ "requirements.txt", "requirements_dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 argh==0.27.2 asn1crypto==1.5.1 attrs==22.2.0 Babel==2.11.0 bumpversion==0.5.3 certifi==2021.5.30 cffi==1.15.1 chardet==3.0.4 ckanapi==4.0 CommonMark==0.5.4 coverage==4.1 cryptography==2.1.4 distlib==0.3.9 docopt==0.6.2 docutils==0.18.1 et-xmlfile==1.1.0 filelock==3.4.1 flake8==2.6.0 idna==2.6 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 isodate==0.6.0 jdcal==1.4.1 Jinja2==3.0.3 jsonschema==2.6.0 MarkupSafe==2.0.1 mccabe==0.5.3 multidict==5.2.0 nose==1.3.7 openpyxl==2.4.11 packaging==21.3 pathtools==0.1.2 pkginfo==1.10.0 platformdirs==2.4.0 pluggy==0.13.1 pockets==0.9.1 py==1.11.0 pycodestyle==2.0.0 pycparser==2.21 -e git+https://github.com/datosgobar/pydatajson.git@3c428354f3f1b48b9b70815ba370e8cd1b11b07b#egg=pydatajson pyflakes==1.2.3 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.6.1 pytz==2025.2 PyYAML==3.11 recommonmark==0.4.0 requests==2.18.4 requests-toolbelt==1.0.0 rfc3987==1.3.7 six==1.11.0 snowballstemmer==2.2.0 Sphinx==1.5.2 sphinx-rtd-theme==0.2.4 sphinxcontrib-napoleon==0.6.1 tomli==1.2.3 tox==2.9.1 tqdm==4.64.1 twine==1.9.1 typing_extensions==4.1.1 unicodecsv==0.14.1 Unidecode==0.4.21 urllib3==1.22 vcrpy==1.11.1 virtualenv==20.17.1 watchdog==0.8.3 wrapt==1.16.0 yarl==1.7.2 zipp==3.6.0
name: pydatajson channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - argh==0.27.2 - asn1crypto==1.5.1 - attrs==22.2.0 - babel==2.11.0 - bumpversion==0.5.3 - cffi==1.15.1 - chardet==3.0.4 - ckanapi==4.0 - commonmark==0.5.4 - coverage==4.1 - cryptography==2.1.4 - distlib==0.3.9 - docopt==0.6.2 - docutils==0.18.1 - et-xmlfile==1.1.0 - filelock==3.4.1 - flake8==2.6.0 - idna==2.6 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - isodate==0.6.0 - jdcal==1.4.1 - jinja2==3.0.3 - jsonschema==2.6.0 - markupsafe==2.0.1 - mccabe==0.5.3 - multidict==5.2.0 - nose==1.3.7 - openpyxl==2.4.11 - packaging==21.3 - pathtools==0.1.2 - pkginfo==1.10.0 - platformdirs==2.4.0 - pluggy==0.13.1 - pockets==0.9.1 - py==1.11.0 - pycodestyle==2.0.0 - pycparser==2.21 - pyflakes==1.2.3 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.6.1 - pytz==2025.2 - pyyaml==3.11 - recommonmark==0.4.0 - requests==2.18.4 - requests-toolbelt==1.0.0 - rfc3987==1.3.7 - six==1.11.0 - snowballstemmer==2.2.0 - sphinx==1.5.2 - sphinx-rtd-theme==0.2.4 - sphinxcontrib-napoleon==0.6.1 - tomli==1.2.3 - tox==2.9.1 - tqdm==4.64.1 - twine==1.9.1 - typing-extensions==4.1.1 - unicodecsv==0.14.1 - unidecode==0.04.21 - urllib3==1.22 - vcrpy==1.11.1 - virtualenv==20.17.1 - watchdog==0.8.3 - wrapt==1.16.0 - yarl==1.7.2 - zipp==3.6.0 prefix: /opt/conda/envs/pydatajson
[ "tests/test_ckan_utils.py::DatasetConversionTestCase::test_preserve_themes_and_superThemes", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_superThemes_dont_impact_groups_if_not_demoted" ]
[]
[ "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_array_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_complete", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_nested_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_replicated_plain_attributes_are_corrext", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_extra_attributes_are_created_correctly", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_transformed_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_themes_are_preserved_if_not_demoted", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_change_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_microseconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_seconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_timezones_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_timezones_are_handled_correctly" ]
[]
MIT License
2,291
[ "pydatajson/ckan_utils.py", "pydatajson/federation.py", "docs/MANUAL.md" ]
[ "pydatajson/ckan_utils.py", "pydatajson/federation.py", "docs/MANUAL.md" ]
imageio__imageio-321
a22145a9ebbd3aa3bc742911e052ed1903381486
2018-03-12 22:22:56
a22145a9ebbd3aa3bc742911e052ed1903381486
coveralls: [![Coverage Status](https://coveralls.io/builds/15938362/badge)](https://coveralls.io/builds/15938362) Coverage increased (+0.01%) to 91.796% when pulling **ca0257922f85a3b76bcb813670da9feab6702b7e on ffmpeg** into **a22145a9ebbd3aa3bc742911e052ed1903381486 on master**.
diff --git a/imageio/plugins/ffmpeg.py b/imageio/plugins/ffmpeg.py index ae17fe2..61be3bf 100644 --- a/imageio/plugins/ffmpeg.py +++ b/imageio/plugins/ffmpeg.py @@ -174,6 +174,11 @@ class FfmpegFormat(Format): Parameters for reading ---------------------- + fps : scalar + The number of frames per second to read the data at. Default None (i.e. + read at the file's own fps). One can use this for files with a + variable fps, or in cases where imageio is unable to correctly detect + the fps. loop : bool If True, the video will rewind as soon as a frame is requested beyond the last frame. Otherwise, IndexError is raised. Default False. @@ -187,10 +192,14 @@ class FfmpegFormat(Format): "gray"). The camera needs to support the format in order for this to take effect. Note that the images produced by this reader are always rgb8. - ffmpeg_params: list + input_params : list List additional arguments to ffmpeg for input file options. + (Can also be provided as ``ffmpeg_params`` for backwards compatibility) Example ffmpeg arguments to use aggressive error handling: ['-err_detect', 'aggressive'] + output_params : list + List additional arguments to ffmpeg for output file options (i.e. the + stream being read by imageio). print_info : bool Print information about the video file as reported by ffmpeg. @@ -205,7 +214,7 @@ class FfmpegFormat(Format): quality : float | None Video output quality. Default is 5. Uses variable bit rate. Highest quality is 10, lowest is 0. Set to None to prevent variable bitrate - flags to FFMPEG so you can manually specify them using ffmpeg_params + flags to FFMPEG so you can manually specify them using output_params instead. Specifying a fixed bitrate using 'bitrate' disables this parameter. bitrate : int | None @@ -217,8 +226,12 @@ class FfmpegFormat(Format): pixelformat: str The output video pixel format. Default is 'yuv420p' which most widely supported by video players. - ffmpeg_params: list + input_params : list + List additional arguments to ffmpeg for input file options (i.e. the + stream that imageio provides). + output_params : list List additional arguments to ffmpeg for output file options. + (Can also be provided as ``ffmpeg_params`` for backwards compatibility) Example ffmpeg arguments to use only intra frames and set aspect ratio: ['-intra', '-aspect', '16:9'] ffmpeg_log_level: str @@ -285,7 +298,7 @@ class FfmpegFormat(Format): infos = proc.stderr.read().decode('utf-8') # Return device name at index try: - name = self._parse_device_names(infos)[index] + name = parse_device_names(infos)[index] except IndexError: raise IndexError('No ffdshow camera at index %i.' % index) return 'video=%s' % name @@ -299,26 +312,9 @@ class FfmpegFormat(Format): else: # pragma: no cover return '??' 
- @staticmethod - def _parse_device_names(ffmpeg_output): - """ Parse the output of the ffmpeg -list-devices command""" - device_names = [] - in_video_devices = False - for line in ffmpeg_output.splitlines(): - if line.startswith('[dshow'): - logging.debug(line) - line = line.split(']', 1)[1].strip() - if in_video_devices and line.startswith('"'): - device_names.append(line[1:-1]) - elif 'video devices' in line: - in_video_devices = True - elif 'devices' in line: - # set False for subsequent "devices" sections - in_video_devices = False - return device_names - def _open(self, loop=False, size=None, pixelformat=None, - ffmpeg_params=None, print_info=False): + print_info=False, ffmpeg_params=None, + input_params=None, output_params=None, fps=None): # Get exe self._exe = self._get_exe() # Process input args @@ -336,7 +332,9 @@ class FfmpegFormat(Format): elif not isinstance(pixelformat, string_types): raise ValueError('FFMPEG pixelformat must be str') self._arg_pixelformat = pixelformat - self._arg_ffmpeg_params = ffmpeg_params if ffmpeg_params else [] + self._arg_input_params = input_params or [] + self._arg_output_params = output_params or [] + self._arg_input_params += ffmpeg_params or [] # backward compat # Write "_video"_arg self.request._video = None if self.request.filename in ['<video%i>' % i for i in range(10)]: @@ -418,9 +416,13 @@ class FfmpegFormat(Format): '-pix_fmt', self._pix_fmt, '-vcodec', 'rawvideo'] oargs.extend(['-s', self._arg_size] if self._arg_size else []) + if self.request.kwargs.get('fps', None): + fps = float(self.request.kwargs['fps']) + oargs += ['-r', "%.02f" % fps] # Create process - cmd = [self._exe] + self._arg_ffmpeg_params - cmd += iargs + ['-i', self._filename] + oargs + ['-'] + cmd = [self._exe] + self._arg_input_params + cmd += iargs + ['-i', self._filename] + cmd += oargs + self._arg_output_params + ['-'] # For Windows, set `shell=True` in sp.Popen to prevent popup # of a command line window in frozen applications. self._proc = sp.Popen(cmd, stdin=sp.PIPE, @@ -447,19 +449,21 @@ class FfmpegFormat(Format): # Also appears this epsilon below is needed to ensure frame # accurate seeking in some cases epsilon = -1/self._meta['fps']*0.1 - iargs = ['-ss', "%.06f" % (starttime+epsilon), - '-i', self._filename, - ] + iargs = ['-ss', "%.06f" % (starttime+epsilon)] + iargs += ['-i', self._filename] # Output args, for writing to pipe oargs = ['-f', 'image2pipe', '-pix_fmt', self._pix_fmt, '-vcodec', 'rawvideo'] oargs.extend(['-s', self._arg_size] if self._arg_size else []) - + if self.request.kwargs.get('fps', None): + fps = float(self.request.kwargs['fps']) + oargs += ['-r', "%.02f" % fps] + # Create process - cmd = [self._exe] + self._arg_ffmpeg_params - cmd += iargs + oargs + ['-'] + cmd = [self._exe] + self._arg_input_params + iargs + cmd += oargs + self._arg_output_params + ['-'] # For Windows, set `shell=True` in sp.Popen to prevent popup # of a command line window in frozen applications. self._proc = sp.Popen(cmd, stdin=sp.PIPE, @@ -481,7 +485,7 @@ class FfmpegFormat(Format): # Using kill since self._proc.terminate() does not seem # to work for ffmpeg, leaves processes hanging self._proc.kill() - + # Tell threads to stop when they have a chance. They are probably # blocked on reading from their file, but let's play it safe. if self._stderr_catcher: @@ -528,51 +532,20 @@ class FfmpegFormat(Format): raise IOError("Could not open steam %s." % self._filename) else: # pragma: no cover - this is checked by Request raise IOError("%s not found! Wrong path?" 
% self._filename) - - # Get version - ver = lines[0].split('version', 1)[-1].split('Copyright')[0] - self._meta['ffmpeg_version'] = ver.strip() + ' ' + lines[1].strip() - - # get the output line that speaks about video - videolines = [l for l in lines if l.lstrip().startswith('Stream ') - and ' Video: ' in l] - line = videolines[0] - # get the frame rate - matches = re.findall(" ([0-9]+\.?[0-9]*) (tbr|fps)", line) - fps = 0 - if matches: # Can be empty, see #171, assume nframes = inf - fps = float(matches[0][0].strip()) - self._meta['fps'] = fps - - # get the size of the original stream, of the form 460x320 (w x h) - match = re.search(" [0-9]*x[0-9]*(,| )", line) - parts = line[match.start():match.end()-1].split('x') - self._meta['source_size'] = tuple(map(int, parts)) - - # get the size of what we receive, of the form 460x320 (w x h) - line = videolines[-1] # Pipe output - match = re.search(" [0-9]*x[0-9]*(,| )", line) - parts = line[match.start():match.end()-1].split('x') - self._meta['size'] = tuple(map(int, parts)) - - # Check the two sizes - if self._meta['source_size'] != self._meta['size']: - logging.warning('Warning: the frame size for reading %s is ' - 'different from the source frame size %s.' % - (self._meta['size'], - self._meta['source_size'])) - - # get duration (in seconds) - line = [l for l in lines if 'Duration: ' in l][0] - match = re.search(" [0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9]", - line) - if match is not None: - hms = map(float, line[match.start()+1:match.end()].split(':')) - self._meta['duration'] = duration = cvsecs(*hms) - if fps: - self._meta['nframes'] = int(round(duration*fps)) - + # Go! + self._meta.update(parse_ffmpeg_info(lines)) + + # Update with fps with user-value? + if self.request.kwargs.get('fps', None): + self._meta['fps'] = float(self.request.kwargs['fps']) + + # Estimate nframes + self._meta['nframes'] = np.inf + if self._meta['fps'] > 0 and 'duration' in self._meta: + n = int(round(self._meta['duration'] * self._meta['fps'])) + self._meta['nframes'] = n + def _read_frame_data(self): # Init and check w, h = self._meta['size'] @@ -632,6 +605,7 @@ class FfmpegFormat(Format): def _open(self, fps=10, codec='libx264', bitrate=None, pixelformat='yuv420p', ffmpeg_params=None, + input_params=None, output_params=None, ffmpeg_log_level="quiet", quality=5, macro_block_size=16): self._exe = self._get_exe() @@ -714,7 +688,9 @@ class FfmpegFormat(Format): quality = self.request.kwargs.get('quality', 5) ffmpeg_log_level = self.request.kwargs.get('ffmpeg_log_level', 'warning') - extra_ffmpeg_params = self.request.kwargs.get('ffmpeg_params', []) + input_params = self.request.kwargs.get('input_params') or [] + output_params = self.request.kwargs.get('output_params') or [] + output_params += self.request.kwargs.get('ffmpeg_params') or [] # You may need to use -pix_fmt yuv420p for your output to work in # QuickTime and most other players. These players only supports # the YUV planar color space with 4:2:0 chroma subsampling for @@ -730,11 +706,12 @@ class FfmpegFormat(Format): "-vcodec", "rawvideo", '-s', sizestr, '-pix_fmt', self._pix_fmt, - '-r', "%.02f" % fps, - '-i', '-', '-an', - '-vcodec', codec, - '-pix_fmt', pixelformat, - ] + '-r', "%.02f" % fps] + input_params + cmd += ['-i', '-'] + cmd += ['-an', + '-vcodec', codec, + '-pix_fmt', pixelformat, + ] # Add fixed bitrate or variable bitrate compression flags if bitrate is not None: cmd += ['-b:v', str(bitrate)] @@ -789,7 +766,7 @@ class FfmpegFormat(Format): # output from ffmpeg by default. 
That way if there are warnings # the user will see them. cmd += ['-v', ffmpeg_log_level] - cmd += extra_ffmpeg_params + cmd += output_params cmd.append(self._filename) self._cmd = " ".join(cmd) # For showing command if needed if any([level in ffmpeg_log_level for level in @@ -956,6 +933,80 @@ class StreamCatcher(threading.Thread): self._lines = limit_lines_local(self._lines) +def parse_device_names(ffmpeg_output): + """ Parse the output of the ffmpeg -list-devices command""" + device_names = [] + in_video_devices = False + for line in ffmpeg_output.splitlines(): + if line.startswith('[dshow'): + logging.debug(line) + line = line.split(']', 1)[1].strip() + if in_video_devices and line.startswith('"'): + device_names.append(line[1:-1]) + elif 'video devices' in line: + in_video_devices = True + elif 'devices' in line: + # set False for subsequent "devices" sections + in_video_devices = False + return device_names + + +def parse_ffmpeg_info(text): + meta = {} + + if isinstance(text, list): + lines = text + else: + lines = text.splitlines() + + # Get version + ver = lines[0].split('version', 1)[-1].split('Copyright')[0] + meta['ffmpeg_version'] = ver.strip() + ' ' + lines[1].strip() + + # get the output line that speaks about video + videolines = [l for l in lines if l.lstrip().startswith('Stream ') + and ' Video: ' in l] + line = videolines[0] + + # get the frame rate. + # matches can be empty, see #171, assume nframes = inf + # the regexp omits values of "1k tbr" which seems a specific edge-case #262 + # it seems that tbr is generally to be preferred #262 + matches = re.findall(" ([0-9]+\.?[0-9]*) (tbr|fps)", line) + fps = 0 + matches.sort(key=lambda x: x[1] == 'tbr', reverse=True) + if matches: + fps = float(matches[0][0].strip()) + meta['fps'] = fps + + # get the size of the original stream, of the form 460x320 (w x h) + match = re.search(" [0-9]*x[0-9]*(,| )", line) + parts = line[match.start():match.end()-1].split('x') + meta['source_size'] = tuple(map(int, parts)) + + # get the size of what we receive, of the form 460x320 (w x h) + line = videolines[-1] # Pipe output + match = re.search(" [0-9]*x[0-9]*(,| )", line) + parts = line[match.start():match.end()-1].split('x') + meta['size'] = tuple(map(int, parts)) + + # Check the two sizes + if meta['source_size'] != meta['size']: + logging.warning('Warning: the frame size for reading %s is ' + 'different from the source frame size %s.' % + (meta['size'], meta['source_size'])) + + # get duration (in seconds) + line = [l for l in lines if 'Duration: ' in l][0] + match = re.search(" [0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9]", + line) + if match is not None: + hms = map(float, line[match.start()+1:match.end()].split(':')) + meta['duration'] = cvsecs(*hms) + + return meta + + def get_output_video_line(lines): """Get the line that defines the video stream that ffmpeg outputs, and which we read.
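The patch above documents a new reader-side `fps` keyword for the ffmpeg plugin. A minimal usage sketch, assuming the behaviour described in that docstring and borrowing the built-in 'imageio:cockatoo.mp4' sample from the accompanying test patch (frame counts and exact semantics are taken from that test, not guaranteed here):

```python
import imageio

# Read at the file's native rate, then ask ffmpeg to resample to 8 fps via the
# new `fps` keyword added by the patch. Comparing frame counts mirrors what the
# test patch below does with the cockatoo sample.
native = imageio.get_reader('imageio:cockatoo.mp4')
print(native.get_meta_data()['fps'], len(native))

resampled = imageio.get_reader('imageio:cockatoo.mp4', fps=8)
frames = [frame for frame in resampled]   # fewer frames than the native read
```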
ffmpeg: you are trying to apply an input option to an output file or vice versa. Move this option before the file it belongs to.
I'm trying to get imageio to read only I-frames with the ffmpeg plugin, so I'm using the following line in Python:
`video_reader = imageio.get_reader(video_path, 'ffmpeg', ffmpeg_params=['-vf', 'select eq(pict_type\,PICT_TYPE_I)'])`
During initialisation I get the following message from ffmpeg:
ffmpeg version 3.3.3 Copyright (c) 2000-2017 the FFmpeg developers
built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04.3)
configuration: --extra-libs=-ldl --prefix=/opt/ffmpeg --mandir=/usr/share/man --enable-avresample --disable-debug --enable-nonfree --enable-gpl --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb --disable-decoder=amrnb --disable-decoder=amrwb --enable-libpulse --enable-libfreetype --enable-gnutls --disable-ffserver --enable-libx264 --enable-libx265 --enable-libfdk-aac --enable-libvorbis --enable-libtheora --enable-libmp3lame --enable-libopus --enable-libvpx --enable-libspeex --enable-libass --enable-avisynth --enable-libsoxr --enable-libxvid --enable-libvidstab --enable-libwavpack --enable-nvenc --enable-libzimg
libavutil 55. 58.100 / 55. 58.100
libavcodec 57. 89.100 / 57. 89.100
libavformat 57. 71.100 / 57. 71.100
libavdevice 57. 6.100 / 57. 6.100
libavfilter 6. 82.100 / 6. 82.100
libavresample 3. 5. 0 / 3. 5. 0
libswscale 4. 6.100 / 4. 6.100
libswresample 2. 7.100 / 2. 7.100
libpostproc 54. 5.100 / 54. 5.100
Option vf (set video filters) cannot be applied to input url /home/konsof01/Work/FaceBackend/TestData/TestMedia/me.mp4 -- you are trying to apply an input option to an output file or vice versa. Move this option before the file it belongs to.
Error parsing options for input file /home/konsof01/Work/FaceBackend/TestData/TestMedia/me.mp4.
Error opening input files: Invalid argument
The end of this message clearly says that the option is being put in the wrong place. Is this by design, or am I doing something wrong? Would it make sense to have a way to mark some options as output-file options rather than input options? Thanks, Konstantin
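Given the patch above, a hedged sketch of how this report's use case could be expressed with the new `output_params` keyword. The path is a placeholder, and the select expression uses standard ffmpeg filtergraph syntax (the reporter's original string had a space where an '=' is expected):

```python
import imageio

# Filter options act on ffmpeg's output stream (the pipe imageio reads from),
# so with the patch they belong in `output_params` rather than `ffmpeg_params`,
# which is now kept only as an input-side alias for backwards compatibility.
video_reader = imageio.get_reader(
    'me.mp4', 'ffmpeg',
    output_params=['-vf', r'select=eq(pict_type\,PICT_TYPE_I)'],
)
```

Whether the reader's reported frame count still matches after filtering is not addressed here; the sketch only illustrates where the option is meant to go.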
imageio/imageio
diff --git a/tests/test_dicom.py b/tests/test_dicom.py index 54c915d..23b24a2 100644 --- a/tests/test_dicom.py +++ b/tests/test_dicom.py @@ -58,6 +58,12 @@ def test_read_empty_dir(): assert imageio.formats.search_read_format(request) is None +def test_dcmtk(): + # This should not crach, we make no assumptions on whether its + # available or not + imageio.plugins.dicom.get_dcmdjpeg_exe() + + def test_selection(): dname1, dname2, fname1, fname2 = _prepare() diff --git a/tests/test_ffmpeg.py b/tests/test_ffmpeg.py index 2229c10..73710c5 100644 --- a/tests/test_ffmpeg.py +++ b/tests/test_ffmpeg.py @@ -393,12 +393,6 @@ def test_framecatcher(): file.close() -def test_dcmtk(): - # This should not crach, we make no assumptions on whether its - # available or not - imageio.plugins.dicom.get_dcmdjpeg_exe() - - def test_webcam(): need_internet() @@ -408,49 +402,6 @@ def test_webcam(): skip('no web cam') -# Sample from ffmpeg -device-list (distilled down to two lines per section) -ffmpeg_output_sample = \ - u'ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers\r\n' \ - u' built with gcc 6.3.0 (GCC)\r\n' \ - u' configuration: --enable-gpl --enable-version3 --enable-d3d11va' \ - u' --enable-dxva2 --enable-libmfx --enable-nvenc --enable-avisynth' \ - u'libswresample 2. 3.100 / 2. 3.100\r\n ' \ - u'libpostproc 54. 1.100 / 54. 1.100\r\n' \ - u'[dshow @ 039a7e20] DirectShow video devices (some may be both video ' \ - u'and audio devices)\r\n' \ - u'[dshow @ 039a7e20] "AVerMedia USB Polaris Analog Capture"\r\n' \ - u'[dshow @ 039a7e20] Alternative name "@device_pnp_\\\\?\\usb#vid_0' \ - u'7ca&pid_c039&mi_01#8&55f1102&0&0001#{65e8773d-8f56-11d0-a3b9-00a0c922' \ - u'3196}\\{9b365890-165f-11d0-a195-0020afd156e4}"\r\n' \ - u'[dshow @ 039a7e20] "Lenovo EasyCamera"\r\n' \ - u'[dshow @ 039a7e20] Alternative name "@device_pnp_\\\\?\\usb#vid_0' \ - u'4f2&pid_b50f&mi_00#6&bbc4ae1&1&0000#{65e8773d-8f56-11d0-a3b9-00a0c922' \ - u'3196}\\global"\r\n' \ - u'[dshow @ 039a7e20] DirectShow audio devices\r\n' \ - u'[dshow @ 039a7e20] "Microphone (2- USB Multimedia Audio Device)"\r\n' \ - u'[dshow @ 039a7e20] Alternative name "@device_cm_{33D9A762-90C8-11D' \ - u'0-BD43-00A0C911CE86}\\wave_{73C17834-AA57-4CA1-847A-6BBEB1E0F2E6}"\r\n' \ - u'[dshow @ 039a7e20] "SPDIF Interface (Multimedia Audio Device)"\r\n' \ - u'[dshow @ 039a7e20] Alternative name "@device_cm_{33D9A762-90C8-11D' \ - u'0-BD43-00A0C911CE86}\\wave_{617B63FB-CFC0-4D10-AE30-42A66CAF6A4E}"\r\n' \ - u'dummy: Immediate exit requested\r\n' - - -def test_webcam_parse_device_names(): - # Ensure that the device list parser returns all video devices (issue #283) - - # Specify test parameters - number_of_video_devices_in_sample = 2 # see ffmpeg_output_sample above - - # Parse the sample - device_names = \ - imageio.plugins.ffmpeg.FfmpegFormat.Reader._parse_device_names( - ffmpeg_output_sample) - - # Assert that the device_names list has the correct length - assert len(device_names) == number_of_video_devices_in_sample - - def test_webcam_get_next_data(): need_internet() diff --git a/tests/test_ffmpeg_info.py b/tests/test_ffmpeg_info.py new file mode 100644 index 0000000..6501380 --- /dev/null +++ b/tests/test_ffmpeg_info.py @@ -0,0 +1,165 @@ +# styletest: ignore E501 +""" Tests specific to parsing ffmpeg info. 
+""" + +from imageio.testing import run_tests_if_main, need_internet + +import imageio + + +def dedent(text, dedent=8): + lines = [line[dedent:] for line in text.splitlines()] + text = '\n'.join(lines) + return text.strip() + '\n' + + +def test_webcam_parse_device_names(): + # Ensure that the device list parser returns all video devices (issue #283) + + sample = dedent(r""" + ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers + built with gcc 6.3.0 (GCC) + configuration: --enable-gpl --enable-version3 --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-nvenc --enable-avisynthlibswresample 2. 3.100 / 2. 3.100 + libpostproc 54. 1.100 / 54. 1.100 + [dshow @ 039a7e20] DirectShow video devices (some may be both video and audio devices) + [dshow @ 039a7e20] "AVerMedia USB Polaris Analog Capture" + [dshow @ 039a7e20] Alternative name "@device_pnp_\\?\usb#vid_07ca&pid_c039&mi_01#8&55f1102&0&0001#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\{9b365890-165f-11d0-a195-0020afd156e4}" + [dshow @ 039a7e20] "Lenovo EasyCamera" + [dshow @ 039a7e20] Alternative name "@device_pnp_\\?\usb#vid_04f2&pid_b50f&mi_00#6&bbc4ae1&1&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global" + [dshow @ 039a7e20] DirectShow audio devices + [dshow @ 039a7e20] "Microphone (2- USB Multimedia Audio Device)" + [dshow @ 039a7e20] Alternative name "@device_cm_{33D9A762-90C8-11D0-BD43-00A0C911CE86}\wave_{73C17834-AA57-4CA1-847A-6BBEB1E0F2E6}" + [dshow @ 039a7e20] "SPDIF Interface (Multimedia Audio Device)" + [dshow @ 039a7e20] Alternative name "@device_cm_{33D9A762-90C8-11D0-BD43-00A0C911CE86}\wave_{617B63FB-CFC0-4D10-AE30-42A66CAF6A4E}" + dummy: Immediate exit requested + """) + + # Parse the sample + device_names = imageio.plugins.ffmpeg.parse_device_names(sample) + + # Assert that the device_names list has the correct length + assert len(device_names) == 2 + + +def test_get_correct_fps1(): + # from issue #262 + + sample = dedent(r""" + fmpeg version 3.2.2 Copyright (c) 2000-2016 the FFmpeg developers + built with Apple LLVM version 8.0.0 (clang-800.0.42.1) + configuration: --prefix=/usr/local/Cellar/ffmpeg/3.2.2 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-frei0r --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxvid --enable-opencl --disable-lzma --enable-libopenjpeg --disable-decoder=jpeg2000 --extra-cflags=-I/usr/local/Cellar/openjpeg/2.1.2/include/openjpeg-2.1 --enable-nonfree --enable-vda + libavutil 55. 34.100 / 55. 34.100 + libavcodec 57. 64.101 / 57. 64.101 + libavformat 57. 56.100 / 57. 56.100 + libavdevice 57. 1.100 / 57. 1.100 + libavfilter 6. 65.100 / 6. 65.100 + libavresample 3. 1. 0 / 3. 1. 0 + libswscale 4. 2.100 / 4. 2.100 + libswresample 2. 3.100 / 2. 3.100 + libpostproc 54. 1.100 / 54. 
1.100 + Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/echeng/video.mp4': + Metadata: + major_brand : mp42 + minor_version : 1 + compatible_brands: isom3gp43gp5 + Duration: 00:16:05.80, start: 0.000000, bitrate: 1764 kb/s + Stream #0:0(eng): Audio: aac (LC) (mp4a / 0x6134706D), 8000 Hz, mono, fltp, 40 kb/s (default) + Metadata: + handler_name : soun + Stream #0:1(eng): Video: mpeg4 (Simple Profile) (mp4v / 0x7634706D), yuv420p, 640x480 [SAR 1:1 DAR 4:3], 1720 kb/s, 29.46 fps, 26.58 tbr, 90k tbn, 1k tbc (default) + Metadata: + handler_name : vide + Output #0, image2pipe, to 'pipe:': + Metadata: + major_brand : mp42 + minor_version : 1 + compatible_brands: isom3gp43gp5 + encoder : Lavf57.56.100 + Stream #0:0(eng): Video: rawvideo (RGB[24] / 0x18424752), rgb24, 640x480 [SAR 1:1 DAR 4:3], q=2-31, 200 kb/s, 26.58 fps, 26.58 tbn, 26.58 tbc (default) + Metadata: + handler_name : vide + encoder : Lavc57.64.101 rawvideo + Stream mapping: + """) + + info = imageio.plugins.ffmpeg.parse_ffmpeg_info(sample) + assert info['fps'] == 26.58 + + +def test_get_correct_fps2(): + # from issue #262 + + sample = dedent(r""" + ffprobe version 3.2.2 Copyright (c) 2007-2016 the FFmpeg developers + built with Apple LLVM version 8.0.0 (clang-800.0.42.1) + configuration: --prefix=/usr/local/Cellar/ffmpeg/3.2.2 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-frei0r --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxvid --enable-opencl --disable-lzma --enable-libopenjpeg --disable-decoder=jpeg2000 --extra-cflags=-I/usr/local/Cellar/openjpeg/2.1.2/include/openjpeg-2.1 --enable-nonfree --enable-vda + libavutil 55. 34.100 / 55. 34.100 + libavcodec 57. 64.101 / 57. 64.101 + libavformat 57. 56.100 / 57. 56.100 + libavdevice 57. 1.100 / 57. 1.100 + libavfilter 6. 65.100 / 6. 65.100 + libavresample 3. 1. 0 / 3. 1. 0 + libswscale 4. 2.100 / 4. 2.100 + libswresample 2. 3.100 / 2. 3.100 + libpostproc 54. 1.100 / 54. 
1.100 + Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'video.mp4': + Metadata: + major_brand : mp42 + minor_version : 1 + compatible_brands: isom3gp43gp5 + Duration: 00:08:44.53, start: 0.000000, bitrate: 1830 kb/s + Stream #0:0(eng): Audio: aac (LC) (mp4a / 0x6134706D), 8000 Hz, mono, fltp, 40 kb/s (default) + Metadata: + handler_name : soun + Stream #0:1(eng): Video: mpeg4 (Simple Profile) (mp4v / 0x7634706D), yuv420p, 640x480 [SAR 1:1 DAR 4:3], 1785 kb/s, 29.27 fps, 1k tbr, 90k tbn, 1k tbc (default) + Metadata: + handler_name : vide + """) + + info = imageio.plugins.ffmpeg.parse_ffmpeg_info(sample) + assert info['fps'] == 29.27 + + +def test_overload_fps(): + + need_internet() + + # Native + r = imageio.get_reader('imageio:cockatoo.mp4') + assert len(r) == 280 # native + ims = [im for im in r] + assert len(ims) == 280 + # imageio.mimwrite('~/parot280.gif', ims[:30]) + + # Less + r = imageio.get_reader('imageio:cockatoo.mp4', fps=8) + assert len(r) == 112 + ims = [im for im in r] + assert len(ims) == 112 + # imageio.mimwrite('~/parot112.gif', ims[:30]) + + # More + r = imageio.get_reader('imageio:cockatoo.mp4', fps=24) + assert len(r) == 336 + ims = [im for im in r] + assert len(ims) == 336 + # imageio.mimwrite('~/parot336.gif', ims[:30]) + + # Do we calculate nframes correctly? To be fair, the reader wont try to + # read beyond what it thinks how many frames it has. But this at least + # makes sure that this works. + for fps in (8.0, 8.02, 8.04, 8.06, 8.08): + r = imageio.get_reader('imageio:cockatoo.mp4', fps=fps) + n = len(r) + i = 0 + try: + while True: + r.get_next_data() + i += 1 + except (StopIteration, IndexError): + pass + # print(r._meta['duration'], r._meta['fps'], r._meta['duration'] * fps, r._meta['nframes'], n) + assert i == n + + +run_tests_if_main()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_media", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
2.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "invoke", "pytest", "pytest-cov", "coveralls" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libfreeimage3" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 coveralls==4.0.1 docopt==0.6.2 exceptiongroup==1.2.2 idna==3.10 -e git+https://github.com/imageio/imageio.git@a22145a9ebbd3aa3bc742911e052ed1903381486#egg=imageio iniconfig==2.1.0 invoke==2.2.0 numpy==2.0.2 packaging==24.2 pillow==11.1.0 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 requests==2.32.3 tomli==2.2.1 urllib3==2.3.0
name: imageio channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - coveralls==4.0.1 - docopt==0.6.2 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - invoke==2.2.0 - numpy==2.0.2 - packaging==24.2 - pillow==11.1.0 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - requests==2.32.3 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/imageio
[ "tests/test_ffmpeg_info.py::test_webcam_parse_device_names", "tests/test_ffmpeg_info.py::test_get_correct_fps1", "tests/test_ffmpeg_info.py::test_get_correct_fps2" ]
[ "tests/test_dicom.py::test_read_empty_dir", "tests/test_dicom.py::test_selection", "tests/test_dicom.py::test_different_read_modes", "tests/test_ffmpeg_info.py::test_overload_fps" ]
[ "tests/test_dicom.py::test_dcmtk", "tests/test_dicom.py::test_progress", "tests/test_dicom.py::test_different_read_modes_with_readers" ]
[]
BSD 2-Clause "Simplified" License
2,292
[ "imageio/plugins/ffmpeg.py" ]
[ "imageio/plugins/ffmpeg.py" ]
conan-io__conan-2602
8151c4c39a5ffbf42f21a10d586fe88b8f1c8f04
2018-03-13 11:25:25
0f8b143c43d0354c6a75da94a1374d5ce39b7f96
diff --git a/conans/client/build/autotools_environment.py b/conans/client/build/autotools_environment.py index 015750341..0106e1cb0 100644 --- a/conans/client/build/autotools_environment.py +++ b/conans/client/build/autotools_environment.py @@ -57,47 +57,63 @@ class AutoToolsBuildEnvironment(object): # Not declared by default self.fpic = None + def _get_triplet(self, the_arch, the_os): + """ + machine-vendor-op_system, But vendor can be omitted in practice + """ + + # Calculate the arch + machine = {"x86": "i686" if the_os != "Linux" else "x86", + "x86_64": "x86_64", + "armv6": "arm", + "armv7": "arm", + "armv7s": "arm", + "armv7k": "arm", + "armv7hf": "arm", + "armv8": "aarch64"}.get(the_arch, None) + if machine is None: + self._conanfile.output.warn("Unknown '%s' machine, Conan doesn't know how to " + "translate it to the GNU triplet, please report at " + " https://github.com/conan-io/conan/issues" % the_arch) + return "unknown" + + # Calculate the OS + compiler = self._conanfile.settings.get_safe("compiler") + if compiler == "gcc": + windows_op = "w64-mingw32" + elif compiler == "Visual Studio": + windows_op = "windows-msvc" + else: + windows_op = "windows" + + op_system = {"Windows": windows_op, + "Linux": "linux-gnu", + "Darwin": "apple-darwin", + "Android": "linux-android", + "Macos": "apple-darwin", + "iOS": "apple-darwin", + "watchOS": "apple-darwin", + "tvOS": "apple-darwin"}.get(the_os, the_os.lower()) + + if the_os in ("Linux", "Android"): + if "arm" in the_arch and the_arch != "armv8": + op_system += "eabi" + + if the_arch == "armv7hf" and the_os == "Linux": + op_system += "hf" + + return "%s-%s" % (machine, op_system) + def _get_host_build_target_flags(self, arch_detected, os_detected): """Based on google search for build/host triplets, it could need a lot and complex verification""" + if not cross_building(self._conanfile.settings, os_detected, arch_detected): return False, False, False - arch_setting = self._conanfile.settings.get_safe("arch") - os_setting = self._conanfile.settings.get_safe("os") - - if os_detected == "Windows" and os_setting != "Windows": - # Don't know what to do with these, even exists? its only for configure - return None, None, None - - # Building FOR windows - if os_setting == "Windows": - build = "i686-w64-mingw32" if arch_detected == "x86" else "x86_64-w64-mingw32" - host = "i686-w64-mingw32" if arch_setting == "x86" else "x86_64-w64-mingw32" - else: # Building for Linux or Android - build = "%s-%s" % (arch_detected, {"Linux": "linux-gnu", - "Darwin": "apple-darwin"}.get(os_detected, - os_detected.lower())) - if arch_setting == "x86": - host_arch = "i686" - elif arch_setting == "armv8": - host_arch = "aarch64" - else: - host_arch = "arm" if "arm" in arch_setting else arch_setting - - host = "%s%s" % (host_arch, {"Linux": "-linux-gnu", - "Android": "-linux-android", - "Macos": "-apple-darwin", - "iOS": "-apple-darwin", - "watchOS": "-apple-darwin", - "tvOS": "-apple-darwin"}.get(os_setting, "")) - - if os_setting in ("Linux", "Android"): - if "arm" in arch_setting and arch_setting != "armv8": - host += "eabi" - - if arch_setting == "armv7hf" and os_setting == "Linux": - host += "hf" + build = self._get_triplet(arch_detected, os_detected) + host = self._get_triplet(self._conanfile.settings.get_safe("arch"), + self._conanfile.settings.get_safe("os")) return build, host, None @@ -127,7 +143,9 @@ class AutoToolsBuildEnvironment(object): configure_dir = "." 
auto_build, auto_host, auto_target = None, None, None if build is None or host is None or target is None: - flags = self._get_host_build_target_flags(detected_architecture(), platform.system()) + arch_detected = detected_architecture() or platform.machine() + os_detected = platform.system() + flags = self._get_host_build_target_flags(arch_detected, os_detected) auto_build, auto_host, auto_target = flags triplet_args = [] @@ -154,8 +172,10 @@ class AutoToolsBuildEnvironment(object): with environment_append(pkg_env): with environment_append(self.vars): configure_dir = self._adjust_path(configure_dir) - self._conanfile.run('%s/configure %s %s' - % (configure_dir, args_to_string(args), " ".join(triplet_args)), + command = '%s/configure %s %s' % (configure_dir, + args_to_string(args), " ".join(triplet_args)) + self._conanfile.output.info("Calling:\n > %s" % command) + self._conanfile.run(command, win_bash=self._win_bash, subsystem=self.subsystem) diff --git a/conans/client/client_cache.py b/conans/client/client_cache.py index b37344331..cd21b11b5 100644 --- a/conans/client/client_cache.py +++ b/conans/client/client_cache.py @@ -1,4 +1,5 @@ import os +from os.path import join, normpath from collections import OrderedDict from conans.client.conf import ConanClientConfigParser, default_client_conf, default_settings_yml @@ -22,6 +23,13 @@ LOCALDB = ".conan.db" REGISTRY = "registry.txt" PROFILES_FOLDER = "profiles" +# Client certificates +CLIENT_CERT = "client.crt" +CLIENT_KEY = "client.key" + +# Server authorities file +CACERT_FILE = "cacert.pem" + class ClientCache(SimplePaths): """ Class to represent/store/compute all the paths involved in the execution @@ -29,15 +37,22 @@ class ClientCache(SimplePaths): """ def __init__(self, base_folder, store_folder, output): - self.conan_folder = os.path.join(base_folder, ".conan") + self.conan_folder = join(base_folder, ".conan") self._conan_config = None self._settings = None self._output = output self._store_folder = store_folder or self.conan_config.storage_path or self.conan_folder self._default_profile = None self._no_lock = None + self.client_cert_path = normpath(join(self.conan_folder, CLIENT_CERT)) + self.client_cert_key_path = normpath(join(self.conan_folder, CLIENT_KEY)) + super(ClientCache, self).__init__(self._store_folder) + @property + def cacert_path(self): + return normpath(join(self.conan_folder, CACERT_FILE)) + def _no_locks(self): if self._no_lock is None: self._no_lock = self.conan_config.cache_no_locks @@ -61,12 +76,12 @@ class ClientCache(SimplePaths): def package_lock(self, package_ref): if self._no_locks(): return NoLock() - return SimpleLock(os.path.join(self.conan(package_ref.conan), "locks", + return SimpleLock(join(self.conan(package_ref.conan), "locks", package_ref.package_id)) @property def put_headers_path(self): - return os.path.join(self.conan_folder, PUT_HEADERS) + return join(self.conan_folder, PUT_HEADERS) def read_put_headers(self): ret = {} @@ -89,7 +104,7 @@ class ClientCache(SimplePaths): @property def registry(self): - return os.path.join(self.conan_folder, REGISTRY) + return join(self.conan_folder, REGISTRY) @property def conan_config(self): @@ -102,26 +117,26 @@ class ClientCache(SimplePaths): @property def localdb(self): - return os.path.join(self.conan_folder, LOCALDB) + return join(self.conan_folder, LOCALDB) @property def conan_conf_path(self): - return os.path.join(self.conan_folder, CONAN_CONF) + return join(self.conan_folder, CONAN_CONF) @property def profiles_path(self): - return 
os.path.join(self.conan_folder, PROFILES_FOLDER) + return join(self.conan_folder, PROFILES_FOLDER) @property def settings_path(self): - return os.path.join(self.conan_folder, CONAN_SETTINGS) + return join(self.conan_folder, CONAN_SETTINGS) @property def default_profile_path(self): if os.path.isabs(self.conan_config.default_profile): return self.conan_config.default_profile else: - return os.path.expanduser(os.path.join(self.conan_folder, PROFILES_FOLDER, + return os.path.expanduser(join(self.conan_folder, PROFILES_FOLDER, self.conan_config.default_profile)) @property @@ -177,7 +192,7 @@ class ClientCache(SimplePaths): packages_dir = self.packages(conan_reference) try: packages = [dirname for dirname in os.listdir(packages_dir) - if os.path.isdir(os.path.join(packages_dir, dirname))] + if os.path.isdir(join(packages_dir, dirname))] except OSError: # if there isn't any package folder packages = [] return packages @@ -188,7 +203,7 @@ class ClientCache(SimplePaths): builds_dir = self.builds(conan_reference) try: builds = [dirname for dirname in os.listdir(builds_dir) - if os.path.isdir(os.path.join(builds_dir, dirname))] + if os.path.isdir(join(builds_dir, dirname))] except OSError: # if there isn't any package folder builds = [] return builds @@ -205,7 +220,7 @@ class ClientCache(SimplePaths): @staticmethod def read_package_recipe_hash(package_folder): - filename = os.path.join(package_folder, CONANINFO) + filename = join(package_folder, CONANINFO) info = ConanInfo.loads(load(filename)) return info.recipe_hash diff --git a/conans/client/conan_api.py b/conans/client/conan_api.py index 4cc619762..98704dc90 100644 --- a/conans/client/conan_api.py +++ b/conans/client/conan_api.py @@ -16,6 +16,7 @@ from conans.client.remote_manager import RemoteManager from conans.client.remote_registry import RemoteRegistry from conans.client.rest.auth_manager import ConanApiAuthManager from conans.client.rest.rest_client import RestApiClient +from conans.client.rest.conan_requester import ConanRequester from conans.client.rest.version_checker import VersionCheckerRequester from conans.client.runner import ConanRunner from conans.client.store.localdb import LocalDB @@ -44,14 +45,8 @@ default_manifest_folder = '.conan_manifests' def get_basic_requester(client_cache): requester = requests.Session() - proxies = client_cache.conan_config.proxies - if proxies: - # Account for the requests NO_PROXY env variable, not defined as a proxy like http= - no_proxy = proxies.pop("no_proxy", None) - if no_proxy: - os.environ["NO_PROXY"] = no_proxy - requester.proxies = proxies - return requester + # Manage the verify and the client certificates and setup proxies + return ConanRequester(requester, client_cache) def api_method(f): @@ -114,28 +109,31 @@ def _get_conanfile_path(path, cwd, py): class ConanAPIV1(object): + @staticmethod + def instance_remote_manager(requester, client_cache, user_io, _client_version, + min_server_compatible_version): + + # Verify client version against remotes + version_checker_req = VersionCheckerRequester(requester, _client_version, + min_server_compatible_version, + user_io.out) + + # To handle remote connections + put_headers = client_cache.read_put_headers() + rest_api_client = RestApiClient(user_io.out, requester=version_checker_req, + put_headers=put_headers) + # To store user and token + localdb = LocalDB(client_cache.localdb) + # Wraps RestApiClient to add authentication support (same interface) + auth_manager = ConanApiAuthManager(rest_api_client, user_io, localdb) + # Handle remote 
connections + remote_manager = RemoteManager(client_cache, auth_manager, user_io.out) + return localdb, rest_api_client, remote_manager + @staticmethod def factory(): """Factory""" - def instance_remote_manager(client_cache): - requester = get_basic_requester(client_cache) - # Verify client version against remotes - version_checker_req = VersionCheckerRequester(requester, Version(client_version), - Version(MIN_SERVER_COMPATIBLE_VERSION), - out) - # To handle remote connections - put_headers = client_cache.read_put_headers() - rest_api_client = RestApiClient(out, requester=version_checker_req, - put_headers=put_headers) - # To store user and token - localdb = LocalDB(client_cache.localdb) - # Wraps RestApiClient to add authentication support (same interface) - auth_manager = ConanApiAuthManager(rest_api_client, user_io, localdb) - # Handle remote connections - remote_manager = RemoteManager(client_cache, auth_manager, out) - return remote_manager - use_color = get_env("CONAN_COLOR_DISPLAY", 1) if use_color and hasattr(sys.stdout, "isatty") and sys.stdout.isatty(): import colorama @@ -159,7 +157,15 @@ class ConanAPIV1(object): conans.util.log.logger = configure_logger() # Get the new command instance after migrations have been done - remote_manager = instance_remote_manager(client_cache) + requester = get_basic_requester(client_cache) + _, _, remote_manager = ConanAPIV1.instance_remote_manager( + requester, + client_cache, user_io, + Version(client_version), + Version(MIN_SERVER_COMPATIBLE_VERSION)) + + # Adjust global tool variables + set_global_instances(out, requester) # Get a search manager search_manager = DiskSearchManager(client_cache) @@ -180,8 +186,6 @@ class ConanAPIV1(object): self._remote_manager = remote_manager self._manager = ConanManager(client_cache, user_io, runner, remote_manager, search_manager, settings_preprocessor) - # Patch the tools module with a good requester and user_io - set_global_instances(self._user_io.out, get_basic_requester(self._client_cache)) @api_method def new(self, name, header=False, pure_c=False, test=False, exports_sources=False, bare=False, diff --git a/conans/client/conf/__init__.py b/conans/client/conf/__init__.py index 14f288a4e..6c04bae9c 100644 --- a/conans/client/conf/__init__.py +++ b/conans/client/conf/__init__.py @@ -127,6 +127,8 @@ path = ~/.conan/data # http = http://user:[email protected]:3128/ # http = http://10.10.1.10:3128 # https = http://10.10.1.10:1080 +# You can skip the proxy for the matching (fnmatch) urls (comma-separated) +# no_proxy_match = *bintray.com*, https://myserver.* # Default settings now declared in the default profile diff --git a/conans/client/rest/cacert.py b/conans/client/rest/cacert.py index d84ddbc8e..349c7c28c 100644 --- a/conans/client/rest/cacert.py +++ b/conans/client/rest/cacert.py @@ -1,5725 +1,3350 @@ # -*- coding: utf-8 -*- - -import os -from conans.util.files import save import logging -from conans.paths import conan_expand_user, get_conan_user_home # Capture SSL warnings as pointed out here: # https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning # TODO: Fix this security warning logging.captureWarnings(True) -# LAST LINES OF THIS CERTIFICATE BUNDLE INCLUDES OUT CHEAP COMODO AUTHORITY -# VERIFICATOR. 
IF ITS NOT INCLUDED, SSL REQUESTS WILL FAIL OR CANT BE VERIFIED -# (WITH THE WARNING OF URLLIB) - -cacert = ''' -# Issuer: O=Equifax OU=Equifax Secure Certificate Authority -# Subject: O=Equifax OU=Equifax Secure Certificate Authority -# Label: "Equifax Secure CA" -# Serial: 903804111 -# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 -# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a -# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy -dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 -MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx -dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f -BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A -cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ -MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm -aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw -ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj -IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y -7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh -1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +# Got from: https://curl.haxx.se/docs/caextract.html +# Updated at 12 March 2018 + +cacert = """## +## Bundle of CA Root Certificates +## +## Certificate data from Mozilla as of: Wed Mar 7 04:12:06 2018 GMT +## +## This is a bundle of X.509 certificates of public Certificate Authorities +## (CA). These were automatically extracted from Mozilla's root certificates +## file (certdata.txt). This file can be found in the mozilla source tree: +## https://hg.mozilla.org/releases/mozilla-release/raw-file/default/security/nss/lib/ckfw/builtins/certdata.txt +## +## It contains the certificates in PEM format and therefore +## can be directly used with curl / libcurl / php_curl, or with +## an Apache+mod_ssl webserver for SSL client authentication. +## Just configure this file as the SSLCACertificateFile. +## +## Conversion done with mk-ca-bundle.pl version 1.27. 
+## SHA256: 704f02707ec6b4c4a7597a8c6039b020def11e64f3ef0605a9c3543d48038a57 +## + + +GlobalSign Root CA +================== +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUx +GTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkds +b2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNV +BAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYD +VQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDa +DuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6sc +THAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlb +Kk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNP +c1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrX +gzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUF +AAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6Dj +Y1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyG +j/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhH +hm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveC +X4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +GlobalSign Root CA - R2 +======================= +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4GA1UECxMXR2xv +YmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh +bFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT +aWduIFJvb3QgQ0EgLSBSMjETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln +bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6 +ErPLv4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8eoLrvozp +s6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklqtTleiDTsvHgMCJiEbKjN +S7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzdC9XZzPnqJworc5HGnRusyMvo4KD0L5CL +TfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pazq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6C +ygPCm48CAwEAAaOBnDCBmTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUm+IHV2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5nbG9i +YWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG3lm0mi3f3BmGLjAN +BgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4GsJ0/WwbgcQ3izDJr86iw8bmEbTUsp +9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu +01yiPqFbQfXf5WRDLenVOavSot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG7 +9G+dwfCMNYxdAfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== -----END CERTIFICATE----- -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 4 Public Primary Certification Authority - G3" -# Serial: 314531972711909413743075096039378935511 -# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df -# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d -# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 -GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ -+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd -U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm -NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY -ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ -ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 -CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq -g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm -fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c -2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ -bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Low-Value Services Root" -# Serial: 1 -# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc -# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d -# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 ------BEGIN CERTIFICATE----- -MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw -MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD -VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul -CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n -tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl -dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch -PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC -+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O -BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl -MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk -ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB -IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X -7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz -43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY -eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl -pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA -WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD 
-VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Public Services Root" -# Serial: 1 -# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f -# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 -# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx -MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB -ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV -BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV -6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX -GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP -dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH -1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF -62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW -BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL -MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU -cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv -b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 -IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ -iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao -GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh -4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm -XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Qualified Certificates Root" -# Serial: 1 -# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb -# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf -# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 ------BEGIN CERTIFICATE----- -MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 -MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK -EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh -BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B 
-AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq -xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G -87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i -2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U -WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 -0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G -A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr -pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL -ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm -aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv -hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm -hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X -dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 -P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y -iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no -xqE= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3 -# Subject: O=RSA Security Inc OU=RSA Security 2048 V3 -# Label: "RSA Security 2048 v3" -# Serial: 
13297492616345471454730593562152402946 -# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e -# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42 -# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c ------BEGIN CERTIFICATE----- -MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6 -MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp -dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX -BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy -MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp -eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg -/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl -wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh -AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2 -PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu -AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR -MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc -HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/ -Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+ -f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO -rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch -6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3 -7CAFYd4= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. -# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA 2" -# Serial: 1 -# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 -# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d -# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 ------BEGIN CERTIFICATE----- -MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs -IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg -R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A -PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 -Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL -TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL -5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 -S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe -2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE -FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap -EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td -EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv -/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN -A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 -abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF -I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz -4iIprn2DQKi6bA== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Label: "Visa eCommerce Root" -# Serial: 25952180776285836048024890241505565794 -# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 -# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 -# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 ------BEGIN CERTIFICATE----- -MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr -MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl 
-cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv -bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw -CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h -dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l -cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h -2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E -lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV -ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq -299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t -vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL -dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF -AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR -zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 -LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd -7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw -++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +Verisign Class 3 Public Primary Certification Authority - G3 +============================================================ +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv +cmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRy +dXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAMu6nFL8eB8aHm8bN3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1 +EUGO+i2tKmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGukxUc +cLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBmCC+Vk7+qRy+oRpfw +EuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJXwzw3sJ2zq/3avL6QaaiMxTJ5Xpj +055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWuimi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEA +ERSWwauSCPc/L8my/uRan2Te2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5f +j267Cz3qWhMeDGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565pF4ErWjfJXir0 +xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGtTxzhT5yvDwyd93gN2PQ1VoDa +t20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +Entrust.net Premium 2048 Secure Server CA +========================================= +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChMLRW50cnVzdC5u +ZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBpbmNvcnAuIGJ5IHJlZi4gKGxp +bWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNV +BAMTKkVudHJ1c3QubmV0IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQx +NzUwNTFaFw0yOTA3MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3 +d3d3LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTEl +MCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEGA1UEAxMqRW50cnVzdC5u +ZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgpMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A 
+MIIBCgKCAQEArU1LqRKGsuqjIAcVFmQqK0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOL +Gp18EzoOH1u3Hs/lJBQesYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSr +hRSGlVuXMlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVTXTzW +nLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/HoZdenoVve8AjhUi +VBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH4QIDAQABo0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJ +KoZIhvcNAQEFBQADggEBADubj1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPy +T/4xmf3IDExoU8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5bu/8j72gZyxKT +J1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+bYQLCIt+jerXmCHG8+c8eS9e +nNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/ErfF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +Baltimore CyberTrust Root +========================= +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJRTESMBAGA1UE +ChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYDVQQDExlCYWx0aW1vcmUgQ3li +ZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoXDTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMC +SUUxEjAQBgNVBAoTCUJhbHRpbW9yZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFs +dGltb3JlIEN5YmVyVHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKME +uyKrmD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjrIZ3AQSsB +UnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeKmpYcqWe4PwzV9/lSEy/C +G9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSuXmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9 +XbIGevOF6uvUA65ehD5f/xXtabz5OTZydc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjpr +l3RjM71oGDHweI12v/yejl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoI +VDaGezq1BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEB +BQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT929hkTI7gQCvlYpNRh +cL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3WgxjkzSswF07r51XgdIGn9w/xZchMB5 +hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsa +Y71k5h+3zvDyny67G7fyUIhzksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9H +RCwBXbsdtTLSR9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +AddTrust External Root +====================== +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYD +VQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEw +NDgzOFowbzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRU +cnVzdCBFeHRlcm5hbCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0Eg +Um9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvtH7xsD821 ++iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9uMq/NzgtHj6RQa1wVsfw +Tz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzXmk6vBbOmcZSccbNQYArHE504B4YCqOmo +aSYYkKtMsE8jqzpPhNjfzp/haW+710LXa0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy +2xSoRcRdKn23tNbE7qzNE0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv7 +7+ldU9U0WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYDVR0P +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0Jvf6xCZU7wO94CTL +VBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRk +VHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZl +j7DYd7usQWxHYINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvCNr4TDea9Y355 +e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEXc4g/VhsxOBi0cQ+azcgOno4u +G+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5amnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +Entrust Root Certification Authority +==================================== +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMCVVMxFjAUBgNV +BAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0Lm5ldC9DUFMgaXMgaW5jb3Jw +b3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMWKGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsG +A1UEAxMkRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0 +MloXDTI2MTEyNzIwNTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMu +MTkwNwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSByZWZlcmVu +Y2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNVBAMTJEVudHJ1c3QgUm9v +dCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +ALaVtkNC+sZtKm9I35RMOVcF7sN5EUFoNu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYsz +A9u3g3s+IIRe7bJWKKf44LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOww +Cj0Yzfv9KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGIrb68 +j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi94DkZfs0Nw4pgHBN +rziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOBsDCBrTAOBgNVHQ8BAf8EBAMCAQYw +DwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAigA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1 +MzQyWjAfBgNVHSMEGDAWgBRokORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DH +hmak8fdLQ/uEvW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9tO1KzKtvn1ISM +Y/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6ZuaAGAT/3B+XxFNSRuzFVJ7yVTa +v52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTS +W3iDVuycNsMm4hH2Z0kdkquM++v/eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0 +tHuu2guQOHXvgR1m0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +GeoTrust Global CA +================== +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9iYWwgQ0EwHhcNMDIwNTIxMDQw +MDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5j +LjEbMBkGA1UEAxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjo +BbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDviS2Aelet +8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU1XupGc1V3sjs0l44U+Vc +T4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagU +vTLrGAMoUgRx5aszPeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVk +DBF9qn1luMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKInZ57Q +zxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfStQWVYrmm3ok9Nns4 +d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcFPseKUgzbFbS9bZvlxrFUaKnjaZC2 +mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Unhw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6p +XE0zX5IJL4hmXXeXxx12E6nV5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvm +Mw== +-----END CERTIFICATE----- + +GeoTrust Universal CA +===================== +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN +R2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVyc2FsIENBMB4XDTA0MDMwNDA1 
+MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IElu +Yy4xHjAcBgNVBAMTFUdlb1RydXN0IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAKYVVaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9t +JPi8cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTTQjOgNB0e +RXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFhF7em6fgemdtzbvQKoiFs +7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2vc7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d +8Lsrlh/eezJS/R27tQahsiFepdaVaH/wmZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7V +qnJNk22CDtucvc+081xdVHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3Cga +Rr0BHdCXteGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZf9hB +Z3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfReBi9Fi1jUIxaS5BZu +KGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+nhutxx9z3SxPGWX9f5NAEC7S8O08 +ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0 +XG0D08DYj3rWMB8GA1UdIwQYMBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIB +hjANBgkqhkiG9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fXIwjhmF7DWgh2 +qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzynANXH/KttgCJwpQzgXQQpAvvL +oJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0zuzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsK +xr2EoyNB3tZ3b4XUhRxQ4K5RirqNPnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxF +KyDuSN/n3QmOGKjaQI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2 +DFKWkoRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9ER/frslK +xfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQtDF4JbAiXfKM9fJP/P6EU +p8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/SfuvmbJxPgWp6ZKy7PtXny3YuxadIwVyQD8vI +P/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +GeoTrust Universal CA 2 +======================= +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN +R2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwHhcNMDQwMzA0 +MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3Qg +SW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0 +DE81WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUGFF+3Qs17 +j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdqXbboW0W63MOhBW9Wjo8Q +JqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxLse4YuU6W3Nx2/zu+z18DwPw76L5GG//a +QMJS9/7jOvdqdzXQ2o3rXhhqMcceujwbKNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2 +WP0+GfPtDCapkzj4T8FdIgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP +20gaXT73y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRthAAn +ZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgocQIgfksILAAX/8sgC +SqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4Lt1ZrtmhN79UNdxzMk+MBB4zsslG +8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2 ++/CfXGJx7Tz0RzgQKzAfBgNVHSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8E +BAMCAYYwDQYJKoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQL1EuxBRa3ugZ +4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgrFg5fNuH8KrUwJM/gYwx7WBr+ +mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSoag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpq +A1Ihn0CoZ1Dy81of398j9tx4TuaYT1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpg +Y+RdM4kX2TGq2tbzGDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiP 
+pm8m1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJVOCiNUW7d +FGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH6aLcr34YEoP9VhdBLtUp +gn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwXQMAJKOSLakhT2+zNVVXxxvjpoixMptEm +X36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +Visa eCommerce Root +=================== +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQG +EwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2Ug +QXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2 +WhcNMjIwNjI0MDAxNjEyWjBrMQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMm +VmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h2mCxlCfL +F9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4ElpF7sDPwsRROEW+1QK8b +RaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdVZqW1LS7YgFmypw23RuwhY/81q6UCzyr0 +TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI +/k4+oKsGGelT84ATB+0tvz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzs +GHxBvfaLdXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEG +MB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUFAAOCAQEAX/FBfXxc +CLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcRzCSs00Rsca4BIGsDoo8Ytyk6feUW +YFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pz +zkWKsKZJ/0x9nXGIxHYdkFsd7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBu +YQa7FkKMcPcw++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt 398znM/jra6O1I7mT1GvFpLgXPYHDw== -----END CERTIFICATE----- -# Issuer: CN=Certum CA O=Unizeto Sp. z o.o. -# Subject: CN=Certum CA O=Unizeto Sp. z o.o. -# Label: "Certum Root CA" -# Serial: 65568 -# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9 -# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18 -# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24 ------BEGIN CERTIFICATE----- -MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM -MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD -QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM -MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD -QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E -jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo -ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI -ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu -Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg -AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7 -HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA -uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa -TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg -xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q -CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x -O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs -6GAqm4VKQPNriiTsBhYscw== ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 
Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=Secure Certificate Services O=Comodo CA Limited -# Subject: CN=Secure Certificate Services O=Comodo CA Limited -# Label: "Comodo Secure Services root" -# Serial: 1 -# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd -# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 -# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 ------BEGIN CERTIFICATE----- -MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp -ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow -fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV -BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM -cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S -HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 -CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk -3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz -6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV -HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud -EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv -Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw -Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww -DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 -5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj -Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI -gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ 
-aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl -izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= ------END CERTIFICATE----- - -# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited -# Subject: CN=Trusted Certificate Services O=Comodo CA Limited -# Label: "Comodo Trusted Services root" -# Serial: 1 -# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 -# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd -# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 -aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla -MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO -BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD -VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW -fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt -TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL -fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW -1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 -kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G -A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v -ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo -dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu -Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ -HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 -pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS -jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ -xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn -dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU 
-xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk -fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 -7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD 
-mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN 
CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu -8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ -3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA" -# Serial: 10000010 -# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0 -# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04 -# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3 ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO -TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy 
-MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk -ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn -ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71 -9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO -hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U -tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o -BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh -SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww -OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv -cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA -7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k -/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm -eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6 -u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy -7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR -iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw== ------END CERTIFICATE----- - -# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN DATACorp SGC Root CA" -# Serial: 91374294542884689855167577680241077609 -# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 -# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 -# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 ------BEGIN CERTIFICATE----- -MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB -kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw -IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG -EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD -VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu -dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 -E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ -D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK -4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq -lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW -bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB -o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT -MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js -LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr -BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB -AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft -Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj -j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH -KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv -2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 -mfnGV/TJVTl4uix5yaaIK/QI ------END CERTIFICATE----- - -# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN USERFirst Hardware Root CA" -# Serial: 91374294542884704022267039221184531197 -# 
MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 -# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 -# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 ------BEGIN CERTIFICATE----- -MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB -lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt -SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG -A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe -MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v -d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh -cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn -0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ -M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a -MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd -oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI -DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy -oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 -dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy -bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF -BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM -//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli -CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE -CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t -3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS -KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Label: "Camerfirma Chambers of Commerce Root" -# Serial: 0 -# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84 -# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1 -# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3 ------BEGIN CERTIFICATE----- -MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn -MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL -ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg -b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa -MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB -ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw -IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B -AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb -unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d -BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq -7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3 -0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX -roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG -A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j -aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p -26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA 
-BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud -EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN -BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz -aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB -AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd -p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi -1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc -XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0 -eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu -tGWaIZDgqtCYvDi1czyL+Nw= ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Label: "Camerfirma Global Chambersign Root" -# Serial: 0 -# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19 -# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9 -# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed ------BEGIN CERTIFICATE----- -MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn -MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL -ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo -YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9 -MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy -NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G -A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA -A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0 -Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s -QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV -eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795 -B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh -z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T -AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i -ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w -TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH -MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD -VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE -VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh -bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B -AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM -bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi -ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG -VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c -ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/ -AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. 
OU=Tanusitvanykiadok -# Label: "NetLock Notary (Class A) Root" -# Serial: 259 -# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7 -# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6 -# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67 ------BEGIN CERTIFICATE----- -MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV -MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe -TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0 -dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB -KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0 -N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC -dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu -MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL -b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD -zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi -3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8 -WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY -Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi -NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC -ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4 -QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0 -YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz -aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu -IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm -ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg -ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs -amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv -IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3 -Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6 -ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1 -YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg -dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs -b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G -CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO -xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP -0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ -QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk -f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK -8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 1 -# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 -# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f -# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea ------BEGIN CERTIFICATE----- -MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE -FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j -ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js -LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM -BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 -Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy -dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh -cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh -YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg -dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp -bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ -YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT -TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ -9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 -jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW -FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz -ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 -ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L -EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu -L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq -yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC -O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V -um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh -NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= ------END CERTIFICATE----- - -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 
76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK -UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - -# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services -# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services -# Label: "Swisscom Root CA 1" -# Serial: 122348795730808398873664200247279986742 -# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9 -# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51 -# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e ------BEGIN CERTIFICATE----- -MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk -MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 -YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg -Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT -AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp -Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9 -m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih -FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/ -TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F -EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco -kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu -HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF -vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo 
-19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC -L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW -bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX -JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw -FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j -BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc -K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf -ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik -Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB -sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e -3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR -ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip -mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH -b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf -rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms -hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y -zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6 -MBr1mmz0DlP5OlvRHA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe +Comodo AAA Services root +======================== +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS +R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg +TGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAw +MFoXDTI4MTIzMTIzNTk1OVowezELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hl +c3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNV +BAMMGEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC 
+ggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQuaBtDFcCLNSS1UY8y2bmhG +C1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe3M/vg4aijJRPn2jymJBGhCfHdr/jzDUs +i14HZGWCwEiwqJH5YZ92IFCokcdmtet4YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszW +Y19zjNoFmag4qMsXeDZRrOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjH +Ypy+g8cmez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQUoBEK +Iz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wewYDVR0f +BHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20vQUFBQ2VydGlmaWNhdGVTZXJ2aWNl +cy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29tb2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2Vz +LmNybDANBgkqhkiG9w0BAQUFAAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm +7l3sAg9g1o1QGE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2G9w84FoVxp7Z +8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsil2D4kF501KKaU73yqWjgom7C +12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +QuoVadis Root CA +================ +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJCTTEZMBcGA1UE +ChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAz +MTkxODMzMzNaFw0yMTAzMTcxODMzMzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRp +cyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQD +EyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Ypli4kVEAkOPcahdxYTMuk +J0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2DrOpm2RgbaIr1VxqYuvXtdj182d6UajtL +F8HVj71lODqV0D1VNk7feVcxKh7YWWVJWCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeL +YzcS19Dsw3sgQUSj7cugF+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWen +AScOospUxbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCCAk4w +PQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVvdmFkaXNvZmZzaG9y +ZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREwggENMIIBCQYJKwYBBAG+WAABMIH7 +MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNlIG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmlj +YXRlIGJ5IGFueSBwYXJ0eSBhc3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJs +ZSBzdGFuZGFyZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYIKwYBBQUHAgEW +Fmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3TKbkGGew5Oanwl4Rqy+/fMIGu +BgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rqy+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkw +FwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MS4wLAYDVQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6 +tlCLMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSkfnIYj9lo +fFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf87C9TqnN7Az10buYWnuul +LsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1RcHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2x +gI4JVrmcGmD+XcHXetwReNDWXcG31a0ymQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi +5upZIof4l/UO/erMkqQWxFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi +5nrQNiOKSnQ2+Q== +-----END CERTIFICATE----- + +QuoVadis Root CA 2 +================== +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoT +EFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMjAeFw0wNjExMjQx +ODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
+aW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQCaGMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6 +XJxgFyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55JWpzmM+Yk +lvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bBrrcCaoF6qUWD4gXmuVbB +lDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp+ARz8un+XJiM9XOva7R+zdRcAitMOeGy +lZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt +66/3FsvbzSUr5R/7mp/iUcw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1Jdxn +wQ5hYIizPtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og/zOh +D7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UHoycR7hYQe7xFSkyy +BNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuIyV77zGHcizN300QyNQliBJIWENie +J0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1Ud +DgQWBBQahGK8SEwzJQTU7tD2A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGU +a6FJpEcwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2fBluornFdLwUv +Z+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzng/iN/Ae42l9NLmeyhP3ZRPx3 +UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2BlfF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodm +VjB3pjd4M1IQWK4/YY7yarHvGH5KWWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK ++JDSV6IZUaUtl0HaB0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrW +IozchLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPRTUIZ3Ph1 +WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWDmbA4CD/pXvk1B+TJYm5X +f6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0ZohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II +4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8 +VCLAAVBpQ570su9t+Oza8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +QuoVadis Root CA 3 +================== +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoT +EFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMzAeFw0wNjExMjQx +OTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDMV0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNgg +DhoB4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUrH556VOij +KTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd8lyyBTNvijbO0BNO/79K +DDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9CabwvvWhDFlaJKjdhkf2mrk7AyxRllDdLkgbv +BNDInIjbC3uBr7E9KsRlOni27tyAsdLTmZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwp +p5ijJUMv7/FfJuGITfhebtfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8 +nT8KKdjcT5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDtWAEX +MJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZc6tsgLjoC2SToJyM +Gf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A4iLItLRkT9a6fUg+qGkM17uGcclz +uD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYDVR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHT +BgkrBgEEAb5YAAMwgcUwgZMGCCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmlj +YXRlIGNvbnN0aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVudC4wLQYIKwYB +BQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2NwczALBgNVHQ8EBAMCAQYwHQYD +VR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4GA1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4 +ywLQoUmkRzBFMQswCQYDVQQGEwJCTTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UE +AxMSUXVvVmFkaXMgUm9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZV 
+qyM07ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSemd1o417+s +hvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd+LJ2w/w4E6oM3kJpK27z +POuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2 +Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadNt54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp +8kokUvd0/bpO5qgdAm6xDYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBC +bjPsMZ57k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6szHXu +g/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0jWy10QJLZYxkNc91p +vGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeTmJlglFwjz1onl14LBQaTNx47aTbr +qZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +Security Communication Root CA +============================== +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMP +U0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEw +HhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMP +U0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw +8yl89f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJDKaVv0uM +DPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9Ms+k2Y7CI9eNqPPYJayX +5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/NQV3Is00qVUarH9oe4kA92819uZKAnDfd +DJZkndwi92SL32HeFZRSFaB9UslLqCHJxrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2 +JChzAgMBAAGjPzA9MB0GA1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYw +DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vGkl3g +0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfrUj94nK9NrvjVT8+a +mCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5Bw+SUEmK3TGXX8npN6o7WWWXlDLJ +s58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJUJRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ +6rBK+1YWc26sTfcioU+tHXotRSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAi +FL39vmwLAw== +-----END CERTIFICATE----- + +Sonera Class 2 Root CA +====================== +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEPMA0GA1UEChMG +U29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAxMDQwNjA3Mjk0MFoXDTIxMDQw +NjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNVBAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJh +IENsYXNzMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3 +/Ei9vX+ALTU74W+oZ6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybT +dXnt5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s3TmVToMG +f+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2EjvOr7nQKV0ba5cTppCD8P +tOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu8nYybieDwnPz3BjotJPqdURrBGAgcVeH +nfO+oJAjPYok4doh28MCAwEAAaMzMDEwDwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITT +XjwwCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt +0jSv9zilzqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/3DEI +cbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvDFNr450kkkdAdavph +Oe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6Tk6ezAyNlNzZRZxe7EJQY670XcSx +EtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLH +llpwrN9M +-----END CERTIFICATE----- + +XRamp Global CA Root +==================== +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UE +BhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2Vj +dXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB 
+dXRob3JpdHkwHhcNMDQxMTAxMTcxNDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMx +HjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkg +U2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS638eMpSe2OAtp87ZOqCwu +IR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCPKZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMx +foArtYzAQDsRhtDLooY2YKTVMIJt2W7QDxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FE +zG+gSqmUsE3a56k0enI4qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqs +AxcZZPRaJSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNViPvry +xS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASsjVy16bYbMDYGA1UdHwQvMC0wK6Ap +oCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMC +AQEwDQYJKoZIhvcNAQEFBQADggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc +/Kh4ZzXxHfARvbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLaIR9NmXmd4c8n +nxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSyi6mx5O+aGtA9aZnuqCij4Tyz +8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQO+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +Go Daddy Class 2 CA +=================== +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMY +VGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkG +A1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g +RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQAD +ggENADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv +2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+qN1j3hybX2C32 +qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiOr18SPaAIBQi2XKVlOARFmR6j +YGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmY +vLEHZ6IVDd2gWMZEewo+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0O +BBYEFNLEsNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h/t2o +atTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMu +MTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwG +A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wim +PQoZ+YeAEW5p5JYXMP80kWNyOO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKt +I3lpjbi2Tc7PTMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mERdEr/VxqHD3VI +Ls9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5CufReYNnyicsbkqWletNw+vHX/b +vZ8= +-----END CERTIFICATE----- + +Starfield Class 2 CA +==================== +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzElMCMGA1UEChMc +U3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZpZWxkIENsYXNzIDIg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQwNjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBo +MQswCQYDVQQGEwJVUzElMCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAG +A1UECxMpU3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqG +SIb3DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf8MOh2tTY +bitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN+lq2cwQlZut3f+dZxkqZ +JRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVm +epsZGD3/cVE8MC5fvj13c7JdBmzDI1aaK4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSN 
+F4Azbl5KXZnJHoe0nRrA1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HF +MIHCMB0GA1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fRzt0f +hvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNo +bm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBDbGFzcyAyIENlcnRpZmljYXRpb24g +QXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGs +afPzWdqbAYcaT1epoXkJKtv3L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLM +PUxA2IGvd56Deruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynpVSJYACPq4xJD +KVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEYWQPJIrSPnNVeKtelttQKbfi3 +QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +Taiwan GRCA +=========== +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/MQswCQYDVQQG +EwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4X +DTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1owPzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dv +dmVybm1lbnQgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qN +w8XRIePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1qgQdW8or5 +BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKyyhwOeYHWtXBiCAEuTk8O +1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAtsF/tnyMKtsc2AtJfcdgEWFelq16TheEfO +htX7MfP6Mb40qij7cEwdScevLJ1tZqa2jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wov +J5pGfaENda1UhhXcSTvxls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7 +Q3hub/FCVGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHKYS1t +B6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoHEgKXTiCQ8P8NHuJB +O9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThNXo+EHWbNxWCWtFJaBYmOlXqYwZE8 +lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1UdDgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNV +HRMEBTADAQH/MDkGBGcqBwAEMTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg2 +09yewDL7MTqKUWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyfqzvS/3WXy6Tj +Zwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaKZEk9GhiHkASfQlK3T8v+R0F2 +Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFEJPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlU +D7gsL0u8qV1bYH+Mh6XgUmMqvtg7hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6Qz +DxARvBMB1uUO07+1EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+Hbk +Z6MmnD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WXudpVBrkk +7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44VbnzssQwmSNOXfJIoRIM3BKQ +CZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDeLMDDav7v3Aun+kbfYNucpllQdSNpc5Oy ++fwC00fmcc4QAu4njIT/rEUNE1yDMuAlpYYsfPQS +-----END CERTIFICATE----- + +DigiCert Assured ID Root CA +=========================== +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQw +IgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzEx +MTEwMDAwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQL +ExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0Ew +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7cJpSIqvTO +9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYPmDI2dsze3Tyoou9q+yHy +UmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW +/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpy 
+oeb6pNnVFzF1roV9Iq4/AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whf +GHdPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRF +66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYunpyGd823IDzANBgkq +hkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRCdWKuh+vy1dneVrOfzM4UKLkNl2Bc +EkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTffwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38Fn +SbNd67IJKusm7Xi+fT8r87cmNW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i +8b5QZ7dsvfPxH2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe +o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== -----END CERTIFICATE----- -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +DigiCert Global Root CA +======================= +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBhMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw +HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAw +MDAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3 +dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsBCSDMAZOn +TjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97nh6Vfe63SKMI2tavegw5 +BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt43C/dxC//AH2hdmoRBBYMql1GNXRor5H +4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7PT19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y +7vrTC0LUq7dBMtoM1O/4gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQAB +o2MwYTAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbRTLtm +8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUwDQYJKoZIhvcNAQEF +BQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/EsrhMAtudXH/vTBH1jLuG2cenTnmCmr 
+EbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIt +tep3Sp+dWOIrWcBAI+0tKIJFPnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886 +UAb3LujEV0lsYSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= -----END CERTIFICATE----- -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz -cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 
-QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +DigiCert High Assurance EV Root CA +================================== +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBsMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSsw +KQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5jZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAw +MFoXDTMxMTExMDAwMDAwMFowbDELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZ +MBcGA1UECxMQd3d3LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFu +Y2UgRVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm+9S75S0t +Mqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTWPNt0OKRKzE0lgvdKpVMS +OO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEMxChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3 +MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFBIk5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQ +NAQTXKFx01p8VdteZOE3hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUe +h10aUAsgEsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMB +Af8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaAFLE+w2kD+L9HAdSY +JhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3NecnzyIZgYIVyHbIUf4KmeqvxgydkAQ +V8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6zeM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFp +myPInngiK3BD41VHMWEZ71jFhS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkK +mNEVX58Svnw2Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep+OkuE6N36B9K +-----END CERTIFICATE----- + +Certplus Class 2 Primary CA +=========================== +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAwPTELMAkGA1UE +BhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFzcyAyIFByaW1hcnkgQ0EwHhcN +OTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2Vy +dHBsdXMxGzAZBgNVBAMTEkNsYXNzIDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBANxQltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR +5aiRVhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyLkcAbmXuZ +Vg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCdEgETjdyAYveVqUSISnFO +YFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yasH7WLO7dDWWuwJKZtkIvEcupdM5i3y95e +e++U8Rs+yskhwcWYAqqi9lt3m/V+llU0HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRME +CDAGAQH/AgEKMAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJ +YIZIAYb4QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMuY29t +L0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/AN9WM2K191EBkOvD +P9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8yfFC82x/xXp8HVGIutIKPidd3i1R +TtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMRFcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+ +7UCmnYR0ObncHoUW2ikbhiMAybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW +//1IMwrh3KWBkJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 l7+ijrRU -----END CERTIFICATE----- -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. 
-# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES -# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES -# Label: "DST ACES CA X6" -# Serial: 17771143917277623872238992636097467865 -# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8 -# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d -# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40 ------BEGIN CERTIFICATE----- -MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx -ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w -MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD -VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx -FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu -ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7 -gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH -fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a -ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT -ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk -c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto -dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt -aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI -hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk -QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/ -h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq -nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR -rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2 -9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis= ------END CERTIFICATE----- - -# Issuer: 
CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. -# Label: "TURKTRUST Certificate Services Provider Root 1" -# Serial: 1 -# MD5 Fingerprint: f1:6a:22:18:c9:cd:df:ce:82:1d:1d:b7:78:5c:a9:a5 -# SHA1 Fingerprint: 79:98:a3:08:e1:4d:65:85:e6:c2:1e:15:3a:71:9f:ba:5a:d3:4a:d9 -# SHA256 Fingerprint: 44:04:e3:3b:5e:14:0d:cf:99:80:51:fd:fc:80:28:c7:c8:16:15:c5:ee:73:7b:11:1b:58:82:33:a9:b5:35:a0 ------BEGIN CERTIFICATE----- -MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGDAJUUjEPMA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykg -MjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 -dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMxMDI3MTdaFw0xNTAz -MjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsgU2Vy -dGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYD -VQQHDAZBTktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kg -xLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEu -xZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7 -XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GXyGl8hMW0kWxsE2qkVa2k -heiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8iSi9BB35J -YbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5C -urKZ8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1 -JuTm5Rh8i27fbMx4W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51 -b0dewQIDAQABoxAwDjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV -9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46sWrv7/hg0Uw2ZkUd82YCdAR7 -kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxEq8Sn5RTOPEFh -fEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy -B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdA -aLX/7KfS0zgYnNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKS -RGQDJereW26fyfJOrN3H ------END CERTIFICATE----- - -# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005 -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. 
(c) Kasım 2005 -# Label: "TURKTRUST Certificate Services Provider Root 2" -# Serial: 1 -# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00 -# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7 -# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6 ------BEGIN CERTIFICATE----- -MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS -S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg -SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3 -WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv -bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU -UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw -bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe -LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef -J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh -R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ -Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX -JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p -zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S -Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ -KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq -ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4 -Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz -gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH -uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS -y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI= ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 
-lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +DST Root CA X3 +============== +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/MSQwIgYDVQQK +ExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMTDkRTVCBSb290IENBIFgzMB4X +DTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVowPzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1 +cmUgVHJ1c3QgQ28uMRcwFQYDVQQDEw5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmT +rE4Orz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEqOLl5CjH9 +UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9bxiqKqy69cK3FCxolkHRy +xXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40d +utolucbY38EVAjqr2m7xPi71XAicPNaDaeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQ +MA0GCSqGSIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69ikug +dB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXrAvHRAosZy5Q6XkjE +GB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZzR8srzJmwN0jP41ZL9c8PDHIyh8bw +RLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubS +fZGL+T0yjWW06XyxV3bqxbYoOb8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +SwissSign Gold CA - G2 +====================== +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkNIMRUw +EwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2lnbiBHb2xkIENBIC0gRzIwHhcN +MDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBFMQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dp +c3NTaWduIEFHMR8wHQYDVQQDExZTd2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUq +t2/876LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+bbqBHH5C +jCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c6bM8K8vzARO/Ws/BtQpg +vd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqEemA8atufK+ze3gE/bk3lUIbLtK/tREDF +ylqM2tIrfKjuvqblCqoOpd8FUrdVxyJdMmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvR +AiTysybUa9oEVeXBCsdtMDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuend +jIj3o02yMszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69yFGkO +peUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPiaG59je883WX0XaxR +7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxMgI93e2CaHt+28kgeDrpOVG2Y4OGi +GqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUWyV7lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64 +OfPAeGZe6Drn8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe645R88a7A3hfm +5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczOUYrHUDFu4Up+GC9pWbY9ZIEr +44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOf +Mke6UiI0HTJ6CVanfCU2qT1L2sCCbwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6m +Gu6uLftIdxf+u+yvGPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxp +mo/a77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCChdiDyyJk +vC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid392qgQmwLOM7XdVAyksLf 
+KzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEppLd6leNcG2mqeSz53OiATIgHQv2ieY2Br +NU0LbbqhPcCT4H8js1WtciVORvnSFu+wZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6Lqj +viOvrv1vA+ACOzB2+httQc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +SwissSign Silver CA - G2 +======================== +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCQ0gxFTAT +BgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMB4X +DTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0NlowRzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3 +aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG +9w0BAQEFAAOCAg8AMIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644 +N0MvFz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7brYT7QbNHm ++/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieFnbAVlDLaYQ1HTWBCrpJH +6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH6ATK72oxh9TAtvmUcXtnZLi2kUpCe2Uu +MGoM9ZDulebyzYLs2aFK7PayS+VFheZteJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5h +qAaEuSh6XzjZG6k4sIN/c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5 +FZGkECwJMoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRHHTBs +ROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTfjNFusB3hB48IHpmc +celM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb65i/4z3GcRm25xBWNOHkDRUjvxF3X +CO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUF6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRB +tjpbO8tFnb0cwpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBAHPGgeAn0i0P +4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShpWJHckRE1qTodvBqlYJ7YH39F +kWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L +3XWgwF15kIwb4FDm3jH+mHtwX6WQ2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx +/uNncqCxv1yL5PqZIseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFa +DGi8aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2Xem1ZqSqP +e97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQRdAtq/gsD/KNVV4n+Ssuu +WxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJ +DIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ub +DgEj8Z+7fNzcbBGXJbLytGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +GeoTrust Primary Certification Authority +======================================== +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMoR2VvVHJ1c3QgUHJpbWFyeSBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgx +CzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQ +cmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9AWbK7hWN +b6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjAZIVcFU2Ix7e64HXprQU9 +nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE07e9GceBrAqg1cmuXm2bgyxx5X9gaBGge +RwLmnWDiNpcB3841kt++Z8dtd1k7j53WkBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGt +tm/81w7a4DSwDRp35+MImO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJKoZI +hvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ16CePbJC/kRYkRj5K +Ts4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl4b7UVXGYNTq+k+qurUKykG/g/CFN 
+NWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6KoKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHa +Floxt/m0cYASSJlyc1pZU8FjUjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG +1riR/aYNKxoUAT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +thawte Primary Root CA +====================== +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCBqTELMAkGA1UE +BhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2 +aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3 +MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwg +SW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMv +KGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMT +FnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCs +oPD7gFnUnMekz52hWXMJEEUMDSxuaPFsW0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ +1CRfBsDMRJSUjQJib+ta3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGc +q/gcfomk6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6Sk/K +aAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94JNqR32HuHUETVPm4p +afs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XPr87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUF +AAOCAQEAeRHAS7ORtvzw6WfUDW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeE +uzLlQRHAd9mzYJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2/qxAeeWsEG89 +jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/LHbTY5xZ3Y+m4Q6gLkH3LpVH +z7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7jVaMaA== +-----END CERTIFICATE----- + +VeriSign Class 3 Public Primary Certification Authority - G5 +============================================================ +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCByjELMAkGA1UE +BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBO +ZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVk +IHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2ln +biBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBh +dXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQCvJAgIKXo1nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKz +j/i5Vbext0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIzSdhD +Y2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQGBO+QueQA5N06tRn/ +Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+rCpSx4/VBEnkjWNHiDxpg8v+R70r +fk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/ +BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2Uv +Z2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKvMzEzMA0GCSqG +SIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzEp6B4Eq1iDkVwZMXnl2YtmAl+ +X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKE +KQsTb47bDN0lAtukixlE0kF6BWlKWE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiC 
+Km0oHw0LxOXnGiYZ4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vE +ZV8NhnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +SecureTrust CA +============== +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBIMQswCQYDVQQG +EwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xFzAVBgNVBAMTDlNlY3VyZVRy +dXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIzMTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAe +BgNVBAoTF1NlY3VyZVRydXN0IENvcnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQX +OZEzZum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO0gMdA+9t +DWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIaowW8xQmxSPmjL8xk037uH +GFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b +01k/unK8RCSc43Oz969XL0Imnal0ugBS8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmH +ursCAwEAAaOBnTCBmjATBgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCegJYYj +aHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQAwDQYJ +KoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt36Z3q059c4EVlew3KW+JwULKUBRSu +SceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHf +mbx8IVQr5Fiiu1cprp6poxkmD5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZ +nMUFdAvnZyPSCPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR 3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= -----END CERTIFICATE----- -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +Secure Global CA +================ +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQG +EwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBH 
+bG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkxMjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEg +MB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwg +Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jx +YDiJiQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa/FHtaMbQ +bqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJjnIFHovdRIWCQtBJwB1g +8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnIHmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYV +HDGA76oYa8J719rO+TMg1fW9ajMtgQT7sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi +0XPnj3pDAgMBAAGjgZ0wgZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCswKaAn +oCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsGAQQBgjcVAQQDAgEA +MA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0LURYD7xh8yOOvaliTFGCRsoTciE6+ +OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXOH0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cn +CDpOGR86p1hcF895P4vkp9MmI50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/5 +3CYNv6ZHdAbYiNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW -----END CERTIFICATE----- -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. 
-# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA -# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA -# Label: "WellsSecure Public Root Certificate Authority" -# Serial: 1 -# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36 -# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee -# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43 ------BEGIN CERTIFICATE----- -MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx -IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs -cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v -dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0 -MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl -bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD -DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r -WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU -Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs -HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj -z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf -SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl -AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG -KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P -AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j -BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC 
-VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX -ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg -Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB -ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd -/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB -A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn -k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9 -iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv -2G0xffX8oRAHh84vWdw+WNs= ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI -# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI -# Label: "IGC/A" -# Serial: 245102874772 -# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37 -# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c -# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32 ------BEGIN CERTIFICATE----- -MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT -AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ -TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG -9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw -MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM -BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO -MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2 -LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI -s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2 -xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4 -u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b -F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx -Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd -PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV -HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx -NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF 
-AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ -L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY -YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg -Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a -NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R -0982gaEbeC9xs/FZTEYYKKuF0mBWWg== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 -# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 -# Label: "Security Communication EV RootCA1" -# Serial: 0 -# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3 -# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d -# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37 ------BEGIN CERTIFICATE----- -MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz -MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N -IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11 -bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE -RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO -zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5 -bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF -MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1 -VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC -OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G -CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW -tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ -q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb -EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+ -Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O -VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490 ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF 
-mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA -# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA -# Label: "Microsec e-Szigno Root CA" -# Serial: 272122594155480254301341951808045322001 -# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5 -# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d -# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0 ------BEGIN CERTIFICATE----- -MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw -cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy -b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z -ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4 -NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN -TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p -Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw -ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u -uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+ -LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA -vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770 -Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx -62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB -AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw -LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP -BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB -AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov -MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5 -ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn -AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT -AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh -ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo -AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa -AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln -bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p -Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP -PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv -Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB -EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu -w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj -cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV -HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI -VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS -BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS -b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS 
-8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds -ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl -7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a -86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR -hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/ -MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +COMODO Certification Authority +============================== +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UE +BhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgG +A1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNVBAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTAeFw0wNjEyMDEwMDAwMDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEb +MBkGA1UECBMSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFD +T01PRE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3UcEbVASY06m/weaKXTuH ++7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI2GqGd0S7WWaXUF601CxwRM/aN5VCaTww +xHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV +4EajcNxo2f8ESIl33rXp+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA +1KGzqSX+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5OnKVI +rLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW/zAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6gPKA6hjhodHRwOi8vY3JsLmNvbW9k +b2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9uQXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOC +AQEAPpiem/Yb6dc5t3iuHXIYSdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CP +OGEIqB6BCsAvIC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4zJVSk/BwJVmc 
+IGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5ddBA6+C4OmF4O5MBKgxTMVBbkN ++8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IBZQ== +-----END CERTIFICATE----- + +Network Solutions Certificate Authority +======================================= +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQG +EwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydOZXR3b3Jr +IFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMx +MjM1OTU5WjBiMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwzc7MEL7xx +jOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPPOCwGJgl6cvf6UDL4wpPT +aaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rlmGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXT +crA/vGp97Eh/jcOrqnErU2lBUzS1sLnFBgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc +/Qzpf14Dl847ABSHJ3A4qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMB +AAGjgZcwgZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwubmV0c29sc3NsLmNv +bS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3JpdHkuY3JsMA0GCSqGSIb3DQEBBQUA +A4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc86fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q +4LqILPxFzBiwmZVRDuwduIj/h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/ +GGUsyfJj4akH/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHNpGxlaKFJdlxD +ydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +COMODO ECC Certification Authority +================================== +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTELMAkGA1UEBhMC +R0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UE +ChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwHhcNMDgwMzA2MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0Ix +GzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSRFtSrYpn1PlILBs5BAH+X +4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0JcfRK9ChQtP6IHG4/bC8vCVlbpVsLM5ni +wz2J+Wos77LTBumjQjBAMB0GA1UdDgQWBBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VG +FAkK+qDmfQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdvGDeA +U/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +OISTE WISeKey Global Root GA CA +=============================== +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UE +BhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHlyaWdodCAoYykgMjAwNTEiMCAG +A1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBH +bG9iYWwgUm9vdCBHQSBDQTAeFw0wNTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYD +VQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIw +IAYDVQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5 +IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy0+zAJs9 +Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxRVVuuk+g3/ytr6dTqvirdqFEr12bDYVxg +Asj1znJ7O7jyTmUIms2kahnBAbtzptf2w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbD 
+d50kc3vkDIzh2TbhmYsFmQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ +/yxViJGg4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t94B3R +LoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQwEAYJKwYBBAGCNxUBBAMCAQAwDQYJ +KoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOxSPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vIm +MMkQyh2I+3QZH4VFvbBsUfk2ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4 ++vg1YFkCExh8vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZiFj4A4xylNoEY +okxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ/L7fCg0= +-----END CERTIFICATE----- + +Certigna +======== +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNVBAYTAkZSMRIw +EAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4XDTA3MDYyOTE1MTMwNVoXDTI3 +MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwI +Q2VydGlnbmEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7q +XOEm7RFHYeGifBZ4QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyH +GxnygQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbwzBfsV1/p +ogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q130yGLMLLGq/jj8UEYkg +DncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKf +Irjxwo1p3Po6WAbfAgMBAAGjgbwwgbkwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQ +tCRZvgHyUtVF9lo53BEwZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJ +BgNVBAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzjAQ/J +SP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG9w0BAQUFAAOCAQEA +hQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8hbV6lUmPOEvjvKtpv6zf+EwLHyzs+ +ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFncfca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1klu +PBS1xp81HlDQwY9qcEQCYsuuHWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY +1gkIl2PlwS6wt0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Label: "TC TrustCenter Class 2 CA II" -# Serial: 941389028203453866782103406992443 -# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 -# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e -# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf -tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg -uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J -XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK -8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 -5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud 
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 -kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS -GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt -ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 -au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV -hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI -dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA I" -# Serial: 601024842042189035295619584734726 -# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c -# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 -# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx -MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg -R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD -VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR -JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T -fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu -jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z -wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ -fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD -VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G -CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 -7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn -8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs -ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT -ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ -2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY ------END CERTIFICATE----- - -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB 
-IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Deutsche Telekom Root CA 2 +========================== +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMT +RGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEG +A1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENBIDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5 +MjM1OTAwWjBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0G +A1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBS +b290IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEUha88EOQ5 +bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhCQN/Po7qCWWqSG6wcmtoI +KyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1MjwrrFDa1sPeg5TKqAyZMg4ISFZbavva4VhY +AUlfckE8FQYBjl2tqriTtM2e66foai1SNNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aK +Se5TBY8ZTNXeWHmb0mocQqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTV +jlsB9WoHtxa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAPBgNV +HRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAlGRZrTlk5ynr +E/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756AbrsptJh6sTtU6zkXR34ajgv8HzFZMQSy +zhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpaIzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8 +rZ7/gFnkm0W09juwzTkZmDLl6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4G +dyd1Lx+4ivn+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU Cm26OWMohpLzGITY+9HPBVZkVw== -----END CERTIFICATE----- -# Issuer: CN=ComSign Secured CA O=ComSign -# Subject: CN=ComSign Secured CA O=ComSign -# Label: "ComSign Secured CA" -# Serial: 264725503855295744117309814499492384489 -# MD5 Fingerprint: 40:01:25:06:8d:21:43:6a:0e:43:00:9c:e7:43:f3:d5 -# SHA1 Fingerprint: f9:cd:0e:2c:da:76:24:c1:8f:bd:f0:f0:ab:b6:45:b8:f7:fe:d5:7a -# SHA256 Fingerprint: 50:79:41:c7:44:60:a0:b4:70:86:22:0d:4e:99:32:57:2a:b5:d1:b5:bb:cb:89:80:ab:1c:b1:76:51:a8:44:d2 ------BEGIN CERTIFICATE----- -MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAw -PDEbMBkGA1UEAxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWdu -MQswCQYDVQQGEwJJTDAeFw0wNDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwx -GzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBDQTEQMA4GA1UEChMHQ29tU2lnbjEL -MAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGtWhf -HZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs49oh -gHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sW -v+bznkqH7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ue -Mv5WJDmyVIRD9YTC2LxBkMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr 
-9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d19guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt -6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUwAwEB/zBEBgNVHR8EPTA7 -MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29tU2lnblNl -Y3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58 -ADsAj8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkq -hkiG9w0BAQUFAAOCAQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7p -iL1DRYHjZiM/EoZNGeQFsOY3wo3aBijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtC -dsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtpFhpFfTMDZflScZAmlaxMDPWL -kz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP51qJThRv4zdL -hfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz -OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +Cybertrust Global Root +====================== +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYGA1UEChMPQ3li +ZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBSb290MB4XDTA2MTIxNTA4 +MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQD +ExZDeWJlcnRydXN0IEdsb2JhbCBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA ++Mi8vRRQZhP/8NN57CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW +0ozSJ8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2yHLtgwEZL +AfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iPt3sMpTjr3kfb1V05/Iin +89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNzFtApD0mpSPCzqrdsxacwOUBdrsTiXSZT +8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAYXSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2 +MDSgMqAwhi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3JsMB8G +A1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUAA4IBAQBW7wojoFRO 
+lZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMjWqd8BfP9IjsO0QbE2zZMcwSO5bAi +5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUxXOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2 +hO0j9n0Hq0V+09+zv+mKts2oomcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+T +X3EJIrduPuocA06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW WL1WMRJOEcgh4LMRkWXbtKaIOM5V -----END CERTIFICATE----- -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi -# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi -# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3" -# Serial: 17 -# 
MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26 -# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96 -# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a ------BEGIN CERTIFICATE----- -MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS -MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp -bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw -VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy -YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy -dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2 -ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe -Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx -GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls -aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU -QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh -xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0 -aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr -IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB -IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h -gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK -O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO -fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw -lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL -hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID -AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP -NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t -wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM -7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh -gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n -oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs -yZyQ2uypQjyttgI= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327 -# Label: "Buypass Class 2 CA 1" -# Serial: 1 -# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23 -# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc -# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38 ------BEGIN CERTIFICATE----- -MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg -Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL -MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD -VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0 -ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX -l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB -HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B -5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3 -WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD -AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP -gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+ 
-DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu -BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs -h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk -LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327 -# Label: "Buypass Class 3 CA 1" -# Serial: 2 -# MD5 Fingerprint: df:3c:73:59:81:e7:39:50:81:04:4c:34:a2:cb:b3:7b -# SHA1 Fingerprint: 61:57:3a:11:df:0e:d8:7e:d5:92:65:22:ea:d0:56:d7:44:b3:23:71 -# SHA256 Fingerprint: b7:b1:2b:17:1f:82:1d:aa:99:0c:d0:fe:50:87:b1:28:44:8b:a8:e5:18:4f:84:c5:1e:02:b5:c8:fb:96:2b:24 ------BEGIN CERTIFICATE----- -MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg -Q2xhc3MgMyBDQSAxMB4XDTA1MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzEL -MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD -VQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKxifZg -isRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//z -NIqeKNc0n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI -+MkcVyzwPX6UvCWThOiaAJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2R -hzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+ -mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0PAQH/BAQD -AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFP -Bdy7pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27s -EzNxZy5p+qksP2bAEllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2 -mSlf56oBzKwzqBwKu5HEA6BvtjT5htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yC -e/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQjel/wroQk5PMr+4okoyeYZdow -dXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915 ------END CERTIFICATE----- - -# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. -# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. 
-# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1" -# Serial: 5525761995591021570 -# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37 -# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58 -# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2 ------BEGIN CERTIFICATE----- -MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV -BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt -ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4 -MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg -SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl -a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h -4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk -tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s -tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL -dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4 -c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um -TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z -+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O -Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW -OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW -fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2 -l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB -/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw -FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+ -8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI -6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO -TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME -wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY -Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn -xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q -DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q -Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t -hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4 -7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7 -QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ 
-OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=CNNIC ROOT O=CNNIC -# Subject: CN=CNNIC ROOT O=CNNIC -# Label: "CNNIC ROOT" -# Serial: 1228079105 -# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19 -# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f -# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7 ------BEGIN CERTIFICATE----- -MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD -TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2 -MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF -Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB -DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh -IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6 -dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO -V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC -GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN -v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB -AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB -Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO -76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK -OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH -ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi -yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL -buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj -2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE= ------END CERTIFICATE----- - -# Issuer: O=Japanese Government OU=ApplicationCA -# Subject: O=Japanese Government OU=ApplicationCA -# Label: "ApplicationCA - Japanese Government" -# Serial: 49 -# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6 -# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74 -# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19 ------BEGIN CERTIFICATE----- -MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc -MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp -b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT -AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs -aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H -j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K -f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55 -IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw -FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht -QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm -/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ -k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ 
-MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC -seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ -hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+ -eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U -DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj -B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL -rosot4LKGAfmt1t06SAZf7IbiVQ= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary 
Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ -seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz -4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. 
OU=Tanúsítványkiadók (Certification Services) -# Label: "NetLock Arany (Class Gold) Főtanúsítvány" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - -# Issuer: CN=CA Disig O=Disig a.s. -# Subject: CN=CA Disig O=Disig a.s. -# Label: "CA Disig" -# Serial: 1 -# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6 -# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41 -# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET -MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE -AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw -CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg -YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw -ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE -Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX -mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD -XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW -S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp -FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD -AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu -ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z -ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv -Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw -DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6 -yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq -EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/ -CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB -EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN -PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag= ------END CERTIFICATE----- - -# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus -# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus -# Label: "Juur-SK" -# Serial: 999181308 -# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55 -# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89 -# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39 ------BEGIN CERTIFICATE----- -MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN -AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp -dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw -MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw 
-CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ -MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB -SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz -ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH -LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP -PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL -2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w -ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC -MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk -AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0 -AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz -AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz -AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f -BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE -FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY -P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi -CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g -kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95 -HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS -na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q -qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z -TbvGRNs2yyqcjg== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI -# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI -# Label: "ACEDICOM Root" -# Serial: 7029493972724711941 -# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6 -# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84 -# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a ------BEGIN CERTIFICATE----- -MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE -AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x -CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW -MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF -RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC -AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7 -09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7 -XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P -Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK -t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb -X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28 -MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU -fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI -2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH -K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae -ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP -BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ -MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw -RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv -bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm -fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3 -gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe -I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i 
-5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi -ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn -MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ -o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6 -zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN -GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt -r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK -Z05phkOTOPu220+DkdRgfks+KzgHVZhepA== ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm 
-KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. 
-# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +ePKI Root Certification Authority +================================= +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQG +EwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0ZC4xKjAoBgNVBAsMIWVQS0kg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMx +MjdaMF4xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEq +MCgGA1UECwwhZVBLSSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAHSyZbCUNs +IZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAhijHyl3SJCRImHJ7K2RKi 
+lTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3XDZoTM1PRYfl61dd4s5oz9wCGzh1NlDiv +qOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX +12ruOzjjK9SXDrkb5wdJfzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0O +WQqraffAsgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uUWH1+ +ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLSnT0IFaUQAS2zMnao +lQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pHdmX2Os+PYhcZewoozRrSgx4hxyy/ +vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJipNiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXi +Zo1jDiVN1Rmy5nk3pyKdVDECAwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/Qkqi +MAwGA1UdEwQFMAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGBuvl2ICO1J2B0 +1GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6YlPwZpVnPDimZI+ymBV3QGypzq +KOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkPJXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdV +xrsStZf0X4OFunHB2WyBEXYKCrC/gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEP +NXubrjlpC2JgQCA2j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+r +GNm65ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUBo2M3IUxE +xJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS/jQ6fbjpKdx2qcgw+BRx +gMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2zGp1iro2C6pSe3VkQw63d4k3jMdXH7Ojy +sP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTEW9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmOD +BCEIZ43ygknQW/2xzQ+DhNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +certSIGN ROOT CA +================ +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYTAlJPMREwDwYD +VQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTAeFw0wNjA3MDQxNzIwMDRa +Fw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UE +CxMQY2VydFNJR04gUk9PVCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7I +JUqOtdu0KBuqV5Do0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHH +rfAQUySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5dRdY4zTW2 +ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQOA7+j0xbm0bqQfWwCHTD +0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwvJoIQ4uNllAoEwF73XVv4EOLQunpL+943 +AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8B +Af8EBAMCAcYwHQYDVR0OBBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IB +AQA+0hyJLjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecYMnQ8 +SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ44gx+FkagQnIl6Z0 +x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6IJd1hJyMctTEHBDa0GpC9oHRxUIlt +vBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNwi/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7Nz +TogVZ96edhBiIL5VaZVDADlN9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +GeoTrust Primary Certification Authority - G3 +============================================= +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA4IEdlb1RydXN0 +IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFy +eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIz +NTk1OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAo +YykgMjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMT +LUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5j +K/BGvESyiaHAKAxJcCGVn2TAppMSAmUmhsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdE 
+c5IiaacDiGydY8hS2pgn5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3C +IShwiP/WJmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exALDmKu +dlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZChuOl1UcCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMR5yo6hTgMdHNxr +2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IBAQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9 +cr5HqQ6XErhK8WTTOd8lNNTBzU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbE +Ap7aDHdlDkQNkv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUHSJsMC8tJP33s +t/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2Gspki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +thawte Primary Root CA - G2 +=========================== +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDELMAkGA1UEBhMC +VVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMpIDIwMDcgdGhhd3RlLCBJbmMu +IC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3Qg +Q0EgLSBHMjAeFw0wNzExMDUwMDAwMDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEV +MBMGA1UEChMMdGhhd3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBG +b3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAt +IEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/BebfowJPDQfGAFG6DAJS +LSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6papu+7qzcMBniKI11KOasf2twu8x+qi5 +8/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU +mtgAMADna3+FGO6Lts6KDPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUN +G4k8VIZ3KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41oxXZ3K +rr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +thawte Primary Root CA - G3 +=========================== +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCBrjELMAkGA1UE +BhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2 +aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0w +ODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9uMTgwNgYD +VQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIG +A1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAsr8nLPvb2FvdeHsbnndmgcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2At +P0LMqmsywCPLLEHd5N/8YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC ++BsUa0Lfb1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS99irY +7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2SzhkGcuYMXDhpxwTW +vGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUkOQIDAQABo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJ +KoZIhvcNAQELBQADggEBABpA2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweK +A3rD6z8KLFIWoCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7cKUGRIjxpp7sC +8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fMm7v/OeZWYdMKp8RcTGB7BXcm +er/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZuMdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +GeoTrust Primary Certification Authority - G2 +============================================= +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDELMAkGA1UEBhMC 
+VVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA3IEdlb1RydXN0IElu +Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1 +OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwNyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMTLUdl +b1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjB2MBAGByqGSM49AgEG +BSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcLSo17VDs6bl8VAsBQps8lL33KSLjHUGMc +KiEIfJo22Av+0SbFWDEwKCXzXV2juLaltJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+ +EVXVMAoGCCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGTqQ7m +ndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBuczrD6ogRLQy7rQkgu2 +npaqBA+K +-----END CERTIFICATE----- + +VeriSign Universal Root Certification Authority +=============================================== +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCBvTELMAkGA1UE +BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBO +ZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVk +IHVzZSBvbmx5MTgwNgYDVQQDEy9WZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9u +IEF1dGhvcml0eTAeFw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJV +UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv +cmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNhbCBSb290IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj +1mCOkdeQmIN65lgZOIzF9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGP +MiJhgsWHH26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+HLL72 +9fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN/BMReYTtXlT2NJ8I +AfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPTrJ9VAMf2CGqUuV/c4DPxhGD5WycR +tPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0G +CCsGAQUFBwEMBGEwX6FdoFswWTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2O +a8PPgGrUSBgsexkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4sAPmLGd75JR3 +Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+seQxIcaBlVZaDrHC1LGmWazx +Y8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTx +P/jgdFcrGJ2BtMQo2pSXpXDrrB2+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+P +wGZsY6rp2aQW9IHRlRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4 +mJO37M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +VeriSign Class 3 Public Primary Certification Authority - G4 +============================================================ +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjELMAkGA1UEBhMC +VVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3 +b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVz +ZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBU +cnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRo +b3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5 
+IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8 +Utpkmw4tXNherJI9/gHmGUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGz +rl0Bp3vefLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEw +HzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVyaXNpZ24u +Y29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMWkf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMD +A2gAMGUCMGYhDBgmYFo4e1ZC4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIx +AJw9SDkjOVgaFRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +NetLock Arany (Class Gold) Főtanúsítvány +======================================== +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQGEwJIVTERMA8G +A1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3MDUGA1UECwwuVGFuw7pzw610 +dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBB +cmFueSAoQ2xhc3MgR29sZCkgRsWRdGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgx +MjA2MTUwODIxWjCBpzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxO +ZXRMb2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNzIEdvbGQpIEbFkXRhbsO6 +c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxCRec75LbRTDofTjl5Bu +0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrTlF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw +/HpYzY6b7cNGbIRwXdrzAZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAk +H3B5r9s5VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRGILdw +fzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2BJtr+UBdADTHLpl1 +neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2MU9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwW +qZw8UQCgwBEIBaeZ5m8BiFRhbvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTta +YtOUZcTh5m2C+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2FuLjbvrW5Kfna +NwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2XjG4Kvte9nHfRCaexOYNkbQu +dZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +Staat der Nederlanden Root CA - G2 +================================== +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJOTDEeMBwGA1UE +CgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFhdCBkZXIgTmVkZXJsYW5kZW4g +Um9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oXDTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMC +TkwxHjAcBgNVBAoMFVN0YWF0IGRlciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5l +ZGVybGFuZGVuIFJvb3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ +5291qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8SpuOUfiUtn +vWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPUZ5uW6M7XxgpT0GtJlvOj +CwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvEpMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiil +e7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCR +OME4HYYEhLoaJXhena/MUGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpI +CT0ugpTNGmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy5V65 +48r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv6q012iDTiIJh8BIi +trzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEKeN5KzlW/HdXZt1bv8Hb/C3m1r737 +qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMB +AAGjgZcwgZQwDwYDVR0TAQH/BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcC 
+ARYxaHR0cDovL3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqGSIb3DQEBCwUA +A4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLySCZa59sCrI2AGeYwRTlHSeYAz ++51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwj +f/ST7ZwaUb7dRUG/kSS0H4zpX897IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaN +kqbG9AclVMwWVxJKgnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfk +CpYL+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxLvJxxcypF +URmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkmbEgeqmiSBeGCc1qb3Adb +CG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvkN1trSt8sV4pAWja63XVECDdCcAz+3F4h +oKOKwJCcaNpQ5kUQR3i2TtJlycM33+FCY7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoV +IPVVYpbtbZNQvOSqeK3Zywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm +66+KAQ== +-----END CERTIFICATE----- + +Hongkong Post Root CA 1 +======================= +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoT +DUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMB4XDTAzMDUx +NTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25n +IFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1 +ApzQjVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEnPzlTCeqr +auh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjhZY4bXSNmO7ilMlHIhqqh +qZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9nnV0ttgCXjqQesBCNnLsak3c78QA3xMY +V18meMjWCnl3v/evt3a5pQuEF10Q6m/hq5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNV +HRMBAf8ECDAGAQH/AgEDMA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7i +h9legYsCmEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI37pio +l7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clBoiMBdDhViw+5Lmei +IAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJsEhTkYY2sEJCehFC78JZvRZ+K88ps +T/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpOfMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilT +c4afU9hDDl3WY4JxHYB0yvbiAmvZWg== +-----END CERTIFICATE----- + +SecureSign RootCA11 +=================== +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDErMCkGA1UEChMi +SmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoGA1UEAxMTU2VjdXJlU2lnbiBS +b290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSsw +KQYDVQQKEyJKYXBhbiBDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1 +cmVTaWduIFJvb3RDQTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvL +TJszi1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8h9uuywGO +wvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOVMdrAG/LuYpmGYz+/3ZMq +g6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rP +O7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitA +bpSACW22s293bzUIUPsCh8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZX +t94wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAKCh +OBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xmKbabfSVSSUOrTC4r +bnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQX5Ucv+2rIrVls4W6ng+4reV6G4pQ +Oh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWrQbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01 +y8hSyn+B/tlr0/cR7SXf+Of5pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061 +lgeLKBObjBmNQSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +Microsec e-Szigno Root CA 2009 +============================== +-----BEGIN CERTIFICATE----- 
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYDVQQGEwJIVTER +MA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jv +c2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTAeFw0wOTA2MTYxMTMwMThaFw0yOTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UE +BwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUt +U3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvPkd6mJviZpWNwrZuuyjNA +fW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tccbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG +0IMZfcChEhyVbUr02MelTTMuhTlAdX4UfIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKA +pxn1ntxVUwOXewdI/5n7N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm +1HxdrtbCxkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1+rUC +AwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTLD8bf +QkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAbBgNVHREE +FDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqGSIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0o +lZMEyL/azXm4Q5DwpL7v8u8hmLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfX +I/OMn74dseGkddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c2Pm2G2JwCz02 +yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5tHMN1Rq41Bab2XD0h7lbwyYIi +LXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +GlobalSign Root CA - R3 +======================= +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xv +YmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh +bFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT +aWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln +bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWt +iHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ +0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3 +rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjl +OCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2 +xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7 +lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8 +EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1E +bddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18 +YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7r +kpeDMdmztcpHWD9f +-----END CERTIFICATE----- + +Autoridad de Certificacion Firmaprofesional CIF A62634068 +========================================================= +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UEBhMCRVMxQjBA +BgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2 +MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEyMzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIw +QAYDVQQDDDlBdXRvcmlkYWQgZGUgQ2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBB +NjI2MzQwNjgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDD +Utd9thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQMcas9UX4P +B99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefGL9ItWY16Ck6WaVICqjaY +7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15iNA9wBj4gGFrO93IbJWyTdBSTo3OxDqqH 
+ECNZXyAFGUftaI6SEspd/NYrspI8IM/hX68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyI +plD9amML9ZMWGxmPsu2bm8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctX +MbScyJCyZ/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirjaEbsX +LZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/TKI8xWVvTyQKmtFLK +bpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF6NkBiDkal4ZkQdU7hwxu+g/GvUgU +vzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVhOSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1Ud +EwEB/wQIMAYBAf8CAQEwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNH +DhpkLzCBpgYDVR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBvACAAZABlACAA +bABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBlAGwAbwBuAGEAIAAwADgAMAAx +ADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx +51tkljYyGOylMnfX40S2wBEqgLk9am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qk +R71kMrv2JYSiJ0L1ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaP +T481PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS3a/DTg4f +Jl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5kSeTy36LssUzAKh3ntLFl +osS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF3dvd6qJ2gHN99ZwExEWN57kci57q13XR +crHedUTnQn3iV2t93Jm8PYMo6oCTjcVMZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoR +saS8I8nkvof/uZS2+F0gStRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTD +KCOM/iczQ0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQBjLMi +6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +Izenpe.com +========== +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4MQswCQYDVQQG +EwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wHhcNMDcxMjEz +MTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMu +QS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ +03rKDx6sp4boFmVqscIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAK +ClaOxdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6HLmYRY2xU ++zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFXuaOKmMPsOzTFlUFpfnXC +PCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQDyCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxT +OTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbK +F7jJeodWLBoBHmy+E60QrLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK +0GqfvEyNBjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8Lhij+ +0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIBQFqNeb+Lz0vPqhbB +leStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+HMh3/1uaD7euBUbl8agW7EekFwID +AQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2luZm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+ +SVpFTlBFIFMuQS4gLSBDSUYgQTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBG +NjIgUzgxQzBBBgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0O +BBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUAA4ICAQB4pgwWSp9MiDrAyw6l +Fn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWblaQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbga +kEyrkgPH7UIBzg/YsfqikuFgba56awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8q +hT/AQKM6WfxZSzwoJNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Cs +g1lwLDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCTVyvehQP5 +aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGkLhObNA5me0mrZJfQRsN5 +nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJbUjWumDqtujWTI6cfSN01RpiyEGjkpTHC +ClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZo 
+Q0iy2+tzJOeRf1SktoA+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1Z +WrOZyGlsQyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +Chambers of Commerce Root - 2008 +================================ +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYDVQQGEwJFVTFD +MEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNv +bS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMu +QS4xKTAnBgNVBAMTIENoYW1iZXJzIG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEy +Mjk1MFoXDTM4MDczMTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNl +ZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQF +EwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJl +cnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW928sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKA +XuFixrYp4YFs8r/lfTJqVKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorj +h40G072QDuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR5gN/ +ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfLZEFHcpOrUMPrCXZk +NNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05aSd+pZgvMPMZ4fKecHePOjlO+Bd5g +D2vlGts/4+EhySnB8esHnFIbAURRPHsl18TlUlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331 +lubKgdaX8ZSD6e2wsWsSaR6s+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ +0wlf2eOKNcx5Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAxhduub+84Mxh2 +EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNVHQ4EFgQU+SSsD7K1+HnA+mCI +G8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJ +BgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNh +bWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENh +bWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDiC +CQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUH +AgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAJASryI1 +wqM58C7e6bXpeHxIvj99RZJe6dqxGfwWPJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH +3qLPaYRgM+gQDROpI9CF5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbU +RWpGqOt1glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaHFoI6 +M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2pSB7+R5KBWIBpih1 +YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MDxvbxrN8y8NmBGuScvfaAFPDRLLmF +9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QGtjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcK +zBIKinmwPQN/aUv0NCB9szTqjktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvG +nrDQWzilm1DefhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZd0jQ +-----END CERTIFICATE----- + +Global Chambersign Root - 2008 +============================== +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYDVQQGEwJFVTFD +MEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNv +bS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMu +QS4xJzAlBgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMx +NDBaFw0zODA3MzExMjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUg +Y3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJ +QTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD 
+aGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDf +VtPkOpt2RbQT2//BthmLN0EYlVJH6xedKYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXf +XjaOcNFccUMd2drvXNL7G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0 +ZJJ0YPP2zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4ddPB +/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyGHoiMvvKRhI9lNNgA +TH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2Id3UwD2ln58fQ1DJu7xsepeY7s2M +H/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3VyJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfe +Ox2YItaswTXbo6Al/3K1dh3ebeksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSF +HTynyQbehP9r6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsogzCtLkykPAgMB +AAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQWBBS5CcqcHtvTbDprru1U8VuT +BjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDprru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UE +BhMCRVUxQzBBBgNVBAcTOk1hZHJpZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJm +aXJtYS5jb20vYWRkcmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJm +aXJtYSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiCCQDJzdPp +1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUHAgEWHGh0 +dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAICIf3DekijZBZRG +/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZUohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6 +ReAJ3spED8IXDneRRXozX1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/s +dZ7LoR/xfxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVza2Mg +9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yydYhz2rXzdpjEetrHH +foUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMdSqlapskD7+3056huirRXhOukP9Du +qqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9OAP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETr +P3iZ8ntxPjzxmKfFGBI/5rsoM0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVq +c5iJWzouE4gev8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z 09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B -----END CERTIFICATE----- -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +Go Daddy Root Certificate Authority - G2 +======================================== +-----BEGIN CERTIFICATE----- 
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMxEDAOBgNVBAgT +B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoTEUdvRGFkZHkuY29tLCBJbmMu +MTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8G +A1UEAxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKDE6bFIEMBO4Tx5oVJnyfq +9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD ++qK+ihVqf94Lw7YZFAXK6sOoBJQ7RnwyDfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutd +fMh8+7ArU6SSYmlRJQVhGkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMl +NAJWJwGRtDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFDqahQcQZyi27/a9 +BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmXWWcDYfF+OwYxdS2hII5PZYe096ac +vNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r +5N9ss4UXnT3ZJE95kTXWXwTrgIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYV +N8Gb5DKj7Tjo2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI4uJEvlz36hz1 +-----END CERTIFICATE----- + +Starfield Root Certificate Authority - G2 +========================================= +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMxEDAOBgNVBAgT +B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9s +b2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVsZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0 +eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAw +DgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQg +VGVjaG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZpY2F0ZSBB +dXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL3twQP89o/8ArFv +W59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMgnLRJdzIpVv257IzdIvpy3Cdhl+72WoTs +bhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNk +N3mSwOxGXn/hbVNMYq/NHwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7Nf +ZTD4p7dNdloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0HZbU +JtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0GCSqGSIb3DQEBCwUAA4IBAQARWfol +TwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjUsHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx +4mcujJUDJi5DnUox9g61DLu34jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUw +F5okxBDgBPfg8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1mMpYjn0q7pBZ +c2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +Starfield Services Root Certificate Authority - G2 +================================================== +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMxEDAOBgNVBAgT +B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9s +b2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVsZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRl +IEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxT +dGFyZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2VydmljZXMg +Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC 
+AQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20pOsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2 +h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm28xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4Pa +hHQUw2eeBGg6345AWh1KTs9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLP +LJGmpufehRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk6mFB +rMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+qAdcwKziIorhtSpzyEZGDMA0GCSqG +SIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMIbw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPP +E95Dz+I0swSdHynVv/heyNXBve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTy +xQGjhdByPq1zqwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn0q23KXB56jza +YyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCNsSi6 +-----END CERTIFICATE----- + +AffirmTrust Commercial +====================== +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UEBhMCVVMxFDAS +BgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMB4XDTEw +MDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmly +bVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6Eqdb +DuKPHx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yrba0F8PrV +C8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPALMeIrJmqbTFeurCA+ukV6 +BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1yHp52UKqK39c/s4mT6NmgTWvRLpUHhww +MmWd5jyTXlBOeuM61G7MGvv50jeuJCqrVwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNV +HQ4EFgQUnZPGU4teyq8/nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYGXUPG +hi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNjvbz4YYCanrHOQnDi +qX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivtZ8SOyUOyXGsViQK8YvxO8rUzqrJv +0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9gN53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0kh +sUlHRUe072o0EclNmsxZt9YCnlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +AffirmTrust Networking +====================== +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UEBhMCVVMxFDAS +BgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMB4XDTEw +MDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmly +bVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SE +Hi3yYJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbuakCNrmreI +dIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRLQESxG9fhwoXA3hA/Pe24 +/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gb +h+0t+nvujArjqWaJGctB+d1ENmHP4ndGyH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNV +HQ4EFgQUBx/S55zawm6iQLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfOtDIu +UFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzuQY0x2+c06lkh1QF6 +12S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZLgo/bNjR9eUJtGxUAArgFU2HdW23 +WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4uolu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9 +/ZFvgrG+CJPbFEfxojfHRZ48x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +AffirmTrust Premium +=================== +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UEBhMCVVMxFDAS 
+BgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMB4XDTEwMDEy +OTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRy +dXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxBLfqV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtn +BKAQJG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ+jjeRFcV +5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrSs8PhaJyJ+HoAVt70VZVs ++7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmd +GPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d770O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5R +p9EixAqnOEhss/n/fauGV+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NI +S+LI+H+SqHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S5u04 +6uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4IaC1nEWTJ3s7xgaVY5 +/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TXOwF0lkLgAOIua+rF7nKsu7/+6qqo ++Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYEFJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByv +MiPIs0laUZx2KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B8OWycvpEgjNC +6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQMKSOyARiqcTtNd56l+0OOF6S +L5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK ++4w1IX2COPKpVJEZNZOUbWo6xbLQu4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmV +BtWVyuEklut89pMFu+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFg +IxpHYoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8GKa1qF60 +g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaORtGdFNrHF+QFlozEJLUb +zxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6eKeC2uAloGRwYQw== +-----END CERTIFICATE----- + +AffirmTrust Premium ECC +======================= +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMCVVMxFDASBgNV +BAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQcmVtaXVtIEVDQzAeFw0xMDAx +MjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJBgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1U +cnVzdDEgMB4GA1UEAwwXQWZmaXJtVHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQNMF4bFZ0D0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQ +N8O9ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0GA1UdDgQW +BBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAK +BggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/VsaobgxCd05DhT1wV/GzTjxi+zygk8N53X +57hG8f2h4nECMEJZh0PUUd+60wkyWs6Iflc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKM +eQ== +-----END CERTIFICATE----- + +Certum Trusted Network CA +========================= +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBMMSIwIAYDVQQK +ExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBUcnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIy +MTIwNzM3WhcNMjkxMjMxMTIwNzM3WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBU +ZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MSIwIAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rHUV+rpDKmYYe2bg+G0jAC +l/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LMTXPb865Px1bVWqeWifrzq2jUI4ZZJ88J +J7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVUBBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4 +fOQtf/WsX+sWn7Et0brMkUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0 
+cvW0QM8xAcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNVHRMB +Af8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNVHQ8BAf8EBAMCAQYw +DQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15ysHhE49wcrwn9I0j6vSrEuVUEtRCj +jSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfLI9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1 +mS1FhIrlQgnXdAIv94nYmem8J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5aj +Zt3hrvJBW8qYVoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI 03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= -----END CERTIFICATE----- -# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903 -# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903 -# Label: "Certinomis - Autorité Racine" -# Serial: 1 -# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a -# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3 -# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17 ------BEGIN CERTIFICATE----- -MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET -MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk -BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4 -Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl -cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0 -aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY -F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N -8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe -rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K -/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu -7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC -28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6 -lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E -nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB -0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09 -5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj -WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN -jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ -KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s -ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM -OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q -619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn -2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj -o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v -nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG -5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq -pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb -dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0 -BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5 ------END CERTIFICATE----- - -# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA -# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA -# Label: "Root CA Generalitat Valenciana" -# Serial: 994436456 -# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2 -# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46 -# SHA256 Fingerprint: 
8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e ------BEGIN CERTIFICATE----- -MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF -UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ -R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN -MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G -A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw -JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+ -WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj -SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl -u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy -A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk -Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7 -MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr -aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC -IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A -cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA -YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA -bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA -bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA -aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA -aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA -ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA -YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA -ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA -LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6 -Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y -eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw -CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G -A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu -Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn -lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt -b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg -9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF -ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC -IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM= ------END CERTIFICATE----- - -# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03 -# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. 
Datenverkehr GmbH OU=A-Trust-nQual-03 -# Label: "A-Trust-nQual-03" -# Serial: 93214 -# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53 -# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2 -# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb ------BEGIN CERTIFICATE----- -MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB -VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp -bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R -dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw -MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy -dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52 -ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM -EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj -lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ -znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH -2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1 -k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs -2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD -VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG -KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+ -8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R -FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS -mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE -DNuxUCAKGkq6ahq97BvIxYSazQ== ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems 
CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB -OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 45 -# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 -# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 -# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 ------BEGIN CERTIFICATE----- -MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul -F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC -ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w -ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk -aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 -YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg -c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 -d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG -CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF -wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS -Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst -0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc -pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl -CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF -P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK -1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm -KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE -JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ -8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm -fyWl8kgAwKQB2j8= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. -# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
-# Label: "StartCom Certification Authority G2" -# Serial: 59 -# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 -# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 -# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 ------BEGIN CERTIFICATE----- -MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 -OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG -A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ -JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD -vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo -D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ -Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW -RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK -HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN -nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM -0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i -UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 -Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg -TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL -BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K -2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX -UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl -6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK -9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ -HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI -wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY -XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l -IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo -hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr -so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr 
-Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot 
Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw -WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV 
-v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - -# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007 -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007 -# Label: "TURKTRUST Certificate Services Provider Root 2007" -# Serial: 1 -# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72 -# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33 -# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50 ------BEGIN CERTIFICATE----- -MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS -S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg -SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx -OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry -b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC -VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE -sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F -ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY -KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG -+7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG -HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P -IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M -733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk -Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G -CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW -AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I -aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5 -mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa -XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ -qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R 
-ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica -# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT -# Label: "PSCProcert" -# Serial: 11 -# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec -# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74 -# SHA256 Fingerprint: 
3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0 ------BEGIN CERTIFICATE----- -MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1 -dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s -YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz -dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0 -aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh -IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ -KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw -MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy -b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx -KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG -A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u -aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9 -7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74 -BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G -ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9 -JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0 -PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2 -0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH -0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/ -6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m -v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7 -K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev -bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw -MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w -MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD -gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0 -b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh -bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0 -cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp -ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg -ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq -hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD -AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w -MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag -RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t -UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl -cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v -Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG -AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN -AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS -1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB -3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv -Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh -HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm -pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz -sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE -qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb -mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9 -opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H -YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km ------END CERTIFICATE----- - -# Issuer: CN=China Internet Network 
Information Center EV Certificates Root O=China Internet Network Information Center -# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center -# Label: "China Internet Network Information Center EV Certificates Root" -# Serial: 1218379777 -# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15 -# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e -# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7 ------BEGIN CERTIFICATE----- -MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC -Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g -Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0 -aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa -Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg -SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo -aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp -ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z -7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA// -DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx -zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8 -hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs -4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u -gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY -NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E -FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3 -j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG -52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB -echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws -ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI -zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy -wy39FCqQmbkHzJ8= ------END CERTIFICATE----- - -# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services -# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services -# Label: "Swisscom Root CA 2" -# Serial: 40698052477090394928831521023204026294 -# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19 -# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec -# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41 ------BEGIN CERTIFICATE----- -MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk -MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 -YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg -Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT -AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp -Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr -jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r -0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f -2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP -ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF -y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA -tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL -6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0 -uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL 
-acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh -k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q -VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw -FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O -BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh -b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R -fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv -/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI -REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx -srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv -aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT -woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n -Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W -t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N -8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2 -9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5 -wSsSnqaeG8XmDtkx2Q== ------END CERTIFICATE----- - -# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services -# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services -# Label: "Swisscom Root EV CA 2" -# Serial: 322973295377129385374608406479535262296 -# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec -# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b -# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d ------BEGIN CERTIFICATE----- -MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw -ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp -dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290 -IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD -VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy -dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg -MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx -UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD -1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH -oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR -HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/ -5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv -idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL -OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC -NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f -46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB -UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth -7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G -A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED -MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB -bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x -XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T -PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0 -Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70 -WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL -Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm -7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S -nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN 
-vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB -WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI -fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb -I+2ksx0WckNLIOFZfsLorSa/ovc= ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R1 O=Disig a.s. -# Subject: CN=CA Disig Root R1 O=Disig a.s. -# Label: "CA Disig Root R1" -# Serial: 14052245610670616104 -# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a -# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6 -# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy -MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk -D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o -OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A -fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe -IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n -oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK -/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj -rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD -3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE -7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC -yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd -qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI -hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR -xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA -SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo -HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB -emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC -AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb -7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x -DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk -F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF -a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT -Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. 
-# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 
-4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm 
-4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. 
OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK -vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy 
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga 
-W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- 
-MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +TWCA Root Certification Authority +================================= +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJ +VEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMzWhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQG +EwJUVzESMBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NB +IFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFEAcK0HMMx +QhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HHK3XLfJ+utdGdIzdjp9xC +oi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeXRfwZVzsrb+RH9JlF/h3x+JejiB03HFyP +4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/zrX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1r +y+UPizgN7gr8/g+YnzAx3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkqhkiG +9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeCMErJk/9q56YAf4lC +mtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdlsXebQ79NqZp4VKIV66IIArB6nCWlW +QtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62Dlhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVY +T0bf+215WfKEIlKuD8z7fDvnaspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocny +Yh0igzyXxfkZYiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +Security Communication RootCA2 +============================== +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMeU2VjdXJpdHkgQ29tbXVuaWNh +dGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoXDTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMC +SlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3Vy +aXR5IENvbW11bmljYXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +ANAVOVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGrzbl+dp++ ++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVMVAX3NuRFg3sUZdbcDE3R +3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQhNBqyjoGADdH5H5XTz+L62e4iKrFvlNV +spHEfbmwhRkGeC7bYRr6hfVKkaHnFtWOojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1K +EOtOghY6rCcMU/Gt1SSwawNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8 
+QIH4D5csOPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEB +CwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpFcoJxDjrSzG+ntKEj +u/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXcokgfGT+Ok+vx+hfuzU7jBBJV1uXk +3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6q +tnRGEmyR7jTV7JqR50S+kDFy1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29 +mvVXIwAHIRc/SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +EC-ACC +====== +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB8zELMAkGA1UE +BhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2VydGlmaWNhY2lvIChOSUYgUS0w +ODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYD +VQQLEyxWZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UE +CxMsSmVyYXJxdWlhIEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMT +BkVDLUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQGEwJFUzE7 +MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8gKE5JRiBRLTA4MDExNzYt +SSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBDZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZl +Z2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQubmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJh +cnF1aWEgRW50aXRhdHMgZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUND +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R85iK +w5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm4CgPukLjbo73FCeT +ae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaVHMf5NLWUhdWZXqBIoH7nF2W4onW4 +HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNdQlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0a +E9jD2z3Il3rucO2n5nzbcc8tlGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw +0JDnJwIDAQABo4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4opvpXY0wfwYD +VR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBodHRwczovL3d3dy5jYXRjZXJ0 +Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidWZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5l +dC92ZXJhcnJlbCAwDQYJKoZIhvcNAQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJ +lF7W2u++AVtd0x7Y/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNa +Al6kSBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhyRp/7SNVe +l+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOSAgu+TGbrIP65y7WZf+a2 +E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xlnJ2lYJU6Un/10asIbvPuW/mIPX64b24D +5EI= +-----END CERTIFICATE----- + +Hellenic Academic and Research Institutions RootCA 2011 +======================================================= +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1IxRDBCBgNVBAoT +O0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9y +aXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25z +IFJvb3RDQSAyMDExMB4XDTExMTIwNjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYT +AkdSMUQwQgYDVQQKEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25z +IENlcnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2VhcmNo +IEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPzdYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI +1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJfel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa +71HFK9+WXesyHgLacEnsbgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u +8yBRQlqD75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSPFEDH +3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNVHRMBAf8EBTADAQH/ 
+MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp5dgTBCPuQSUwRwYDVR0eBEAwPqA8 +MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQub3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQu +b3JnMA0GCSqGSIb3DQEBBQUAA4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVt +XdMiKahsog2p6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7dIsXRSZMFpGD +/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8AcysNnq/onN694/BtZqhFLKPM58N +7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXIl7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +Actalis Authentication Root CA +============================== +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UEBhMCSVQxDjAM +BgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1ODUyMDk2NzEnMCUGA1UE +AwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDky +MjExMjIwMlowazELMAkGA1UEBhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlz +IFMucC5BLi8wMzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNvUTufClrJ +wkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX4ay8IMKx4INRimlNAJZa +by/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9KK3giq0itFZljoZUj5NDKd45RnijMCO6 +zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1f +YVEiVRvjRuPjPdA1YprbrxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2 +oxgkg4YQ51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2Fbe8l +EfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxeKF+w6D9Fz8+vm2/7 +hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4Fv6MGn8i1zeQf1xcGDXqVdFUNaBr8 +EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbnfpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5 +jF66CyCU3nuDuP/jVo23Eek7jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLY +iDrIn3hm7YnzezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQALe3KHwGCmSUyI +WOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70jsNjLiNmsGe+b7bAEzlgqqI0 +JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDzWochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKx +K3JCaKygvU5a2hi/a5iB0P2avl4VSM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+ +Xlff1ANATIGk0k9jpwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC +4yyXX04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+OkfcvHlXHo +2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7RK4X9p2jIugErsWx0Hbhz +lefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btUZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXem +OR/qnuOf0GZvBeyqdn6/axag67XH/JJULysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9 +vwGYT7JZVEc+NHt4bVaTLnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +Trustis FPS Root CA +=================== +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQG +EwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQLExNUcnVzdGlzIEZQUyBSb290 +IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTExMzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNV +BAoTD1RydXN0aXMgTGltaXRlZDEcMBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQ +RUN+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihHiTHcDnlk +H5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjjvSkCqPoc4Vu5g6hBSLwa +cY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zt +o3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlBOrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEA 
+AaNTMFEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAd +BgNVHQ4EFgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01GX2c +GE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmWzaD+vkAMXBJV+JOC +yinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP41BIy+Q7DsdwyhEQsb8tGD+pmQQ9P +8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZEf1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHV +l/9D7S3B2l0pKoU/rGXuhg8FjZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYl +iB6XzCGcKQENZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +Buypass Class 2 Root CA +======================= +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU +QnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3MgQ2xhc3MgMiBSb290IENBMB4X +DTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1owTjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1 +eXBhc3MgQVMtOTgzMTYzMzI3MSAwHgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIw +DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1 +g1Lr6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPVL4O2fuPn +9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC911K2GScuVr1QGbNgGE41b +/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHxMlAQTn/0hpPshNOOvEu/XAFOBz3cFIqU +CqTqc/sLUegTBxj6DvEr0VQVfTzh97QZQmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeff +awrbD02TTqigzXsu8lkBarcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgI +zRFo1clrUs3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLiFRhn +Bkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRSP/TizPJhk9H9Z2vX +Uq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN9SG9dKpN6nIDSdvHXx1iY8f93ZHs +M+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxPAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFMmAd+BikoL1RpzzuvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAgEAU18h9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3tOluwlN5E40EI +osHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo+fsicdl9sz1Gv7SEr5AcD48S +aq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYd +DnkM/crqJIByw5c/8nerQyIKx+u2DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWD +LfJ6v9r9jv6ly0UsH8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0 +oyLQI+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK75t98biGC +wWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h3PFaTWwyI0PurKju7koS +CTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPzY11aWOIv4x3kqdbQCtCev9eBCfHJxyYN +rJgWVqA= +-----END CERTIFICATE----- + +Buypass Class 3 Root CA +======================= +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU +QnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3MgQ2xhc3MgMyBSb290IENBMB4X +DTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFowTjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1 +eXBhc3MgQVMtOTgzMTYzMzI3MSAwHgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIw +DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRH +sJ8YZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3EN3coTRiR +5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9tznDDgFHmV0ST9tD+leh +7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX0DJq1l1sDPGzbjniazEuOQAnFN44wOwZ +ZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH +2xc519woe2v1n/MuwU8XKhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV +/afmiSTYzIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvSO1UQ +RwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D34xFMFbG02SrZvPA 
+Xpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgPK9Dx2hzLabjKSWJtyNBjYt1gD1iq +j6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFEe4zf/lb+74suwvTg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAgEAACAjQTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXSIGrs/CIBKM+G +uIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2HJLw5QY33KbmkJs4j1xrG0aG +Q0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsaO5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8 +ZORK15FTAaggiG6cX0S5y2CBNOxv033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2 +KSb12tjE8nVhz36udmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz +6MkEkbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg413OEMXbug +UZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvDu79leNKGef9JOxqDDPDe +eOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq4/g7u9xN12TyUb7mqqta6THuBrxzvxNi +Cp/HuZc= +-----END CERTIFICATE----- + +T-TeleSec GlobalRoot Class 3 +============================ +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoM +IlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBU +cnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgx +MDAxMTAyOTU2WhcNMzMxMDAxMjM1OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lz +dGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBD +ZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN8ELg63iIVl6bmlQdTQyK +9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/RLyTPWGrTs0NvvAgJ1gORH8EGoel15YU +NpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZF +iP0Zf3WHHx+xGwpzJFu5ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W +0eDrXltMEnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1A/d2O2GCahKqGFPr +AyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOyWL6ukK2YJ5f+AbGwUgC4TeQbIXQb +fsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzT +ucpH9sry9uetuUg/vBa3wW306gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7h +P0HHRwA11fXT91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4pTpPDpFQUWw== +-----END CERTIFICATE----- + +EE Certification Centre Root CA +=============================== +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQG +EwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEoMCYGA1UEAwwfRUUgQ2Vy +dGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIw +MTAxMDMwMTAxMDMwWhgPMjAzMDEyMTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlB +UyBTZXJ0aWZpdHNlZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRy +ZSBSb290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEBAQUAA4IB +DwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUyeuuOF0+W2Ap7kaJjbMeM +TC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvObntl8jixwKIy72KyaOBhU8E2lf/slLo2 +rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIwWFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw +93X2PaRka9ZP585ArQ/dMtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtN +P2MbRMNE1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/zQas8fElyalL1BSZ +MEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYBBQUHAwMGCCsGAQUFBwMEBggrBgEF 
+BQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEFBQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+Rj +xY6hUFaTlrg4wCQiZrxTFGGVv9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqM +lIpPnTX/dqQGE5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIWiAYLtqZLICjU +3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/vGVCJYMzpJJUPwssd8m92kMfM +dcGWxZ0= +-----END CERTIFICATE----- + +D-TRUST Root Class 3 CA 2 2009 +============================== +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQK +DAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTAe +Fw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NThaME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxE +LVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOAD +ER03UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42tSHKXzlA +BF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9RySPocq60vFYJfxLLHLGv +KZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsMlFqVlNpQmvH/pStmMaTJOKDfHR+4CS7z +p+hnUquVH+BGPtikw8paxTGA6Eian5Rp/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUC +AwEAAaOCARowggEWMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ +4PGEMA4GA1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVjdG9y +eS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUyMENBJTIwMiUyMDIw +MDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRlcmV2b2NhdGlvbmxpc3QwQ6BBoD+G +PWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAw +OS5jcmwwDQYJKoZIhvcNAQELBQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm +2H6NMLVwMeniacfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4KzCUqNQT4YJEV +dT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8PIWmawomDeCTmGCufsYkl4ph +X5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3YJohw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +D-TRUST Root Class 3 CA 2 EV 2009 +================================= +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQK +DAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAw +OTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUwNDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQK +DAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAw +OTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfS +egpnljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM03TP1YtHh +zRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6ZqQTMFexgaDbtCHu39b+T +7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lRp75mpoo6Kr3HGrHhFPC+Oh25z1uxav60 +sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure35 +11H3a6UCAwEAAaOCASQwggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyv +cop9NteaHNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFwOi8v +ZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xhc3MlMjAzJTIwQ0El +MjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1ERT9jZXJ0aWZpY2F0ZXJldm9jYXRp +b25saXN0MEagRKBChkBodHRwOi8vd3d3LmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xh +c3NfM19jYV8yX2V2XzIwMDkuY3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+ +PPoeUSbrh/Yp3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNFCSuGdXzfX2lX +ANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7naxpeG0ILD5EJt/rDiZE4OJudA 
+NCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqXKVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVv +w9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +CA Disig Root R2 +================ +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNVBAYTAlNLMRMw +EQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMuMRkwFwYDVQQDExBDQSBEaXNp +ZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQyMDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sx +EzARBgNVBAcTCkJyYXRpc2xhdmExEzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERp +c2lnIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbC +w3OeNcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNHPWSb6Wia +xswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3Ix2ymrdMxp7zo5eFm1tL7 +A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbeQTg06ov80egEFGEtQX6sx3dOy1FU+16S +GBsEWmjGycT6txOgmLcRK7fWV8x8nhfRyyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqV +g8NTEQxzHQuyRpDRQjrOQG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa +5Beny912H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJQfYE +koopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUDi/ZnWejBBhG93c+A +Ak9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORsnLMOPReisjQS1n6yqEm70XooQL6i +Fh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5u +Qu0wDQYJKoZIhvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqfGopTpti72TVV +sRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkblvdhuDvEK7Z4bLQjb/D907Je +dR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W8 +1k/BfDxujRNt+3vrMNDcTa/F1balTFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjx +mHHEt38OFdAlab0inSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01 +utI3gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18DrG5gPcFw0 +sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3OszMOl6W8KjptlwlCFtaOg +UxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8xL4ysEr3vQCj8KWefshNPZiTEUxnpHikV +7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +ACCVRAIZ1 +========= +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UEAwwJQUNDVlJB +SVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQswCQYDVQQGEwJFUzAeFw0xMTA1 +MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQBgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwH +UEtJQUNDVjENMAsGA1UECgwEQUNDVjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQCbqau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gM +jmoYHtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWoG2ioPej0 +RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpAlHPrzg5XPAOBOp0KoVdD +aaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhrIA8wKFSVf+DuzgpmndFALW4ir50awQUZ +0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDG +WuzndN9wrqODJerWx5eHk6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs7 +8yM2x/474KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMOm3WR +5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpacXpkatcnYGMN285J +9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPluUsXQA+xtrn13k/c4LOsOxFwYIRK +Q26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYIKwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRw +Oi8vd3d3LmFjY3YuZXMvZmlsZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEu +Y3J0MB8GCCsGAQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeTVfZW6oHlNsyM 
+Hj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIGCCsGAQUFBwICMIIBFB6CARAA +QQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUAcgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBh +AO0AegAgAGQAZQAgAGwAYQAgAEEAQwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUA +YwBuAG8AbABvAGcA7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBj +AHQAcgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAAQwBQAFMA +IABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUAczAwBggrBgEFBQcCARYk +aHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2MuaHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0 +dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRtaW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2 +MV9kZXIuY3JsMA4GA1UdDwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZI +hvcNAQEFBQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdpD70E +R9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gUJyCpZET/LtZ1qmxN +YEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+mAM/EKXMRNt6GGT6d7hmKG9Ww7Y49 +nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepDvV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJ +TS+xJlsndQAJxGJ3KQhfnlmstn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3 +sCPdK6jT2iWH7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szAh1xA2syVP1Xg +Nce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xFd3+YJ5oyXSrjhO7FmGYvliAd +3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2HpPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3p +EfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +TWCA Global Root CA +=================== +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcxEjAQBgNVBAoT +CVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMTVFdDQSBHbG9iYWwgUm9vdCBD +QTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQK +EwlUQUlXQU4tQ0ExEDAOBgNVBAsTB1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2C +nJfF10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz0ALfUPZV +r2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfChMBwqoJimFb3u/Rk28OKR +Q4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbHzIh1HrtsBv+baz4X7GGqcXzGHaL3SekV +tTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1W +KKD+u4ZqyPpcC1jcxkt2yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99 +sy2sbZCilaLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYPoA/p +yJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQABDzfuBSO6N+pjWxn +kjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcEqYSjMq+u7msXi7Kx/mzhkIyIqJdI +zshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMC +AQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6g +cFGn90xHNcgL1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WFH6vPNOw/KP4M +8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNoRI2T9GRwoD2dKAXDOXC4Ynsg +/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlg +lPx4mI88k1HtQJAH32RjJMtOcQWh15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryP +A9gK8kxkRr05YuWW6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3m +i4TWnsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5jwa19hAM8 +EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWzaGHQRiapIVJpLesux+t3 +zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmyKwbQBM0= +-----END CERTIFICATE----- + +TeliaSonera Root CA v1 +====================== +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAwNzEUMBIGA1UE 
+CgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJvb3QgQ0EgdjEwHhcNMDcxMDE4 +MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYDVQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwW +VGVsaWFTb25lcmEgUm9vdCBDQSB2MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+ +6yfwIaPzaSZVfp3FVRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA +3GV17CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+XZ75Ljo1k +B1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+/jXh7VB7qTCNGdMJjmhn +Xb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxH +oLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkmdtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3 +F0fUTPHSiXk+TT2YqGHeOh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJ +oWjiUIMusDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4pgd7 +gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fsslESl1MpWtTwEhDc +TwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQarMCpgKIv7NHfirZ1fpoeDVNAgMB +AAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qW +DNXr+nuqF+gTEjANBgkqhkiG9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNm +zqjMDfz1mgbldxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1TjTQpgcmLNkQfW +pb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBedY2gea+zDTYa4EzAvXUYNR0PV +G6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpc +c41teyWRyu5FrgZLAMzTsVlQ2jqIOylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOT +JsjrDNYmiLbAJM+7vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2 +qReWt88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcnHL/EVlP6 +Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVxSK236thZiNSQvxaz2ems +WWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +E-Tugra Certification Authority +=============================== +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNVBAYTAlRSMQ8w +DQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamls +ZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMw +NTEyMDk0OFoXDTIzMDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmEx +QDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxl +cmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQD +DB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEA4vU/kwVRHoViVF56C/UYB4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vd +hQd2h8y/L5VMzH2nPbxHD5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5K +CKpbknSFQ9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEoq1+g +ElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3Dk14opz8n8Y4e0ypQ +BaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcHfC425lAcP9tDJMW/hkd5s3kc91r0 +E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsutdEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gz +rt48Ue7LE3wBf4QOXVGUnhMMti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAq +jqFGOjGY5RH8zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUXU8u3Zg5mTPj5 +dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6Jyr+zE7S6E5UMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEG +MA0GCSqGSIb3DQEBCwUAA4ICAQAFNzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAK +kEh47U6YA5n+KGCRHTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jO 
+XKqYGwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c77NCR807 +VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3+GbHeJAAFS6LrVE1Uweo +a2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WKvJUawSg5TB9D0pH0clmKuVb8P7Sd2nCc +dlqMQ1DujjByTd//SffGqWfZbawCEeI6FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEV +KV0jq9BgoRJP3vQXzTLlyb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gT +Dx4JnW2PAJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpDy4Q0 +8ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8dNL/+I5c30jn6PQ0G +C7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +T-TeleSec GlobalRoot Class 2 +============================ +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoM +IlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBU +cnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgx +MDAxMTA0MDE0WhcNMzMxMDAxMjM1OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lz +dGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBD +ZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUdAqSzm1nzHoqvNK38DcLZ +SBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiCFoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/F +vudocP05l03Sx5iRUKrERLMjfTlH6VJi1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx970 +2cu+fjOlbpSD8DT6IavqjnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGV +WOHAD3bZwI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/WSA2AHmgoCJrjNXy +YdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhyNsZt+U2e+iKo4YFWz827n+qrkRk4 +r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPACuvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNf +vNoBYimipidx5joifsFvHZVwIEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR +3p1m0IvVVGb6g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlPBSeOE6Fuwg== +-----END CERTIFICATE----- + +Atos TrustedRoot 2011 +===================== +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UEAwwVQXRvcyBU +cnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0xMTA3MDcxNDU4 +MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMMFUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsG +A1UECgwEQXRvczELMAkGA1UEBhMCREUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCV +hTuXbyo7LjvPpvMpNb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr +54rMVD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+SZFhyBH+ +DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ4J7sVaE3IqKHBAUsR320 +HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0Lcp2AMBYHlT8oDv3FdU9T1nSatCQujgKR +z3bFmx5VdJx4IbHwLfELn8LVlhgf8FQieowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7R +l+lwrrw7GWzbITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZ +bNshMBgGA1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +CwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8jvZfza1zv7v1Apt+h +k6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kPDpFrdRbhIfzYJsdHt6bPWHJxfrrh +TZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pcmaHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a9 +61qn8FYiqTxlVMYVqL2Gns2Dlmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G +3mB/ufNPRJLvKrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +QuoVadis Root CA 1 G3 +===================== +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQELBQAwSDELMAkG 
+A1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAcBgNVBAMTFVF1b1ZhZGlzIFJv +b3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJN +MRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEg +RzMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakE +PBtVwedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWerNrwU8lm +PNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF34168Xfuw6cwI2H44g4hWf6 +Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh4Pw5qlPafX7PGglTvF0FBM+hSo+LdoIN +ofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXpUhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/l +g6AnhF4EwfWQvTA9xO+oabw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV +7qJZjqlc3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/GKubX +9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSthfbZxbGL0eUQMk1f +iyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KOTk0k+17kBL5yG6YnLUlamXrXXAkg +t3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOtzCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZI +hvcNAQELBQADggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2cDMT/uFPpiN3 +GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUNqXsCHKnQO18LwIE6PWThv6ct +Tr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP ++V04ikkwj+3x6xn0dxoxGE1nVGwvb2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh +3jRJjehZrJ3ydlo28hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fa +wx/kNSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNjZgKAvQU6 +O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhpq1467HxpvMc7hU6eFbm0 +FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFtnh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOV +hMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +QuoVadis Root CA 2 G3 +===================== +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQELBQAwSDELMAkG +A1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAcBgNVBAMTFVF1b1ZhZGlzIFJv +b3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJN +MRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIg +RzMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFh +ZiFfqq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMWn4rjyduY +NM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ymc5GQYaYDFCDy54ejiK2t +oIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+o +MiwMzAkd056OXbxMmO7FGmh77FOm6RQ1o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+l +V0POKa2Mq1W/xPtbAd0jIaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZo +L1NesNKqIcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz8eQQ +sSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43ehvNURG3YBZwjgQQvD +6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l7ZizlWNof/k19N+IxWA1ksB8aRxh +lRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALGcC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZI +hvcNAQELBQADggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RCroijQ1h5fq7K +pVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0GaW/ZZGYjeVYg3UQt4XAoeo0L9 +x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4nlv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgz +dWqTHBLmYF5vHX/JHyPLhGGfHoJE+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6X +U/IyAgkwo1jwDQHVcsaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+Nw 
+mNtddbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNgKCLjsZWD +zYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeMHVOyToV7BjjHLPj4sHKN +JeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4WSr2Rz0ZiC3oheGe7IUIarFsNMkd7Egr +O3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +QuoVadis Root CA 3 G3 +===================== +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQELBQAwSDELMAkG +A1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAcBgNVBAMTFVF1b1ZhZGlzIFJv +b3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJN +MRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMg +RzMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286 +IxSR/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNuFoM7pmRL +Mon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXRU7Ox7sWTaYI+FrUoRqHe +6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+cra1AdHkrAj80//ogaX3T7mH1urPnMNA3 +I4ZyYUUpSFlob3emLoG+B01vr87ERRORFHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3U +VDmrJqMz6nWB2i3ND0/kA9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f7 +5li59wzweyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634RylsSqi +Md5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBpVzgeAVuNVejH38DM +dyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0QA4XN8f+MFrXBsj6IbGB/kE+V9/Yt +rQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZI +hvcNAQELBQADggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnIFUBhynLWcKzS +t/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5WvvoxXqA/4Ti2Tk08HS6IT7SdEQ +TXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFgu/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9Du +DcpmvJRPpq3t/O5jrFc/ZSXPsoaP0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGib +Ih6BJpsQBJFxwAYf3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmD +hPbl8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+DhcI00iX +0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HNPlopNLk9hM6xZdRZkZFW +dSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ywaZWWDYWGWVjUTR939+J399roD1B0y2 +PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +DigiCert Assured ID Root G2 +=========================== +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQw +IgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgw +MTE1MTIwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQL +ExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSAn61UQbVH +35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4HteccbiJVMWWXvdMX0h5i89vq +bFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9HpEgjAALAcKxHad3A2m67OeYfcgnDmCXRw +VWmvo2ifv922ebPynXApVfSr/5Vh88lAbx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OP +YLfykqGxvYmJHzDNw6YuYjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+Rn +lTGNAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBTO +w0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPIQW5pJ6d1Ee88hjZv +0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I0jJmwYrA8y8678Dj1JGG0VDjA9tz +d29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4GnilmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAW +hsI6yLETcDbYz+70CjTVW0z9B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0M 
+jomZmWzwPDCvON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo IhNzbM8m9Yop5w== -----END CERTIFICATE----- -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +DigiCert Assured ID Root G3 +=========================== +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYD 
+VQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1 +MTIwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQ +BgcqhkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJfZn4f5dwb +RXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17QRSAPWXYQ1qAk8C3eNvJs +KTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgF +UaFNN6KDec6NHSrkhDAKBggqhkjOPQQDAwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5Fy +YZ5eEJJZVrmDxxDnOOlYJjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy +1vUhZscv6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +DigiCert Global Root G2 +======================= +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw +HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUx +MjAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3 +dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI2/Ou8jqJ +kTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx1x7e/dfgy5SDN67sH0NO +3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQq2EGnI/yuum06ZIya7XzV+hdG82MHauV +BJVJ8zUtluNJbd134/tJS7SsVQepj5WztCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyM +UNGPHgm+F6HmIcr9g+UQvIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQAB +o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV5uNu +5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY1Yl9PMWLSn/pvtsr +F9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4NeF22d+mQrvHRAiGfzZ0JFrabA0U +WTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NGFdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBH +QRFXGU7Aj64GxJUTFy8bJZ918rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/ +iyK5S9kJRaTepLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl MrY= -----END CERTIFICATE----- -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: 
"DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited -# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited -# Label: "WoSign" -# Serial: 125491772294754854453622855443212256657 -# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d -# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb -# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08 ------BEGIN CERTIFICATE----- -MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV -MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV -BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw -MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX -b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN -rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U -fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc -f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2 -ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M 
-x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR -aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch -zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar -uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K -mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA -Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv -HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H -EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1 -LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ -MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e -JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN -g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp -dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab -R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ -PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce -xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+ -J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl -OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT -ee5Ehr7XHuQe+w== ------END CERTIFICATE----- - -# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited -# Subject: CN=CA 沃通根证书 O=WoSign CA Limited -# Label: "WoSign China" -# Serial: 106921963437422998931660691310149453965 -# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93 -# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6 -# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54 ------BEGIN CERTIFICATE----- -MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG -MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV -BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw -MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl -ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r -D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1 -9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf -v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk -UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L -NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb -+gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V -qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K -yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G -AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK -J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC -AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4 -WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6 -yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj -/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6 -jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2 -ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX -X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n -FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D -u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l -O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le 
-ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1 -2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ== ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV 
-BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT -Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign 
ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb -# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX -kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc -8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G3" -# Serial: 10003001 -# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 -# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc -# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX -DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP -cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW -IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX -xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy -KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR 
-9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az -5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 -6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 -Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP -bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt -BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt -XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd -INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD -U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp -LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 -Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp -gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh -/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw -0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A -fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq -4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR -1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ -QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM -94B7IWcnMFk= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa 
-Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN -MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi 
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- -# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Secure Server CA" -# Serial: 927650371 -# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee -# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 -# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 ------BEGIN CERTIFICATE----- -MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u -ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc -KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u -ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 -MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE -ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j -b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg -U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ -I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 -wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC -AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb -oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 -BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p -dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk -MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp -b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 -MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi -E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa -MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI -hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN -95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd -2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority -# Label: "ValiCert Class 2 VA" -# Serial: 1 -# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 -# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 -# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy -NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY -dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 -WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS -v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v -UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu -IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC -W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd ------END CERTIFICATE----- - -# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Label: "NetLock Express (Class C) Root" -# Serial: 104 -# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4 -# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b -# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f ------BEGIN CERTIFICATE----- -MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx -ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 -b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD -EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X -DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw -DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u -c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr -TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN -BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA -OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC -2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW -RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P -AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW -ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0 -YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz -b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO -ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB -IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs -b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs -ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s -YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg -a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g -SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0 
-aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg -YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg -Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY -ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g -pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4 -Fp1hBWeAyNDYpQcCNJgEjTME1A== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok -# Label: "NetLock Business (Class B) Root" -# Serial: 105 -# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6 -# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af -# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12 ------BEGIN CERTIFICATE----- -MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx -ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 -b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD -EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05 -OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G -A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh -Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l -dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG -SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK -gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX -iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc -Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E -BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G -SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu -b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh -bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv -Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln -aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0 -IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh -c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph -biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo -ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP -UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj -YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo -dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA -bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06 -sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa -n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS -NitjrFgBazMpUIaD8QFI ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority -# Label: "RSA Root Certificate 1" -# Serial: 1 -# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 -# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb -# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy -NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD -cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs -2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY -JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE -Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ -n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A -PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Label: "ValiCert Class 1 VA" -# Serial: 1 -# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb -# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e -# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy -NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y -LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ -TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y -TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 -LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW -I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw -nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. 
-# Label: "Equifax Secure eBusiness CA 1" -# Serial: 4 -# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d -# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 -# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT -ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw -MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j -LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ -KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo -RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu -WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw -Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK -eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM -zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ -WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN -/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure Global eBusiness CA" -# Serial: 1 -# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc -# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 -# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT -ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw -MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj -dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l -c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC -UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc -58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ -o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr -aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA -A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA -Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv -8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Premium Server CA" -# Serial: 1 -# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a -# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a -# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy 
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t -MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB -MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG -A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl -cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE -VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ -ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR -uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI -hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM -pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- - -# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Server CA" -# Serial: 1 -# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d -# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c -# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm -MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx -MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 -dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl -cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 -DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD -gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 -yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX -L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj -EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG -7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e -QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ -qdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority -# Label: "Verisign Class 3 Public Primary Certification Authority" -# Serial: 149843929435818692848040365716851702463 -# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 -# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 -# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G -CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do -lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc -AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Label: "Verisign Class 3 Public Primary Certification Authority" -# Serial: 80507572722862485515306429940691309246 -# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 -# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b -# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G -CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i -2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ -2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network -# Label: "Verisign Class 3 Public Primary Certification Authority - G2" -# Serial: 167285380242319648451154478808036881606 -# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 -# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f -# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 -pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 -13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk -U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i -F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY -oJ2daZH9 ------END CERTIFICATE----- - -# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Label: "GTE CyberTrust Global Root" -# Serial: 421 -# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db -# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 -# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 ------BEGIN CERTIFICATE----- -MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD -VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv -bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv -b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV -UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU -cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds -b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH -iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS -r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 -04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r -GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 -3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P -lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -MIIGCDCCA/CgAwIBAgIQKy5u6tl1NmwUim7bo3yMBzANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTQwMjEy -MDAwMDAwWhcNMjkwMjExMjM1OTU5WjCBkDELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxNjA0BgNVBAMTLUNPTU9ETyBSU0EgRG9tYWluIFZh -bGlkYXRpb24gU2VjdXJlIFNlcnZlciBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEP 
-ADCCAQoCggEBAI7CAhnhoFmk6zg1jSz9AdDTScBkxwtiBUUWOqigwAwCfx3M28Sh -bXcDow+G+eMGnD4LgYqbSRutA776S9uMIO3Vzl5ljj4Nr0zCsLdFXlIvNN5IJGS0 -Qa4Al/e+Z96e0HqnU4A7fK31llVvl0cKfIWLIpeNs4TgllfQcBhglo/uLQeTnaG6 -ytHNe+nEKpooIZFNb5JPJaXyejXdJtxGpdCsWTWM/06RQ1A/WZMebFEh7lgUq/51 -UHg+TLAchhP6a5i84DuUHoVS3AOTJBhuyydRReZw3iVDpA3hSqXttn7IzW3uLh0n -c13cRTCAquOyQQuvvUSH2rnlG51/ruWFgqUCAwEAAaOCAWUwggFhMB8GA1UdIwQY -MBaAFLuvfgI9+qbxPISOre44mOzZMjLUMB0GA1UdDgQWBBSQr2o6lFoL2JDqElZz -30O0Oija5zAOBgNVHQ8BAf8EBAMCAYYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNV -HSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwGwYDVR0gBBQwEjAGBgRVHSAAMAgG -BmeBDAECATBMBgNVHR8ERTBDMEGgP6A9hjtodHRwOi8vY3JsLmNvbW9kb2NhLmNv -bS9DT01PRE9SU0FDZXJ0aWZpY2F0aW9uQXV0aG9yaXR5LmNybDBxBggrBgEFBQcB -AQRlMGMwOwYIKwYBBQUHMAKGL2h0dHA6Ly9jcnQuY29tb2RvY2EuY29tL0NPTU9E -T1JTQUFkZFRydXN0Q0EuY3J0MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5jb21v -ZG9jYS5jb20wDQYJKoZIhvcNAQEMBQADggIBAE4rdk+SHGI2ibp3wScF9BzWRJ2p -mj6q1WZmAT7qSeaiNbz69t2Vjpk1mA42GHWx3d1Qcnyu3HeIzg/3kCDKo2cuH1Z/ -e+FE6kKVxF0NAVBGFfKBiVlsit2M8RKhjTpCipj4SzR7JzsItG8kO3KdY3RYPBps -P0/HEZrIqPW1N+8QRcZs2eBelSaz662jue5/DJpmNXMyYE7l3YphLG5SEXdoltMY -dVEVABt0iN3hxzgEQyjpFv3ZBdRdRydg1vs4O2xyopT4Qhrf7W8GjEXCBgCq5Ojc -2bXhc3js9iPc0d1sjhqPpepUfJa3w/5Vjo1JXvxku88+vZbrac2/4EjxYoIQ5QxG -V/Iz2tDIY+3GH5QFlkoakdH368+PUq4NCNk+qKBR6cGHdNXJ93SrLlP7u3r7l+L4 -HyaPs9Kg4DdbKDsx5Q5XLVq4rXmsXiBmGqW5prU5wfWYQ//u+aen/e7KJD2AFsQX -j4rBYKEMrltDR5FL1ZoXX/nUh8HCjLfn4g8wGTeGrODcQgPmlKidrv0PJFGUzpII -0fxQ8ANAe4hZ7Q7drNJ3gjTcBpUC2JD5Leo31Rpg0Gcg19hCC0Wvgmje3WYkN5Ap -lBlGGSW4gNfL1IYoakRwJiNiqZ+Gb7+6kHDSVneFeO/qJakXzlByjAA6quPbYzSf -+AZxAeKCINT+b72x ------END CERTIFICATE----- -''' - -# Workaround to avoid pyinstaller statics hell. -# request (at the end because openssl) needs a file with -# certs, it can't be injected. Damned coupled code. 
-file_path = os.path.join(get_conan_user_home(), ".conan", "cacert.pem") -if not os.path.exists(file_path): - save(file_path, cacert) +DigiCert Global Root G3 +======================= +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAwHgYD +VQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAw +MDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5k +aWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0C +AQYFK4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FGfp4tn+6O +YwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPOZ9wj/wMco+I+o0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNp +Yim8S8YwCgYIKoZIzj0EAwMDaAAwZQIxAK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y +3maTD/HMsQmP3Wyr+mt/oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34 +VOKa5Vt8sycX +-----END CERTIFICATE----- + +DigiCert Trusted Root G4 +======================== +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBiMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSEw +HwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1 +MTIwMDAwWjBiMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3yithZwuEp +pz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1Ifxp4VpX6+n6lXFllVcq9o +k3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDVySAdYyktzuxeTsiT+CFhmzTrBcZe7Fsa +vOvJz82sNEBfsXpm7nfISKhmV1efVFiODCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGY +QJB5w3jHtrHEtWoYOAMQjdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6 +MUSaM0C/CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCiEhtm +mnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADMfRyVw4/3IbKyEbe7 +f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QYuKZ3AeEPlAwhHbJUKSWJbOUOUlFH +dL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXKchYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8 +oR7FwI+isX4KJpn15GkvmB0t9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBhjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2SV1EY+CtnJYY +ZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd+SeuMIW59mdNOj6PWTkiU0Tr +yF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWcfFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy +7zBZLq7gcfJW5GqXb5JQbZaNaHqasjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iah +ixTXTBmyUEFxPT9NcCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN +5r5N0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie4u1Ki7wb +/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mIr/OSmbaz5mEP0oUA51Aa +5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tK +G48BtieVU+i2iW1bvGjUI+iLUaJW+fCmgKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP +82Z+ +-----END CERTIFICATE----- + +COMODO RSA Certification Authority +================================== +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCBhTELMAkGA1UE +BhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgG +A1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwHhcNMTAwMTE5MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMC 
+R0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UE +ChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR6FSS0gpWsawNJN3Fz0Rn +dJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8Xpz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZ +FGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+ +5eNu/Nio5JIk2kNrYrhV/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pG +x8cgoLEfZd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z+pUX +2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7wqP/0uK3pN/u6uPQL +OvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZahSL0896+1DSJMwBGB7FY79tOi4lu3 +sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVICu9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+C +GCe01a60y1Dma/RMhnEw6abfFobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5 +WdYgGq/yapiqcrxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8w +DQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvlwFTPoCWOAvn9sKIN9SCYPBMt +rFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+ +nq6PK7o9mfjYcwlYRm6mnPTXJ9OV2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSg +tZx8jb8uk2IntznaFxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwW +sRqZCuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiKboHGhfKp +pC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmckejkk9u+UJueBPSZI9FoJA +zMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yLS0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHq +ZJx64SIDqZxubw5lT2yHh17zbqD5daWbQOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk52 +7RH89elWsn2/x20Kk4yl0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7I +LaZRfyHBNVOFBkpdn627G190 +-----END CERTIFICATE----- + +USERTrust RSA Certification Authority +===================================== +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCBiDELMAkGA1UE +BhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQK +ExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMTAwMjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UE +BhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQK +ExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCAEmUXNg7D2wiz +0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2j +Y0K2dvKpOyuR+OJv0OwWIJAJPuLodMkYtJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFn +RghRy4YUVD+8M/5+bJz/Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O ++T23LLb2VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT79uq +/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6c0Plfg6lZrEpfDKE +Y1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmTYo61Zs8liM2EuLE/pDkP2QKe6xJM +lXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97lc6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8 +yexDJtC/QV9AqURE9JnnV4eeUB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+ +eLf8ZxXhyVeEHg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPFUp/L+M+ZBn8b2kMVn54CVVeW +FPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KOVWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ +7l8wXEskEVX/JJpuXior7gtNn3/3ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQ +Eg9zKC7F4iRO/Fjs8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM 
+8WcRiQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYzeSf7dNXGi +FSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZXHlKYC6SQK5MNyosycdi +yA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9c +J2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRBVXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGw +sAvgnEzDHNb842m1R0aBL6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gx +Q+6IHdfGjjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +USERTrust ECC Certification Authority +===================================== +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDELMAkGA1UEBhMC +VVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwHhcNMTAwMjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMC +VVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqfloI+d61SRvU8Za2EurxtW2 +0eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinngo4N+LZfQYcTxmdwlkWOrfzCjtHDix6Ez +nPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0GA1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNV +HQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBB +HU6+4WMBzzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbWRNZu +9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +GlobalSign ECC Root CA - R4 +=========================== +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEkMCIGA1UECxMb +R2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQD +EwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoXDTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMb +R2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQD +EwpHbG9iYWxTaWduMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprl +OQcJFspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAwDgYDVR0P +AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61FuOJAf/sKbvu+M8k8o4TV +MAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGXkPoUVy0D7O48027KqGx2vKLeuwIgJ6iF +JzWbVsaj8kfSt24bAgAXqmemFZHe+pTsewv4n4Q= +-----END CERTIFICATE----- + +GlobalSign ECC Root CA - R5 +=========================== +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEkMCIGA1UECxMb +R2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQD +EwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoXDTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMb +R2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQD +EwpHbG9iYWxTaWduMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6 +SFkc8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8kehOvRnkmS +h5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAd +BgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYIKoZIzj0EAwMDaAAwZQIxAOVpEslu28Yx +uglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7 +yFz9SO8NdCKoCOJuxUnOxwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +Staat der Nederlanden Root CA - G3 +================================== +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJOTDEeMBwGA1UE +CgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFhdCBkZXIgTmVkZXJsYW5kZW4g +Um9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloXDTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMC +TkwxHjAcBgNVBAoMFVN0YWF0IGRlciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5l 
+ZGVybGFuZGVuIFJvb3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4y +olQPcPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WWIkYFsO2t +x1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqXxz8ecAgwoNzFs21v0IJy +EavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFyKJLZWyNtZrVtB0LrpjPOktvA9mxjeM3K +Tj215VKb8b475lRgsGYeCasH/lSJEULR9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUur +mkVLoR9BvUhTFXFkC4az5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU5 +1nus6+N86U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7Ngzp +07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHPbMk7ccHViLVlvMDo +FxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXtBznaqB16nzaeErAMZRKQFWDZJkBE +41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTtXUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleu +yjWcLhL75LpdINyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwpLiniyMMB8jPq +KqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8Ipf3YF3qKS9Ysr1YvY2WTxB1 +v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixpgZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA +8KCWAg8zxXHzniN9lLf9OtMJgwYh/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b +8KKaa8MFSu1BYBQw0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0r +mj1AfsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq4BZ+Extq +1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR1VmiiXTTn74eS9fGbbeI +JG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/QFH1T/U67cjF68IeHRaVesd+QnGTbksV +tzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM94B7IWcnMFk= +-----END CERTIFICATE----- + +Staat der Nederlanden EV Root CA +================================ +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJOTDEeMBwGA1UE +CgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFhdCBkZXIgTmVkZXJsYW5kZW4g +RVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0yMjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5M +MR4wHAYDVQQKDBVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRl +cmxhbmRlbiBFViBSb290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkk +SzrSM4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nCUiY4iKTW +O0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3dZ//BYY1jTw+bbRcwJu+r +0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46prfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8 +Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13lpJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gV +XJrm0w912fxBmJc+qiXbj5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr +08C+eKxCKFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS/ZbV +0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0XcgOPvZuM5l5Tnrmd +74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH1vI4gnPah1vlPNOePqc7nvQDs/nx +fRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrPpx9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwa +ivsnuL8wbqg7MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u2dfOWBfoqSmu +c0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHSv4ilf0X8rLiltTMMgsT7B/Zq +5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTCwPTxGfARKbalGAKb12NMcIxHowNDXLldRqAN +b/9Zjr7dn3LDWyvfjFvO5QxGbJKyCqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tN +f1zuacpzEPuKqf2evTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi +5Dp6Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIaGl6I6lD4 +WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeLeG9QgkRQP2YGiqtDhFZK 
+DyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGy +eUN51q1veieQA6TqJIc/2b3Z6fJfUEkc7uzXLg== +-----END CERTIFICATE----- + +IdenTrust Commercial Root CA 1 +============================== +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBKMQswCQYDVQQG +EwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBS +b290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQwMTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzES +MBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENB +IDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ld +hNlT3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU+ehcCuz/ +mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gpS0l4PJNgiCL8mdo2yMKi +1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1bVoE/c40yiTcdCMbXTMTEl3EASX2MN0C +XZ/g1Ue9tOsbobtJSdifWwLziuQkkORiT0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl +3ZBWzvurpWCdxJ35UrCLvYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzy +NeVJSQjKVsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZKdHzV +WYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHTc+XvvqDtMwt0viAg +xGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hvl7yTmvmcEpB4eoCHFddydJxVdHix +uuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5NiGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMC +AQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZI +hvcNAQELBQADggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwtLRvM7Kqas6pg +ghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93nAbowacYXVKV7cndJZ5t+qnt +ozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmV +YjzlVYA211QC//G5Xc7UI2/YRYRKW2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUX +feu+h1sXIFRRk0pTAwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/ro +kTLql1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG4iZZRHUe +2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZmUlO+KWA2yUPHGNiiskz +Z2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7R +cGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +IdenTrust Public Sector Root CA 1 +================================= +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQG +EwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3Rv +ciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcNMzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJV +UzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBS +b290IENBIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTy +P4o7ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGyRBb06tD6 +Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlSbdsHyo+1W/CD80/HLaXI +rcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF/YTLNiCBWS2ab21ISGHKTN9T0a9SvESf +qy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoS +mJxZZoY+rfGwyj4GD3vwEUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFn +ol57plzy9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9VGxyh +LrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ2fjXctscvG29ZV/v +iDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsVWaFHVCkugyhfHMKiq3IXAAaOReyL +4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gDW/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8B +Af8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMw +DQYJKoZIhvcNAQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHVDRDtfULAj+7A +mgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9TaDKQGXSc3z1i9kKlT/YPyNt +GtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8GlwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFt +m6/n6J91eEyrRjuazr8FGF1NFTwWmhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMx +NRF4eKLg6TCMf4DfWN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4 +Mhn5+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJtshquDDI +ajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhAGaQdp/lLQzfcaFpPz+vC +ZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ +3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +Entrust Root Certification Authority - G2 +========================================= +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMCVVMxFjAUBgNV +BAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVy +bXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ug +b25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIw +HhcNMDkwNzA3MTcyNTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoT +DUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMx +OTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP +/vaCeb9zYQYKpSfYs1/TRU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXz +HHfV1IWNcCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hWwcKU +s/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1U1+cPvQXLOZprE4y +TGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0jaWvYkxN4FisZDQSA/i2jZRjJKRx +AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ6 +0B7vfec7aVHUbI2fkBJmqzANBgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5Z +iXMRrEPR9RP/jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v1fN2D807iDgi +nWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4RnAuknZoh8/CbCzB428Hch0P+ +vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmHVHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xO +e4pIb4tF9g== +-----END CERTIFICATE----- + +Entrust Root Certification Authority - EC1 +========================================== +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkGA1UEBhMCVVMx +FjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVn +YWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEzMDEGA1UEAxMqRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRUMxMB4XDTEyMTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYw +FAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2Fs +LXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQg +dXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt +IEVDMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHy +AsWfoPZb1YsGGYZPUxBtByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef +9eNi1KlHBz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVCR98crlOZF7ZvHH3h +vxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nXhTcGtXsI/esni0qU+eH6p44mCOh8 +kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +CFCA EV ROOT +============ +-----BEGIN 
CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJDTjEwMC4GA1UE +CgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNB +IEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkxMjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEw +MC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQD +DAxDRkNBIEVWIFJPT1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnV +BU03sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpLTIpTUnrD +7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5/ZOkVIBMUtRSqy5J35DN +uF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp7hZZLDRJGqgG16iI0gNyejLi6mhNbiyW +ZXvKWfry4t3uMCz7zEasxGPrb382KzRzEpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7 +xzbh72fROdOXW3NiGUgthxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9f +py25IGvPa931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqotaK8K +gWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNgTnYGmE69g60dWIol +hdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfVPKPtl8MeNPo4+QgO48BdK4PRVmrJ +tqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hvcWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAf +BgNVHSMEGDAWgBTj/i39KNALtbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB +/wQEAwIBBjAdBgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObTej/tUxPQ4i9q +ecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdLjOztUmCypAbqTuv0axn96/Ua +4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBSESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sG +E5uPhnEFtC+NiWYzKXZUmhH4J/qyP5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfX +BDrDMlI1Dlb4pd19xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjn +aH9dCi77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN5mydLIhy +PDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe/v5WOaHIz16eGWRGENoX +kbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+ZAAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3C +ekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 +==================================================== +-----BEGIN CERTIFICATE----- +MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UEBhMCVFIxDzAN +BgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxnaSDEsGxldGnFn2ltIHZlIEJp +bGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1Qg +RWxla3Ryb25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAw +ODA3MDFaFw0yMzA0MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0w +SwYDVQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnE +n2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRp +ZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEApCUZ4WWe60ghUEoI5RHwWrom/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537 +jVJp45wnEFPzpALFp/kRGml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1m +ep5Fimh34khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z5UNP +9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0hO8EuPbJbKoCPrZV +4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QIDAQABo0IwQDAdBgNVHQ4EFgQUVpkH +HtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI +hvcNAQELBQADggEBAJ5FdnsXSDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPo +BP5yCccLqh0lVX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq +URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nfpeYVhDfwwvJl +lpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CFYv4HAqGEVka+lgqaE9chTLd8 
+B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW+qtB4Uu2NQvAmxU= +-----END CERTIFICATE----- + +Certinomis - Root CA +==================== +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjETMBEGA1UEChMK +Q2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAbBgNVBAMTFENlcnRpbm9taXMg +LSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMzMTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIx +EzARBgNVBAoTCkNlcnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRD +ZXJ0aW5vbWlzIC0gUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQos +P5L2fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJflLieY6pOo +d5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQVWZUKxkd8aRi5pwP5ynap +z8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDFTKWrteoB4owuZH9kb/2jJZOLyKIOSY00 +8B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09x +RLWtwHkziOC/7aOgFLScCbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE +6OXWk6RiwsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJwx3t +FvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SGm/lg0h9tkQPTYKbV +PZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4F2iw4lNVYC2vPsKD2NkJK/DAZNuH +i5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZngWVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGj +YzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I +6tNxIqSSaHh02TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/0KGRHCwPT5iV +WVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWwF6YSjNRieOpWauwK0kDDPAUw +Pk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZSg081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAX +lCOotQqSD7J6wWAsOMwaplv/8gzjqh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJ +y29SWwNyhlCVCNSNh4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9 +Iff/ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8Vbtaw5Bng +DwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwjY/M50n92Uaf0yKHxDHYi +I0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nM +cyrDflOR1m749fPH0FFNjkulW+YZFzvWgQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVr +hkIGuUE= +-----END CERTIFICATE----- + +OISTE WISeKey Global Root GB CA +=============================== +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBtMQswCQYDVQQG +EwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAw +MzJaFw0zOTEyMDExNTEwMzFaMG0xCzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYD +VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEds +b2JhbCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3HEokKtaX +scriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGxWuR51jIjK+FTzJlFXHtP +rby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk +9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNku7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4o +Qnc/nSMbsrY9gBQHTC5P99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvg +GUpuuy9rM2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZI +hvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrghcViXfa43FK8+5/ea4n32cZiZBKpD +dHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0 +VQreUGdNZtGn//3ZwLWoo4rOZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEui +HZeeevJuQHHfaPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= 
+-----END CERTIFICATE----- + +SZAFIR ROOT CA2 +=============== +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQELBQAwUTELMAkG +A1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6ZW5pb3dhIFMuQS4xGDAWBgNV +BAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkwNzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJ +BgNVBAYTAlBMMSgwJgYDVQQKDB9LcmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYD +VQQDDA9TWkFGSVIgUk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5Q +qEvNQLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT3PSQ1hNK +DJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw3gAeqDRHu5rr/gsUvTaE +2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr63fE9biCloBK0TXC5ztdyO4mTp4CEHCdJ +ckm1/zuVnsHMyAHs6A6KCpbns6aH5db5BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwi +ieDhZNRnvDF5YTy7ykHNXGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0P +AQH/BAQDAgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsFAAOC +AQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw8PRBEew/R40/cof5 +O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOGnXkZ7/e7DDWQw4rtTw/1zBLZpD67 +oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCPoky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul +4+vJhaAlIDf7js4MNIThPIGyd05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6 ++/NNIxuZMzSgLvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +Certum Trusted Network CA 2 +=========================== +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCBgDELMAkGA1UE +BhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMuQS4xJzAlBgNVBAsTHkNlcnR1 +bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIGA1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29y +ayBDQSAyMCIYDzIwMTExMDA2MDgzOTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQ +TDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENB +IDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWADGSdhhuWZGc/IjoedQF9 +7/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+o +CgCXhVqqndwpyeI1B+twTUrWwbNWuKFBOJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40b +Rr5HMNUuctHFY9rnY3lEfktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2p +uTRZCr+ESv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1mo130 +GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02isx7QBlrd9pPPV3WZ +9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOWOZV7bIBaTxNyxtd9KXpEulKkKtVB +Rgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgezTv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pye +hizKV/Ma5ciSixqClnrDvFASadgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vM +BhBgu4M1t15n3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZI +hvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQF/xlhMcQSZDe28cmk4gmb3DW +Al45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTfCVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuA +L55MYIR4PSFk1vtBHxgP58l1cb29XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMo +clm2q8KMZiYcdywmdjWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tM +pkT/WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jbAoJnwTnb +w3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksqP/ujmv5zMnHCnsZy4Ypo +J/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Kob7a6bINDd82Kkhehnlt4Fj1F4jNy3eFm +ypnTycUm/Q1oBEauttmbjL4ZvrHG8hnjXALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLX +is7VmFxWlgPF7ncGNf/P5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7 +zAYspsbiDrW5viSP +-----END CERTIFICATE----- + +Hellenic Academic and Research Institutions RootCA 
2015 +======================================================= +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1IxDzANBgNVBAcT +BkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0 +aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgUm9vdENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAx +MTIxWjCBpjELMAkGA1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMg +QWNhZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNV +BAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9vdENBIDIw +MTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDC+Kk/G4n8PDwEXT2QNrCROnk8Zlrv +bTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+eh +iGsxr/CL0BgzuNtFajT0AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+ +6PAQZe104S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06CojXd +FPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV9Cz82XBST3i4vTwr +i5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrDgfgXy5I2XdGj2HUb4Ysn6npIQf1F +GQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2 +fu/Z8VFRfS0myGlZYeCsargqNhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9mu +iNX6hME6wGkoLfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVdctA4GGqd83EkVAswDQYJKoZI +hvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0IXtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+ +D1hYc2Ryx+hFjtyp8iY/xnmMsVMIM4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrM +d/K4kPFox/la/vot9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+y +d+2VZ5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/eaj8GsGsVn +82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnhX9izjFk0WaSrT2y7Hxjb +davYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQl033DlZdwJVqwjbDG2jJ9SrcR5q+ss7F +Jej6A7na+RZukYT1HCjI/CbM1xyQVqdfbzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVt +J94Cj8rDtSvK6evIIVM4pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGa +JI7ZjnHKe7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0vm9q +p/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +Hellenic Academic and Research Institutions ECC RootCA 2015 +=========================================================== +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzANBgNVBAcTBkF0 +aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9u +cyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgRUNDIFJvb3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEw +MzcxMlowgaoxCzAJBgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmlj +IEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUQwQgYD +VQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIEVDQyBSb290 +Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKgQehLgoRc4vgxEZmGZE4JJS+dQS8KrjVP +dJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJajq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoK +Vlp8aQuqgAkkbH7BRqNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0O +BBYEFLQiC4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaeplSTA +GiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7SofTUwJCA3sS61kFyjn +dc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +Certplus Root CA G1 +=================== +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUAMD4xCzAJBgNV 
+BAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBsdXMgUm9vdCBDQSBHMTAe +Fw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhD +ZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHN +r49aiZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt6kuJPKNx +Qv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP0FG7Yn2ksYyy/yARujVj +BYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTv +LRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDEEW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2 +z4QTd28n6v+WZxcIbekN1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc +4nBvCGrch2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCTmehd +4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV4EJQeIQEQWGw9CEj +jy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPOWftwenMGE9nTdDckQQoRb5fc5+R+ +ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0G +A1UdDgQWBBSowcCbkahDFXxdBie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHY +lwuBsTANBgkqhkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh +66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7/SMNkPX0XtPG +YX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BSS7CTKtQ+FjPlnsZlFT5kOwQ/ +2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F +6ALEUz65noe8zDUa3qHpimOHZR4RKttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilX +CNQ314cnrUlZp5GrRHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWe +tUNy6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEVV/xuZDDC +VRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5g4VCXA9DO2pJNdWY9BW/ ++mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl++O/QmueD6i9a5jc2NvLi6Td11n0bt3+ +qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= +-----END CERTIFICATE----- + +Certplus Root CA G2 +=================== +-----BEGIN CERTIFICATE----- +MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4xCzAJBgNVBAYT +AkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBsdXMgUm9vdCBDQSBHMjAeFw0x +NDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0 +cGx1czEcMBoGA1UEAwwTQ2VydHBsdXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IA +BM0PW1aC3/BFGtat93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uN +Am8xIk0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMB8GA1Ud +IwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqGSM49BAMDA2gAMGUCMHD+sAvZ94OX7PNV +HdTcswYO/jOYnYs5kGuUIe22113WTNchp+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjl +vPl5adytRSv3tjFzzAalU5ORGpOucGpnutee5WEaXw== +-----END CERTIFICATE----- + +OpenTrust Root CA G1 +==================== +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUAMEAxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5UcnVzdCBSb290IENBIEcx +MB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAwMFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoM +CU9wZW5UcnVzdDEdMBsGA1UEAwwUT3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7fa +Yp6bwiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX/uMftk87 +ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR077F9jAHiOH3BX2pfJLKO +YheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGPuY4zbGneWK2gDqdkVBFpRGZPTBKnjix9 +xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLxp2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO 
+9z0M+Yo0FMT7MzUj8czxKselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq +3ywgsNw2TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+WG+Oi +n6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPwvFEVVJSmdz7QdFG9 +URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYYEQRVzXR7z2FwefR7LFxckvzluFqr +TJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUl0YhVyE12jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/Px +N3DlCPaTKbYwDQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E +PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kfgLMtMrpkZ2Cv +uVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbSFXJfLkur1J1juONI5f6ELlgK +n0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLh +X4SPgPL0DTatdrOjteFkdjpY3H1PXlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80 +nR14SohWZ25g/4/Ii+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcm +GS3tTAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L9109S5zvE/ +bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/KyPu1svf0OnWZzsD2097+o +4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJAwSQiumPv+i2tCqjI40cHLI5kqiPAlxA +OXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj1oxx +-----END CERTIFICATE----- + +OpenTrust Root CA G2 +==================== +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUAMEAxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5UcnVzdCBSb290IENBIEcy +MB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoM +CU9wZW5UcnVzdDEdMBsGA1UEAwwUT3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+ +Ntmh/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78eCbY2albz +4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/61UWY0jUJ9gNDlP7ZvyCV +eYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fEFY8ElggGQgT4hNYdvJGmQr5J1WqIP7wt +UdGejeBSzFfdNTVY27SPJIjki9/ca1TSgSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz +3GIZ38i1MH/1PCZ1Eb3XG7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj +3CzMpSZyYhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaHvGOz +9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4t/bQWVyJ98LVtZR0 +0dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/gh7PU3+06yzbXfZqfUAkBXKJOAGT +y3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUajn6QiL35okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59 +M4PLuG53hq8wDQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz +Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0nXGEL8pZ0keI +mUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qTRmTFAHneIWv2V6CG1wZy7HBG +S4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpTwm+bREx50B1ws9efAvSyB7DH5fitIw6mVskp +EndI2S9G/Tvw/HRwkqWOOAgfZDC2t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ +6e18CL13zSdkzJTaTkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97kr +gCf2o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU3jg9CcCo +SmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eAiN1nE28daCSLT7d0geX0 +YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14fWKGVyasvc0rQLW6aWQ9VGHgtPFGml4vm +u7JwqkwR3v98KzfUetF3NI/n+UL3PIEMS1IK +-----END CERTIFICATE----- + +OpenTrust Root CA G3 +==================== +-----BEGIN CERTIFICATE----- +MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5UcnVzdCBSb290IENBIEczMB4X 
+DTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9w +ZW5UcnVzdDEdMBsGA1UEAwwUT3BlblRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAARK7liuTcpm3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5B +ta1doYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAf +BgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAKBggqhkjOPQQDAwNpADBmAjEAj6jcnboM +BBf6Fek9LykBl7+BFjNAk2z8+e2AcG+qj9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta +3U1fJAuwACEl74+nBCZx4nxp5V2a+EEfOzmTk51V6s2N8fvB +-----END CERTIFICATE----- + +ISRG Root X1 +============ +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAwTzELMAkGA1UE +BhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2VhcmNoIEdyb3VwMRUwEwYDVQQD +EwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQG +EwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMT +DElTUkcgUm9vdCBYMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54r +Vygch77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+0TM8ukj1 +3Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6UA5/TR5d8mUgjU+g4rk8K +b4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sWT8KOEUt+zwvo/7V3LvSye0rgTBIlDHCN +Aymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyHB5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ +4Q7e2RCOFvu396j3x+UCB5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf +1b0SHzUvKBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWnOlFu +hjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTnjh8BCNAw1FtxNrQH +usEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbwqHyGO0aoSCqI3Haadr8faqU9GY/r +OPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CIrU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4G +A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY +9umbbjANBgkqhkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ3BebYhtF8GaV +0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KKNFtY2PwByVS5uCbMiogziUwt +hDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJw +TdwJx4nLCgdNbOhdjsnvzqvHu7UrTkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nx +e5AW0wdeRlN8NwdCjNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZA +JzVcoyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq4RgqsahD +YVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPAmRGunUHBcnWEvgJBQl9n +JEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57demyPxgcYxn/eR44/KJ4EBs+lVDR3veyJ +m+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +AC RAIZ FNMT-RCM +================ +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsxCzAJBgNVBAYT +AkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBGTk1ULVJDTTAeFw0wODEw +MjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJD +TTEZMBcGA1UECwwQQUMgUkFJWiBGTk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBALpxgHpMhm5/yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcf +qQgfBBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAzWHFctPVr +btQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxFtBDXaEAUwED653cXeuYL +j2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z374jNUUeAlz+taibmSXaXvMiwzn15Cou +08YfxGyqxRxqAQVKL9LFwag0Jl1mpdICIfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mw +WsXmo8RZZUc1g16p6DULmbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnT +tOmlcYF7wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peSMKGJ 
+47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2ZSysV4999AeU14EC +ll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMetUqIJ5G+GR4of6ygnXYMgrwTJbFaa +i0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FPd9xf3E6Jobd2Sn9R2gzL+HYJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1o +dHRwOi8vd3d3LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1RXxlDPiyN8+s +D8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYMLVN0V2Ue1bLdI4E7pWYjJ2cJ +j+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrT +Qfv6MooqtyuGC2mDOL7Nii4LcK2NJpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW ++YJF1DngoABd15jmfZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7 +Ixjp6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp1txyM/1d +8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B9kiABdcPUXmsEKvU7ANm +5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wokRqEIr9baRRmW1FMdW4R58MD3R++Lj8UG +rp1MYp3/RgT408m2ECVAdf4WqslKYIYvuu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +Amazon Root CA 1 +================ +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsFADA5MQswCQYD +VQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24gUm9vdCBDQSAxMB4XDTE1 +MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTELMAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpv +bjEZMBcGA1UEAxMQQW1hem9uIFJvb3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBALJ4gHHKeNXjca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgH +FzZM9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qwIFAGbHrQ +gLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6VOujw5H5SNz/0egwLX0t +dHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L93FcXmn/6pUCyziKrlA4b9v7LWIbxcce +VOF34GfID5yHI9Y/QCB/IIDEgEw+OyQmjgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3 +DQEBCwUAA4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDIU5PM +CCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUsN+gDS63pYaACbvXy +8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vvo/ufQJVtMVT8QtPHRh8jrdkPSHCa +2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2 +xJNDd2ZhwLnoQdeXeGADbkpyrqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +Amazon Root CA 2 +================ +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwFADA5MQswCQYD +VQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24gUm9vdCBDQSAyMB4XDTE1 +MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpv +bjEZMBcGA1UEAxMQQW1hem9uIFJvb3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBAK2Wny2cSkxKgXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4 +kHbZW0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg1dKmSYXp +N+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K8nu+NQWpEjTj82R0Yiw9 +AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvd +fLC6HM783k81ds8P+HgfajZRRidhW+mez/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAEx +kv8LV/SasrlX6avvDXbR8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSS +btqDT6ZjmUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz7Mt0 +Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6+XUyo05f7O0oYtlN +c/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI0u1ufm8/0i2BWSlmy5A5lREedCf+ +3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSw 
+DPBMMPQFWAJI/TPlUq9LhONmUjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oA +A7CXDpO8Wqj2LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kSk5Nrp+gvU5LE +YFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl7uxMMne0nxrpS10gxdr9HIcW +xkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygmbtmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQ +gj9sAq+uEjonljYE1x2igGOpm/HlurR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbW +aQbLU8uz/mtBzUF+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoV +Yh63n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE76KlXIx3 +KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H9jVlpNMKVv/1F2Rs76gi +JUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT4PsJYGw= +-----END CERTIFICATE----- + +Amazon Root CA 3 +================ +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5MQswCQYDVQQG +EwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24gUm9vdCBDQSAzMB4XDTE1MDUy +NjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZ +MBcGA1UEAxMQQW1hem9uIFJvb3QgQ0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZB +f8ANm+gBG1bG8lKlui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjr +Zt6jQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSrttvXBp43 +rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkrBqWTrBqYaGFy+uGh0Psc +eGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteMYyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +Amazon Root CA 4 +================ +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5MQswCQYDVQQG +EwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24gUm9vdCBDQSA0MB4XDTE1MDUy +NjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZ +MBcGA1UEAxMQQW1hem9uIFJvb3QgQ0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN +/sGKe0uoe0ZLY7Bi9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri +83BkM6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WBMAoGCCqGSM49BAMDA2gA +MGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlwCkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1 +AE47xDqUEpHJWEadIRNyp4iciuRMStuW1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +LuxTrust Global Root 2 +====================== +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQELBQAwRjELMAkG +A1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNVBAMMFkx1eFRydXN0IEdsb2Jh +bCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUwMzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEW +MBQGA1UECgwNTHV4VHJ1c3QgUy5BLjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wm +Kb3FibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTemhfY7RBi2 +xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1EMShduxq3sVs35a0VkBC +wGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsnXpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm +1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkm +FRseTJIpgp7VkoGSQXAZ96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niF +wpN6cj5mj5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4gDEa/ +a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+8kPREd8vZS9kzl8U +ubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2jX5t/Lax5Gw5CMZdjpPuKadUiDTSQ +MC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmHhFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB +/zBCBgNVHSAEOzA5MDcGByuBKwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5 
+Lmx1eHRydXN0Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQELBQADggIBAGoZ +FO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9BzZAcg4atmpZ1gDlaCDdLnIN +H2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTOjFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW +7MM3LGVYvlcAGvI1+ut7MV3CwRI9loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIu +ZY+kt9J/Z93I055cqqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWA +VWe+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/JEAdemrR +TxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKrezrnK+T+Tb/mjuuqlPpmt +/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQfLSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc +7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31I +iyBMz2TWuJdGsE7RKlY6oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 +============================================= +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIxGDAWBgNVBAcT +D0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxpbXNlbCB2ZSBUZWtub2xvamlr +IEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0wKwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24g +TWVya2V6aSAtIEthbXUgU00xNjA0BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRp +ZmlrYXNpIC0gU3VydW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYD +VQQGEwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXllIEJpbGlt +c2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklUQUsxLTArBgNVBAsTJEth +bXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBTTTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11 +IFNNIFNTTCBLb2sgU2VydGlmaWthc2kgLSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAr3UwM6q7a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y8 +6Ij5iySrLqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INrN3wc +wv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2XYacQuFWQfw4tJzh0 +3+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/iSIzL+aFCr2lqBs23tPcLG07xxO9 +WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4fAJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQU +ZT/HiobGPN08VFw1+DrtUgxHV8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJ +KoZIhvcNAQELBQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPfIPP54+M638yc +lNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4lzwDGrpDxpa5RXI4s6ehlj2R +e37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0j +q5Rm+K37DwhuJi1/FwcJsoz7UMCflo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +GDCA TrustAUTH R5 ROOT +====================== +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UEBhMCQ04xMjAw +BgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8wHQYDVQQD +DBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVow +YjELMAkGA1UEBhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJjDp6L3TQs +AlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBjTnnEt1u9ol2x8kECK62p +OqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+uKU49tm7srsHwJ5uu4/Ts765/94Y9cnrr +pftZTqfrlYwiOXnhLQiPzLyRuEH3FMEjqcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ +9Cy5WmYqsBebnh52nUpmMUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQ +xXABZG12ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloPzgsM 
+R6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3GkL30SgLdTMEZeS1SZ +D2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeCjGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4 +oR24qoAATILnsn8JuLwwoC8N9VKejveSswoAHQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx +9hoh49pwBiFYFIeFd3mqgnkCAwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlR +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZmDRd9FBUb1Ov9 +H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5COmSdI31R9KrO9b7eGZONn35 +6ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ryL3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd ++PwyvzeG5LuOmCd+uh8W4XAR8gPfJWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQ +HtZa37dG/OaG+svgIHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBD +F8Io2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV09tL7ECQ +8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQXR4EzzffHqhmsYzmIGrv +/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrqT8p+ck0LcIymSLumoRT2+1hEmRSuqguT +aaApJUqlyyvdimYHFngVV3Eb7PVHhPOeMTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +TrustCor RootCert CA-1 +====================== +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYDVQQGEwJQQTEP +MA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEkMCIGA1UECgwbVHJ1c3RDb3Ig +U3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkxHzAdBgNVBAMMFlRydXN0Q29yIFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkx +MjMxMTcyMzE2WjCBpDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFu +YW1hIENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUGA1UECwwe +VHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZUcnVzdENvciBSb290Q2Vy +dCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv463leLCJhJrMxnHQFgKq1mq +jQCj/IDHUHuO1CAmujIS2CNUSSUQIpidRtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4 +pQa81QBeCQryJ3pS/C3Vseq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0 +JEsq1pme9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CVEY4h +gLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorWhnAbJN7+KIor0Gqw +/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/DeOxCbeKyKsZn3MzUOcwHwYDVR0j +BBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwDQYJKoZIhvcNAQELBQADggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5 +mDo4Nvu7Zp5I/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZyonnMlo2HD6C +qFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djtsL1Ac59v2Z3kf9YKVmgenFK+P +3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdNzl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +TrustCor RootCert CA-2 +====================== +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNVBAYTAlBBMQ8w +DQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQwIgYDVQQKDBtUcnVzdENvciBT +eXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRydXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0 +eTEfMB0GA1UEAwwWVHJ1c3RDb3IgUm9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEy +MzExNzI2MzlaMIGkMQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5h +bWEgQ2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29yIFJvb3RDZXJ0 +IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCnIG7CKqJiJJWQdsg4foDSq8Gb +ZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9Nk +RvRUqdw6VC0xK5mC8tkq1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1 
+oYxOdqHp2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nKDOOb +XUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hapeaz6LMvYHL1cEksr1 +/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF3wP+TfSvPd9cW436cOGlfifHhi5q +jxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQP +eSghYA2FFn3XVDjxklb9tTNMg9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+Ctg +rKAmrhQhJ8Z3mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAdBgNVHQ4EFgQU +2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6UnrybPZx9mCAZ5YwwYrIwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/h +Osh80QA9z+LqBrWyOrsGS2h60COXdKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnp +kpfbsEZC89NiqpX+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv +2wnL/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RXCI/hOWB3 +S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYaZH9bDTMJBzN7Bj8RpFxw +PIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dv +DDqPys/cA8GiCcjl/YBeyGBCARsaU1q7N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYU +RpFHmygk71dSTlxCnKr3Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANE +xdqtvArBAs8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp5KeX +RKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu1uwJ +-----END CERTIFICATE----- + +TrustCor ECA-1 +============== +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYDVQQGEwJQQTEP +MA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEkMCIGA1UECgwbVHJ1c3RDb3Ig +U3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkxFzAVBgNVBAMMDlRydXN0Q29yIEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3Mjgw +N1owgZwxCzAJBgNVBAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5 +MSQwIgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRydXN0Q29y +IENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3IgRUNBLTEwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb3w9U73NjKYKtR8aja+3+XzP4Q1HpGjOR +MRegdMTUpwHmspI+ap3tDvl0mEDTPwOABoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23 +xFUfJ3zSCNV2HykVh0A53ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmc +p0yJF4OuowReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/wZ0+ +fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZFZtS6mFjBAgMBAAGj +YzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAfBgNVHSMEGDAWgBREnkj1zG1I1KBL +f/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF +AAOCAQEABT41XBVwm8nHc2FvcivUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u +/ukZMjgDfxT2AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50soIipX1TH0Xs +J5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BIWJZpTdwHjFGTot+fDz2LYLSC +jaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1WitJ/X5g== +-----END CERTIFICATE----- + +SSL.com Root Certification Authority RSA +======================================== +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UEBhMCVVMxDjAM +BgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9TU0wgQ29ycG9yYXRpb24x +MTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYw +MjEyMTczOTM5WhcNNDEwMjEyMTczOTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMx +EDAOBgNVBAcMB0hvdXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NM +LmNvbSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcNAQEBBQAD 
+ggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2RxFdHaxh3a3by/ZPkPQ/C +Fp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aXqhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8 +P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcCC52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/ge +oeOy3ZExqysdBP+lSgQ36YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkp +k8zruFvh/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrFYD3Z +fBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93EJNyAKoFBbZQ+yODJ +gUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVcUS4cK38acijnALXRdMbX5J+tB5O2 +UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi8 +1xtZPCvM8hnIk2snYxnP/Okm+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4s +bE6x/c+cCbqiM+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGVcpNxJK1ok1iOMq8bs3AD/CUr +dIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBcHadm47GUBwwyOabqG7B52B2ccETjit3E+ZUf +ijhDPwGFpUenPUayvOUiaPd7nNgsPgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAsl +u1OJD7OAUN5F7kR/q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjq +erQ0cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jra6x+3uxj +MxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90IH37hVZkLId6Tngr75qNJ +vTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/YK9f1JmzJBjSWFupwWRoyeXkLtoh/D1JI +Pb9s2KJELtFOt3JY04kTlf5Eq/jXixtunLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406y +wKBjYZC6VWg3dGq2ktufoYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NI +WuuA8ShYIc2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +SSL.com Root Certification Authority ECC +======================================== +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMCVVMxDjAMBgNV +BAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9TU0wgQ29ycG9yYXRpb24xMTAv +BgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEy +MTgxNDAzWhcNNDEwMjEyMTgxNDAzWjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAO +BgNVBAcMB0hvdXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuBBAAiA2IA +BEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI7Z4INcgn64mMU1jrYor+ +8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPgCemB+vNH06NjMGEwHQYDVR0OBBYEFILR +hXMw5zUE044CkvvlpNHEIejNMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTT +jgKS++Wk0cQh6M0wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCW +e+0F+S8Tkdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+gA0z +5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +SSL.com EV Root Certification Authority RSA R2 +============================================== +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNVBAYTAlVTMQ4w +DAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9u +MTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MB4XDTE3MDUzMTE4MTQzN1oXDTQyMDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQI +DAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYD +VQQDDC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvqM0fNTPl9fb69LT3w23jh +hqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssufOePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7w +cXHswxzpY6IXFJ3vG2fThVUCAtZJycxa4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTO 
+Zw+oz12WGQvE43LrrdF9HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+ +B6KjBSYRaZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcAb9Zh +CBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQGp8hLH94t2S42Oim +9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQVPWKchjgGAGYS5Fl2WlPAApiiECto +RHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMOpgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+Slm +JuwgUHfbSguPvuUCYHBBXtSuUDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48 ++qvWBkofZ6aYMBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa49QaAJadz20Zp +qJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBWs47LCp1Jjr+kxJG7ZhcFUZh1 +++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nx +Y/hoLVUE0fKNsKTPvDxeH3jnpaAgcLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2G +guDKBAdRUNf/ktUM79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDz +OFSz/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXtll9ldDz7 +CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEmKf7GUmG6sXP/wwyc5Wxq +lD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKKQbNmC1r7fSOl8hqw/96bg5Qu0T/fkreR +rwU7ZcegbLHNYhLDkBvjJc40vG93drEQw/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1 +hlMYegouCRw2n5H9gooiS9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX +9hwJ1C07mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +SSL.com EV Root Certification Authority ECC +=========================================== +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMCVVMxDjAMBgNV +BAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9TU0wgQ29ycG9yYXRpb24xNDAy +BgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYw +MjEyMTgxNTIzWhcNNDEwMjEyMTgxNTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMx +EDAOBgNVBAcMB0hvdXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NM +LmNvbSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMAVIbc/R/fALhBYlzccBYy +3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1KthkuWnBaBu2+8KGwytAJKaNjMGEwHQYDVR0O +BBYEFFvKXuXe0oGqzagtZFG22XKbl+ZPMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe +5d7SgarNqC1kUbbZcpuX5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJ +N+vp1RPZytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZgh5Mm +m7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- +""" \ No newline at end of file diff --git a/conans/client/rest/conan_requester.py b/conans/client/rest/conan_requester.py new file mode 100644 index 000000000..49b8bfe5c --- /dev/null +++ b/conans/client/rest/conan_requester.py @@ -0,0 +1,68 @@ +import fnmatch +import os + +from conans.util.files import save + + +class ConanRequester(object): + + def __init__(self, requester, client_cache): + self.proxies = client_cache.conan_config.proxies or {} + self._no_proxy_match = [el.strip() for el in + self.proxies.pop("no_proxy_match", "").split(",")] + + # Retrocompatibility with deprecated no_proxy + # Account for the requests NO_PROXY env variable, not defined as a proxy like http= + no_proxy = self.proxies.pop("no_proxy", None) + if no_proxy: + os.environ["NO_PROXY"] = no_proxy + + self._requester = requester + self._client_cache = client_cache + + if not os.path.exists(self._client_cache.cacert_path): + from conans.client.rest.cacert import cacert + save(self._client_cache.cacert_path, cacert) + + if not os.path.exists(client_cache.client_cert_path): + 
self._client_certificates = None + else: + if os.path.exists(client_cache.client_cert_key_path): + # Requests can accept a tuple with cert and key, or just an string with a + # file having both + self._client_certificates = (client_cache.client_cert_path, + client_cache.client_cert_key_path) + else: + self._client_certificates = client_cache.client_cert_path + + def _should_skip_proxy(self, url): + + for entry in self._no_proxy_match: + if fnmatch.fnmatch(url, entry): + return True + + return False + + def _add_kwargs(self, url, kwargs): + if kwargs.get("verify", None) is True: + kwargs["verify"] = self._client_cache.cacert_path + else: + kwargs["verify"] = False + kwargs["cert"] = self._client_certificates + if self.proxies: + if not self._should_skip_proxy(url): + kwargs["proxies"] = self.proxies + return kwargs + + def get(self, url, **kwargs): + return self._requester.get(url, **self._add_kwargs(url, kwargs)) + + def put(self, url, **kwargs): + return self._requester.put(url, **self._add_kwargs(url, kwargs)) + + def delete(self, url, **kwargs): + return self._requester.delete(url, **self._add_kwargs(url, kwargs)) + + def post(self, url, **kwargs): + return self._requester.post(url, **self._add_kwargs(url, kwargs)) + diff --git a/conans/client/rest/rest_client.py b/conans/client/rest/rest_client.py index df21c11b0..69cb49fad 100644 --- a/conans/client/rest/rest_client.py +++ b/conans/client/rest/rest_client.py @@ -6,7 +6,7 @@ import json from conans.paths import CONAN_MANIFEST, CONANINFO import time from conans.client.rest.differ import diff_snapshots -from conans.util.files import decode_text, md5sum +from conans.util.files import decode_text, md5sum, save import os from conans.model.manifest import FileTreeManifest from conans.client.rest.uploader_downloader import Uploader, Downloader @@ -76,24 +76,10 @@ class RestApiClient(object): self.custom_headers = {} # Can set custom headers to each request self._output = output self.requester = requester - self._verify_ssl = True + # Remote manager will set it to True or False dynamically depending on the remote + self.verify_ssl = True self._put_headers = put_headers - @property - def verify_ssl(self): - from conans.client.rest import cacert - if self._verify_ssl: - # Necessary for pyinstaller, because it doesn't copy the cacert. 
- # It should not be necessary anymore the own conan.io certificate (fixed in server) - return cacert.file_path - else: - return False - - @verify_ssl.setter - def verify_ssl(self, check): - assert(isinstance(check, bool)) - self._verify_ssl = check - @property def auth(self): return JWTAuth(self.token) diff --git a/conans/client/rest/version_checker.py b/conans/client/rest/version_checker.py index c8346c9a2..c3e95885d 100644 --- a/conans/client/rest/version_checker.py +++ b/conans/client/rest/version_checker.py @@ -14,32 +14,29 @@ class VersionCheckerRequester(object): self.client_version = client_version self.min_server_compatible_version = min_server_compatible_version - def get(self, url, auth=None, headers=None, verify=None, stream=None): - headers = headers or {} - headers['X-Conan-Client-Version'] = str(self.client_version) - ret = self.requester.get(url, auth=auth, headers=headers, verify=verify, stream=stream) + def _add_version_header(self, kwargs): + if "headers" not in kwargs or kwargs["headers"] is None: + kwargs["headers"] = {} + kwargs["headers"]['X-Conan-Client-Version'] = str(self.client_version) + return kwargs + + def get(self, *args, **kwargs): + ret = self.requester.get(*args, **self._add_version_header(kwargs)) self._handle_ret(ret) return ret - def put(self, url, data, headers=None, verify=None, auth=None): - headers = headers or {} - headers['X-Conan-Client-Version'] = str(self.client_version) - ret = self.requester.put(url, data=data, headers=headers, verify=verify, auth=auth) + def put(self, *args, **kwargs): + ret = self.requester.put(*args, **self._add_version_header(kwargs)) self._handle_ret(ret) return ret - def delete(self, url, auth, headers, verify=None): - headers = headers or {} - headers['X-Conan-Client-Version'] = str(self.client_version) - ret = self.requester.delete(url, auth=auth, headers=headers, verify=verify) + def delete(self, *args, **kwargs): + ret = self.requester.delete(*args, **self._add_version_header(kwargs)) self._handle_ret(ret) return ret - def post(self, url, auth=None, headers=None, verify=None, stream=None, data=None, json=None): - headers = headers or {} - headers['X-Conan-Client-Version'] = str(self.client_version) - ret = self.requester.post(url, auth=auth, headers=headers, - verify=verify, stream=stream, data=data, json=json) + def post(self, *args, **kwargs): + ret = self.requester.post(*args, **self._add_version_header(kwargs)) self._handle_ret(ret) return ret diff --git a/conans/client/tools/net.py b/conans/client/tools/net.py index 90cf3a6b3..235fc69b5 100644 --- a/conans/client/tools/net.py +++ b/conans/client/tools/net.py @@ -48,10 +48,6 @@ def ftp_download(ip, filename, login='', password=''): def download(url, filename, verify=True, out=None, retry=2, retry_wait=5, overwrite=False, auth=None, headers=None): out = out or ConanOutput(sys.stdout, True) - if verify: - # We check the certificate using a list of known verifiers - import conans.client.rest.cacert as cacert - verify = cacert.file_path downloader = Downloader(_global_requester, out, verify=verify) downloader.download(url, filename, retry=retry, retry_wait=retry_wait, overwrite=overwrite, auth=auth, headers=headers) diff --git a/conans/client/tools/oss.py b/conans/client/tools/oss.py index 322424c49..cd7aa79b3 100644 --- a/conans/client/tools/oss.py +++ b/conans/client/tools/oss.py @@ -34,10 +34,16 @@ def cpu_count(): def detected_architecture(): # FIXME: Very weak check but not very common to run conan in other architectures - if "64" in platform.machine(): + 
machine = platform.machine() + if "64" in machine: return "x86_64" - elif "86" in platform.machine(): + elif "86" in machine: return "x86" + elif "armv8" in machine: + return "armv8" + elif "armv7" in machine: + return "armv7" + return None # DETECT OS, VERSION AND DISTRIBUTIONS diff --git a/conans/paths.py b/conans/paths.py index 1e8719966..0980e818d 100644 --- a/conans/paths.py +++ b/conans/paths.py @@ -160,3 +160,4 @@ class SimplePaths(object): p = normpath(join(self.conan(package_reference.conan), PACKAGES_FOLDER, package_reference.package_id)) return path_shortener(p, short_paths) + diff --git a/conans/tools.py b/conans/tools.py index 3289293a8..20fa1c1e6 100644 --- a/conans/tools.py +++ b/conans/tools.py @@ -2,8 +2,8 @@ from __future__ import print_function import requests +# noinspection PyUnresolvedReferences from conans.client.tools import * -from conans.client.output import ConanOutput # noinspection PyUnresolvedReferences from conans.util.env_reader import get_env # noinspection PyUnresolvedReferences @@ -27,4 +27,6 @@ def set_global_instances(the_output, the_requester): _net._global_requester = the_requester -set_global_instances(ConanOutput(sys.stdout), requests) +# Assign a default, will be overwritten in the Factory of the ConanAPI +out = ConanOutput(sys.stdout) +set_global_instances(out, requests)
Conan running on ARM device: detected arch issues
When running Conan on an ARM device, `detected_architecture` does not detect the ARM architecture. As a consequence, Conan assumed it was cross-building and adjusted a wrong triplet in Autotools, causing the build to fail.
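For readability, here is the patched `detected_architecture` from `conans/client/tools/oss.py`, reconstructed from the diff in the patch above; the `armv8`/`armv7` branches are the new part. This is a re-statement of the patch for clarity, not an independent implementation.

```python
import platform


def detected_architecture():
    # FIXME: Very weak check but not very common to run conan in other architectures
    machine = platform.machine()
    if "64" in machine:
        return "x86_64"
    elif "86" in machine:
        return "x86"
    elif "armv8" in machine:
        return "armv8"
    elif "armv7" in machine:
        return "armv7"

    return None
```

On an ARM board where `platform.machine()` reports, e.g., `armv7l`, the function now returns `armv7` instead of `None`, so Conan no longer treats the build as cross-building with a wrong Autotools triplet.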
conan-io/conan
diff --git a/conans/test/build_helpers/autotools_configure_test.py b/conans/test/build_helpers/autotools_configure_test.py index 63965a59a..83eced2eb 100644 --- a/conans/test/build_helpers/autotools_configure_test.py +++ b/conans/test/build_helpers/autotools_configure_test.py @@ -387,9 +387,11 @@ class AutoToolsConfigureTest(unittest.TestCase): self.assertEquals(be.vars["CPPFLAGS"], "MyCppFlag") def cross_build_flags_test(self): - def get_values(this_os, this_arch, setting_os, setting_arch): + + def get_values(this_os, this_arch, setting_os, setting_arch, compiler=None): settings = MockSettings({"arch": setting_arch, - "os": setting_os}) + "os": setting_os, + "compiler": compiler}) conanfile = MockConanfile(settings) conanfile.settings = settings be = AutoToolsBuildEnvironment(conanfile) @@ -415,7 +417,17 @@ class AutoToolsConfigureTest(unittest.TestCase): build, host, target = get_values("Linux", "x86_64", "Linux", "x86") self.assertEquals(build, "x86_64-linux-gnu") - self.assertEquals(host, "i686-linux-gnu") + self.assertEquals(host, "x86-linux-gnu") + self.assertFalse(target) + + build, host, target = get_values("Linux", "x86_64", "Windows", "x86", compiler="gcc") + self.assertEquals(build, "x86_64-linux-gnu") + self.assertEquals(host, "i686-w64-mingw32") + self.assertFalse(target) + + build, host, target = get_values("Linux", "x86_64", "Windows", "x86", compiler="Visual Studio") + self.assertEquals(build, "x86_64-linux-gnu") + self.assertEquals(host, "i686-windows-msvc") # Not very common but exists sometimes self.assertFalse(target) build, host, target = get_values("Linux", "x86_64", "Linux", "armv7hf") @@ -454,12 +466,12 @@ class AutoToolsConfigureTest(unittest.TestCase): self.assertEquals(build, "x86_64-linux-gnu") self.assertEquals(host, "arm-linux-androideabi") - build, host, target = get_values("Linux", "x86_64", "Windows", "x86") - self.assertEquals(build, "x86_64-w64-mingw32") + build, host, target = get_values("Linux", "x86_64", "Windows", "x86", compiler="gcc") + self.assertEquals(build, "x86_64-linux-gnu") self.assertEquals(host, "i686-w64-mingw32") - build, host, target = get_values("Linux", "x86_64", "Windows", "x86_64") - self.assertEquals(build, "x86_64-w64-mingw32") + build, host, target = get_values("Linux", "x86_64", "Windows", "x86_64", compiler="gcc") + self.assertEquals(build, "x86_64-linux-gnu") self.assertEquals(host, "x86_64-w64-mingw32") build, host, target = get_values("Windows", "x86_64", "Windows", "x86_64") @@ -472,19 +484,14 @@ class AutoToolsConfigureTest(unittest.TestCase): self.assertFalse(host) self.assertFalse(target) - build, host, target = get_values("Windows", "x86_64", "Windows", "x86") + build, host, target = get_values("Windows", "x86_64", "Windows", "x86", compiler="gcc") self.assertEquals(build, "x86_64-w64-mingw32") self.assertEquals(host, "i686-w64-mingw32") self.assertFalse(target) - build, host, target = get_values("Windows", "x86_64", "Linux", "armv7hf") - self.assertFalse(build) - self.assertFalse(host) - self.assertFalse(target) - - build, host, target = get_values("Windows", "x86_64", "Linux", "x86_64") - self.assertFalse(build) - self.assertFalse(host) + build, host, target = get_values("Windows", "x86_64", "Linux", "armv7hf", compiler="gcc") + self.assertEquals(build, "x86_64-w64-mingw32") + self.assertEquals(host, "arm-linux-gnueabihf") self.assertFalse(target) build, host, target = get_values("Darwin", "x86_64", "Android", "armv7hf") @@ -504,9 +511,9 @@ class AutoToolsConfigureTest(unittest.TestCase): self.assertEquals(build, 
"x86_64-apple-darwin") self.assertEquals(host, "arm-apple-darwin") - build, host, target = get_values("Darwin", "x86_64", "tvOS", "arm64") + build, host, target = get_values("Darwin", "x86_64", "tvOS", "armv8") self.assertEquals(build, "x86_64-apple-darwin") - self.assertEquals(host, "arm-apple-darwin") + self.assertEquals(host, "aarch64-apple-darwin") def test_pkg_config_paths(self): if platform.system() == "Windows": @@ -557,10 +564,9 @@ class HelloConan(ConanFile): ab.configure(target="i686-apple-darwin") self.assertEquals(runner.command_called, "./configure --target=i686-apple-darwin") - def test_make_targets(self): runner = RunnerMock() - conanfile = MockConanfile(MockSettings({}),None,runner) + conanfile = MockConanfile(MockSettings({}), None, runner) ab = AutoToolsBuildEnvironment(conanfile) ab.configure() diff --git a/conans/test/command/conan_get_test.py b/conans/test/command/conan_get_test.py index d1262ca6a..08c710e26 100644 --- a/conans/test/command/conan_get_test.py +++ b/conans/test/command/conan_get_test.py @@ -92,7 +92,8 @@ class HelloConan(ConanFile): self.assertIn(self.conanfile, self.client.user_io.out) # List package dir - self.client.run('get Hello0/0.1@lasote/channel "." -p 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9 --raw -r default') + self.client.run('get Hello0/0.1@lasote/channel "." ' + ' -p 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9 --raw -r default') self.assertEquals("conan_package.tgz\nconaninfo.txt\nconanmanifest.txt\n", self.client.user_io.out) diff --git a/conans/test/command/config_install_test.py b/conans/test/command/config_install_test.py index af7db654a..defa29508 100644 --- a/conans/test/command/config_install_test.py +++ b/conans/test/command/config_install_test.py @@ -88,6 +88,12 @@ Other/1.2@user/channel conan-center save(os.path.join(self.client.client_cache.profiles_path, "default"), "#default profile empty") save(os.path.join(self.client.client_cache.profiles_path, "linux"), "#empty linux profile") + self.old_env = dict(os.environ) + + def tearDown(self): + os.environ.clear() + os.environ.update(self.old_env) + def _create_profile_folder(self, folder=None): folder = folder or temp_folder(path_with_spaces=False) save_files(folder, {"settings.yml": settings_yml, @@ -275,7 +281,7 @@ class Pkg(ConanFile): self._create_zip(filename) def download_verify_true(obj, url, filename, **kwargs): # @UnusedVariable - self.assertTrue(obj.verify.endswith("cacert.pem")) + self.assertTrue(obj.verify) self._create_zip(filename) with patch.object(Downloader, 'download', new=download_verify_false): diff --git a/conans/test/functional/client_certs_test.py b/conans/test/functional/client_certs_test.py new file mode 100644 index 000000000..a571e2ecf --- /dev/null +++ b/conans/test/functional/client_certs_test.py @@ -0,0 +1,35 @@ +import os +import unittest + +from conans import tools +from conans.test.utils.tools import TestClient, TestServer + + +class ClientCertsTest(unittest.TestCase): + + def pic_client_certs_test(self): + + class MyRequester(object): + + def __init__(*args, **kwargs): + pass + + def get(self, _, **kwargs): + return kwargs.get("cert", None) + + client = TestClient(requester_class=MyRequester) + self.assertIsNone(client.requester.get("url")) + + tools.save(client.client_cache.client_cert_path, "Fake cert") + client.init_dynamic_vars() + + self.assertEquals(client.requester.get("url"), client.client_cache.client_cert_path) + + tools.save(client.client_cache.client_cert_path, "Fake cert") + tools.save(client.client_cache.client_cert_key_path, "Fake key") + 
client.init_dynamic_vars() + self.assertEquals(client.requester.get("url"), (client.client_cache.client_cert_path, + client.client_cache.client_cert_key_path)) + + # assert that the cacert file is created + self.assertTrue(os.path.exists(client.client_cache.cacert_path)) diff --git a/conans/test/functional/proxies_conf_test.py b/conans/test/functional/proxies_conf_test.py index 8f2beaa78..c1b27c56d 100644 --- a/conans/test/functional/proxies_conf_test.py +++ b/conans/test/functional/proxies_conf_test.py @@ -23,7 +23,39 @@ no_proxy=http://someurl,http://otherurl.com http=http:/conan.url """ save(client.client_cache.conan_conf_path, conf) + client.client_cache.invalidate() requester = get_basic_requester(client.client_cache) - self.assertEqual(requester.proxies, {"https": None, - "http": "http:/conan.url"}) + + def verify_proxies(url, **kwargs): + self.assertEquals(kwargs["proxies"], {"https": None, "http": "http:/conan.url"}) + return "mocked ok!" + + requester._requester.get = verify_proxies self.assertEqual(os.environ["NO_PROXY"], "http://someurl,http://otherurl.com") + + self.assertEquals(requester.get("MyUrl"), "mocked ok!") + + def new_proxy_exclude_test(self): + + class MyRequester(object): + + def __init__(*args, **kwargs): + pass + + def get(self, _, **kwargs): + return "excluded!" if "proxies" not in kwargs else "not excluded!" + + client = TestClient(requester_class=MyRequester) + conf = """ +[proxies] +https=None +no_proxy_match=MyExcludedUrl*, *otherexcluded_one* +http=http://conan.url + """ + save(client.client_cache.conan_conf_path, conf) + client.init_dynamic_vars() + + self.assertEquals(client.requester.get("MyUrl"), "not excluded!") + self.assertEquals(client.requester.get("**otherexcluded_one***"), "excluded!") + self.assertEquals(client.requester.get("MyExcludedUrl***"), "excluded!") + self.assertEquals(client.requester.get("**MyExcludedUrl***"), "not excluded!") diff --git a/conans/test/functional/verify_ssl_test.py b/conans/test/functional/verify_ssl_test.py index 1eb1bdc48..3cee98082 100644 --- a/conans/test/functional/verify_ssl_test.py +++ b/conans/test/functional/verify_ssl_test.py @@ -13,8 +13,8 @@ class RequesterMockTrue(object): def __init__(self, *args, **kwargs): pass - def get(self, url, auth=None, headers=None, verify=None, stream=None): - assert("cacert.pem" in verify) + def get(self, url, *args, **kwargs): + assert("cacert.pem" in kwargs["verify"]) return resp @@ -23,8 +23,8 @@ class RequesterMockFalse(object): def __init__(self, *args, **kwargs): pass - def get(self, url, auth=None, headers=None, verify=None, stream=None): - assert(verify is False) + def get(self, url, *args, **kwargs): + assert(kwargs["verify"] is False) return resp diff --git a/conans/test/remote/put_properties_test.py b/conans/test/remote/put_properties_test.py index 6c839ec6c..cc09af1bf 100644 --- a/conans/test/remote/put_properties_test.py +++ b/conans/test/remote/put_properties_test.py @@ -24,12 +24,12 @@ class PutPropertiesTest(unittest.TestCase): wanted_vars = {"MyHeader1": "MyHeaderValue1;MyHeaderValue2", "Other": "Value"} class RequesterCheckHeaders(TestRequester): - def put(self, url, data, headers=None, verify=None, auth=None): + def put(self, url, **kwargs): for name, value in wanted_vars.items(): - value1 = headers[name] + value1 = kwargs["headers"][name] if value1 != value: raise Exception() - return super(RequesterCheckHeaders, self).put(url, data, headers, verify, auth) + return super(RequesterCheckHeaders, self).put(url, **kwargs) self.client = 
TestClient(requester_class=RequesterCheckHeaders, servers=self.servers, users={"default": [("lasote", "mypass")]}) diff --git a/conans/test/remote/rest_api_test.py b/conans/test/remote/rest_api_test.py index d77975995..270f9de17 100644 --- a/conans/test/remote/rest_api_test.py +++ b/conans/test/remote/rest_api_test.py @@ -41,7 +41,6 @@ class RestApiTest(unittest.TestCase): token = cls.api.authenticate("private_user", "private_pass") cls.api.token = token - @classmethod def tearDownClass(cls): cls.server.stop() diff --git a/conans/test/util/build_sln_command_test.py b/conans/test/util/build_sln_command_test.py index 2db28390b..0eebc83dc 100644 --- a/conans/test/util/build_sln_command_test.py +++ b/conans/test/util/build_sln_command_test.py @@ -3,6 +3,7 @@ import unittest + from conans import tools from conans.tools import build_sln_command, cpu_count from conans.errors import ConanException @@ -17,6 +18,8 @@ import os @attr('visual_studio') class BuildSLNCommandTest(unittest.TestCase): + + def no_configuration_test(self): dummy = """GlobalSection EndGlobalSection diff --git a/conans/test/utils/conanfile.py b/conans/test/utils/conanfile.py index 6285cdea3..aa42e7c3e 100644 --- a/conans/test/utils/conanfile.py +++ b/conans/test/utils/conanfile.py @@ -1,3 +1,5 @@ +from conans.test.utils.tools import TestBufferConanOutput + class MockSettings(object): @@ -33,6 +35,8 @@ class MockConanfile(object): self.runner = runner self.options = options or MockOptions({}) self.generators = [] + self.output = TestBufferConanOutput() + self.should_configure = True self.should_build = True self.should_install = True diff --git a/conans/test/utils/tools.py b/conans/test/utils/tools.py index 035e99488..f5186f17d 100644 --- a/conans/test/utils/tools.py +++ b/conans/test/utils/tools.py @@ -21,24 +21,20 @@ from conans.client.conan_api import migrate_and_get_client_cache, Conan from conans.client.conan_command_output import CommandOutputer from conans.client.conf import MIN_SERVER_COMPATIBLE_VERSION from conans.client.output import ConanOutput -from conans.client.remote_manager import RemoteManager from conans.client.remote_registry import RemoteRegistry -from conans.client.rest.auth_manager import ConanApiAuthManager -from conans.client.rest.rest_client import RestApiClient +from conans.client.rest.conan_requester import ConanRequester from conans.client.rest.uploader_downloader import IterableToFileAdapter -from conans.client.rest.version_checker import VersionCheckerRequester -from conans.client.store.localdb import LocalDB from conans.client.userio import UserIO from conans.model.version import Version -from conans.search.search import DiskSearchManager, DiskSearchAdapter +from conans.search.search import DiskSearchManager from conans.test.server.utils.server_launcher import (TESTING_REMOTE_PRIVATE_USER, TESTING_REMOTE_PRIVATE_PASS, TestServerLauncher) from conans.test.utils.runner import TestRunner from conans.test.utils.test_files import temp_folder +from conans.tools import set_global_instances from conans.util.files import save_files, save, mkdir from conans.util.log import logger -from conans.tools import set_global_instances class TestingResponse(object): @@ -90,7 +86,8 @@ class TestRequester(object): def __init__(self, test_servers): self.test_servers = test_servers - def _get_url_path(self, url): + @staticmethod + def _get_url_path(url): # Remove schema from url _, _, path, query, _ = urlsplit(url) url = urlunsplit(("", "", path, query, "")) @@ -103,68 +100,80 @@ class TestRequester(object): raise 
Exception("Testing error: Not remote found") - def get(self, url, auth=None, headers=None, verify=None, stream=None): - headers = headers or {} - app, url = self._prepare_call(url, headers, auth) + def get(self, url, **kwargs): + app, url = self._prepare_call(url, kwargs) if app: - response = app.get(url, headers=headers, expect_errors=True) + response = app.get(url, **kwargs) return TestingResponse(response) else: - return requests.get(url, headers=headers) + return requests.get(url, **kwargs) - def put(self, url, data, headers=None, verify=None, auth=None): - headers = headers or {} - app, url = self._prepare_call(url, headers, auth=auth) + def put(self, url, **kwargs): + app, url = self._prepare_call(url, kwargs) if app: - if isinstance(data, IterableToFileAdapter): - data_accum = b"" - for tmp in data: - data_accum += tmp - data = data_accum - response = app.put(url, data, expect_errors=True, headers=headers) + response = app.put(url, **kwargs) return TestingResponse(response) else: - return requests.put(url, data=data.read()) + return requests.put(url, **kwargs) - def delete(self, url, auth, headers, verify=None): - headers = headers or {} - app, url = self._prepare_call(url, headers, auth) + def delete(self, url, **kwargs): + app, url = self._prepare_call(url, kwargs) if app: - response = app.delete(url, "", headers=headers, expect_errors=True) + response = app.delete(url, **kwargs) return TestingResponse(response) else: - return requests.delete(url, headers=headers) + return requests.delete(url, **kwargs) - def post(self, url, auth=None, headers=None, verify=None, stream=None, data=None, json=None): - headers = headers or {} - app, url = self._prepare_call(url, headers, auth) + def post(self, url, **kwargs): + app, url = self._prepare_call(url, kwargs) if app: - content_type = None - if json: - import json as JSON - data = JSON.dumps(json) - content_type = "application/json" - response = app.post(url, data, headers=headers, - content_type=content_type, expect_errors=True) + response = app.post(url, **kwargs) return TestingResponse(response) else: - requests.post(url, data=data, json=json) + requests.post(url, **kwargs) - def _prepare_call(self, url, headers, auth): + def _prepare_call(self, url, kwargs): if not url.startswith("http://fake"): # Call to S3 (or external), perform a real request return None, url app = self._get_wsgi_app(url) url = self._get_url_path(url) # Remove http://server.com - self._set_auth_headers(auth, headers) + self._set_auth_headers(kwargs) + + if app: + kwargs["expect_errors"] = True + kwargs.pop("stream", None) + kwargs.pop("verify", None) + kwargs.pop("auth", None) + kwargs.pop("cert", None) + if "data" in kwargs: + if isinstance(kwargs["data"], IterableToFileAdapter): + data_accum = b"" + for tmp in kwargs["data"]: + data_accum += tmp + kwargs["data"] = data_accum + kwargs["params"] = kwargs["data"] + del kwargs["data"] # Parameter in test app is called "params" + if kwargs.get("json"): + # json is a high level parameter of requests, not a generic one + # translate it to data and content_type + import json + kwargs["params"] = json.dumps(kwargs["json"]) + kwargs["content_type"] = "application/json" + kwargs.pop("json", None) + + return app, url - def _set_auth_headers(self, auth, headers): - if auth: + @staticmethod + def _set_auth_headers(kwargs): + if kwargs.get("auth"): mock_request = Mock() mock_request.headers = {} - auth(mock_request) - headers.update(mock_request.headers) + kwargs["auth"](mock_request) + if "headers" not in kwargs: + 
kwargs["headers"] = {} + kwargs["headers"].update(mock_request.headers) class TestServer(object): @@ -318,8 +327,7 @@ class TestClient(object): self.storage_folder = os.path.join(self.base_folder, ".conan", "data") self.client_cache = ClientCache(self.base_folder, self.storage_folder, TestBufferConanOutput()) - search_adapter = DiskSearchAdapter() - self.search_manager = DiskSearchManager(self.client_cache, search_adapter) + self.search_manager = DiskSearchManager(self.client_cache) self.requester_class = requester_class self.conan_runner = runner @@ -379,25 +387,19 @@ class TestClient(object): real_servers = True if real_servers: - requester = requests + requester = requests.Session() else: if self.requester_class: requester = self.requester_class(self.servers) else: requester = TestRequester(self.servers) - # Verify client version against remotes - self.requester = VersionCheckerRequester(requester, self.client_version, - self.min_server_compatible_version, output) - - put_headers = self.client_cache.read_put_headers() - self.rest_api_client = RestApiClient(output, requester=self.requester, put_headers=put_headers) - # To store user and token - self.localdb = LocalDB(self.client_cache.localdb) - # Wraps RestApiClient to add authentication support (same interface) - auth_manager = ConanApiAuthManager(self.rest_api_client, self.user_io, self.localdb) - # Handle remote connections - self.remote_manager = RemoteManager(self.client_cache, auth_manager, self.user_io.out) + self.requester = ConanRequester(requester, self.client_cache) + + self.localdb, self.rest_api_client, self.remote_manager = Conan.instance_remote_manager( + self.requester, self.client_cache, + self.user_io, self.client_version, + self.min_server_compatible_version) set_global_instances(output, self.requester)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": -1, "issue_text_score": 2, "test_score": -1 }, "num_modified_files": 11 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc", "apt-get install -y pkg-config", "apt-get install -y ninja-build", "apt-get install -y meson" ], "python": "3.6", "reqs_path": [ "conans/requirements.txt", "conans/requirements_osx.txt", "conans/requirements_server.txt", "conans/requirements_dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asn1crypto==1.5.1 astroid==1.6.6 attrs==22.2.0 beautifulsoup4==4.12.3 bottle==0.12.25 certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 codecov==2.1.13 colorama==0.3.9 -e git+https://github.com/conan-io/conan.git@8151c4c39a5ffbf42f21a10d586fe88b8f1c8f04#egg=conan coverage==4.2 cryptography==2.1.4 distro==1.1.0 execnet==1.9.0 fasteners==0.19 future==0.16.0 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 lazy-object-proxy==1.7.1 mccabe==0.7.0 mock==1.3.0 ndg-httpsclient==0.4.4 node-semver==0.2.0 nose==1.3.7 packaging==21.3 parameterized==0.8.1 patch==1.16 pbr==6.1.1 pluggy==1.0.0 pluginbase==0.7 py==1.11.0 pyasn==1.5.0b7 pyasn1==0.5.1 pycparser==2.21 Pygments==2.14.0 PyJWT==1.7.1 pylint==1.8.4 pyOpenSSL==17.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 PyYAML==3.12 requests==2.27.1 six==1.17.0 soupsieve==2.3.2.post1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 waitress==2.0.0 WebOb==1.8.9 WebTest==2.0.35 wrapt==1.16.0 zipp==3.6.0
name: conan channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - asn1crypto==1.5.1 - astroid==1.6.6 - attrs==22.2.0 - beautifulsoup4==4.12.3 - bottle==0.12.25 - cffi==1.15.1 - charset-normalizer==2.0.12 - codecov==2.1.13 - colorama==0.3.9 - coverage==4.2 - cryptography==2.1.4 - distro==1.1.0 - execnet==1.9.0 - fasteners==0.19 - future==0.16.0 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - mock==1.3.0 - ndg-httpsclient==0.4.4 - node-semver==0.2.0 - nose==1.3.7 - packaging==21.3 - parameterized==0.8.1 - patch==1.16 - pbr==6.1.1 - pluggy==1.0.0 - pluginbase==0.7 - py==1.11.0 - pyasn==1.5.0b7 - pyasn1==0.5.1 - pycparser==2.21 - pygments==2.14.0 - pyjwt==1.7.1 - pylint==1.8.4 - pyopenssl==17.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - pyyaml==3.12 - requests==2.27.1 - six==1.17.0 - soupsieve==2.3.2.post1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - waitress==2.0.0 - webob==1.8.9 - webtest==2.0.35 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/conan
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_cppstd", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_make_targets", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_mocked_methods", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_previous_env", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_variables", "conans/test/command/config_install_test.py::ConfigInstallTest::test_without_profile_folder", "conans/test/functional/proxies_conf_test.py::ProxiesConfTest::test_requester" ]
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_pkg_config_paths", "conans/test/command/conan_get_test.py::ConanGetTest::test_get_local", "conans/test/command/conan_get_test.py::ConanGetTest::test_get_remote", "conans/test/command/conan_get_test.py::ConanGetTest::test_not_found" ]
[]
[]
MIT License
2,293
[ "conans/client/rest/rest_client.py", "conans/client/client_cache.py", "conans/client/rest/cacert.py", "conans/client/conan_api.py", "conans/tools.py", "conans/client/build/autotools_environment.py", "conans/client/conf/__init__.py", "conans/paths.py", "conans/client/tools/net.py", "conans/client/tools/oss.py", "conans/client/rest/version_checker.py", "conans/client/rest/conan_requester.py" ]
[ "conans/client/rest/rest_client.py", "conans/client/client_cache.py", "conans/client/rest/cacert.py", "conans/client/conan_api.py", "conans/tools.py", "conans/client/build/autotools_environment.py", "conans/client/conf/__init__.py", "conans/paths.py", "conans/client/tools/net.py", "conans/client/tools/oss.py", "conans/client/rest/version_checker.py", "conans/client/rest/conan_requester.py" ]
datosgobar__pydatajson-126
c71794df9f7e570d86394a7af9f038aeb85b64d4
2018-03-13 14:04:35
adb85a7de7dfa073ddf9817a5fe2d125f9ce4e54
diff --git a/docs/MANUAL.md b/docs/MANUAL.md index 39d9678..cd72809 100644 --- a/docs/MANUAL.md +++ b/docs/MANUAL.md @@ -87,13 +87,14 @@ Existen dos métodos, cuyos reportes se incluyen diariamente entre los archivos ### Métodos para federación de datasets - **pydatajson.DataJson.push_dataset_to_ckan()**: Copia la metadata de un dataset y la escribe en un portal de CKAN. -Toma los siguientes parámetros: - - **catalog_id**: El prefijo que va a preceder el id del dataset en el portal destino. +Toma los siguientes parámetros: - **owner_org**: La organización a la que pertence el dataset. Debe encontrarse en el portal de destino. - **dataset_origin_identifier**: Identificador del dataset en el catálogo de origen. - **portal_url**: URL del portal de CKAN de destino. - **apikey**: La apikey de un usuario del portal de destino con los permisos para crear el dataset bajo la organización pasada como parámetro. + - **catalog_id** (opcional, default: None): El prefijo que va a preceder el id del dataset en el portal destino, + separado por un guión bajo. - **demote_superThemes** (opcional, default: True):Si está en true, los ids de los themes del dataset, se escriben como groups de CKAN. - **demote_themes** (opcional, default: True): Si está en true, los labels de los themes del dataset, se escriben como diff --git a/pydatajson/ckan_utils.py b/pydatajson/ckan_utils.py index b915bde..f71570d 100644 --- a/pydatajson/ckan_utils.py +++ b/pydatajson/ckan_utils.py @@ -14,12 +14,12 @@ def append_attribute_to_extra(package, dataset, attribute, serialize=False): package['extras'].append({'key': attribute, 'value': value}) -def map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, +def map_dataset_to_package(dataset, owner_org, theme_taxonomy, catalog_id=None, demote_superThemes=True, demote_themes=True): package = dict() package['extras'] = [] # Obligatorios - package['id'] = catalog_id+'_'+dataset['identifier'] + package['id'] = catalog_id+'_'+dataset['identifier'] if catalog_id else dataset['identifier'] package['name'] = title_to_name(dataset['title'], decode=False) package['title'] = dataset['title'] package['private'] = False @@ -88,12 +88,12 @@ def convert_iso_string_to_utc(date_string): return utc_date_time.isoformat() -def map_distributions_to_resources(distributions, catalog_id): +def map_distributions_to_resources(distributions, catalog_id=None): resources = [] for distribution in distributions: resource = dict() # Obligatorios - resource['id'] = catalog_id + '_' + distribution['identifier'] + resource['id'] = catalog_id + '_' + distribution['identifier'] if catalog_id else distribution['identifier'] resource['name'] = distribution['title'] resource['url'] = distribution['downloadURL'] resource['created'] = convert_iso_string_to_utc(distribution['issued']) diff --git a/pydatajson/federation.py b/pydatajson/federation.py index 7807dfe..2c31623 100644 --- a/pydatajson/federation.py +++ b/pydatajson/federation.py @@ -9,17 +9,17 @@ from .ckan_utils import map_dataset_to_package from .search import get_datasets -def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifier, portal_url, apikey, - demote_superThemes=True, demote_themes=True): +def push_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier, portal_url, apikey, + catalog_id=None, demote_superThemes=True, demote_themes=True): """Escribe la metadata de un dataset en el portal pasado por parámetro. Args: catalog (DataJson): El catálogo de origen que contiene el dataset. 
- catalog_id (str): El prefijo con el que va a preceder el id del dataset en catálogo destino. owner_org (str): La organización a la cual pertence el dataset. dataset_origin_identifier (str): El id del dataset que se va a federar. portal_url (str): La URL del portal CKAN de destino. apikey (str): La apikey de un usuario con los permisos que le permitan crear o actualizar el dataset. + catalog_id (str): El prefijo con el que va a preceder el id del dataset en catálogo destino. demote_superThemes(bool): Si está en true, los ids de los super themes del dataset, se propagan como grupo. demote_themes(bool): Si está en true, los labels de los themes del dataset, pasan a ser tags. Sino, se pasan como grupo. @@ -31,7 +31,7 @@ def push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifi ckan_portal = RemoteCKAN(portal_url, apikey=apikey) theme_taxonomy = catalog.themes - package = map_dataset_to_package(dataset, catalog_id, owner_org, theme_taxonomy, + package = map_dataset_to_package(dataset, owner_org, theme_taxonomy, catalog_id, demote_superThemes, demote_themes) # Get license id
Add an option to copy the original `identifier` without prepending the `catalog_id`

**Context**

Being able to write a dataset with its original identifier is useful when the function is used not as a federator, but as a way to restore a dataset's metadata from a previous backup.

**Proposal**

Add an optional argument `prepend_catalog_identifier = True` that indicates whether or not the catalog identifier should be prepended to the identifier of the dataset being written to CKAN.
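Illustrative only, not part of the record above: a minimal sketch of the two call patterns the optional `catalog_id` argument enables, following the `push_dataset_to_ckan` signature shown in the patch. The organization name, dataset id, portal URL, API key, and the local `data.json` catalog file are placeholders/assumptions.

```python
# Sketch of the intended usage of the optional catalog_id argument.
from pydatajson import DataJson  # assumed top-level export of the catalog class
from pydatajson.federation import push_dataset_to_ckan

catalog = DataJson("data.json")  # assumed local catalog file

# Federation: the package id in the target portal becomes "<catalog_id>_<dataset id>".
push_dataset_to_ckan(catalog, "some-org", "dataset-1",
                     "http://ckan.example.org", "some-apikey",
                     catalog_id="my-catalog")

# Restore from backup: omit catalog_id and the original dataset id is preserved.
push_dataset_to_ckan(catalog, "some-org", "dataset-1",
                     "http://ckan.example.org", "some-apikey")
```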
datosgobar/pydatajson
diff --git a/tests/test_ckan_integration.py b/tests/test_ckan_integration.py index 9b68543..9b76a24 100644 --- a/tests/test_ckan_integration.py +++ b/tests/test_ckan_integration.py @@ -57,8 +57,8 @@ class PushTestCase(unittest.TestCase): catalog_id = title_to_name(catalog['title']) dataset = catalog.datasets[0] dataset_id = dataset['identifier'] - return_id = push_dataset_to_ckan(catalog, catalog_id, "oficina-de-muestra", dataset_id, - self.portal_url, self.apikey) + return_id = push_dataset_to_ckan(catalog, "oficina-de-muestra", dataset_id, + self.portal_url, self.apikey, catalog_id=catalog_id,) self.assertEqual(return_id, catalog_id + '_' + dataset_id) @CKAN_VCR.use_cassette() @@ -66,12 +66,12 @@ class PushTestCase(unittest.TestCase): catalog = self.full_catalog catalog_id = title_to_name(catalog['title']) dataset_id = catalog.datasets[0]['identifier'] - push_dataset_to_ckan(catalog, catalog_id, "oficina-de-muestra", dataset_id, - self.portal_url, self.apikey) + push_dataset_to_ckan(catalog, "oficina-de-muestra", dataset_id, + self.portal_url, self.apikey, catalog_id=catalog_id,) catalog.datasets[0]['description'] = 'updated description' - return_id = push_dataset_to_ckan(catalog, catalog_id, "oficina-de-muestra", dataset_id, - self.portal_url, self.apikey) + return_id = push_dataset_to_ckan(catalog, "oficina-de-muestra", dataset_id, + self.portal_url, self.apikey, catalog_id=catalog_id,) data_dict = {'id': catalog_id + '_' + dataset_id} package = self.portal.call_action('package_show', data_dict=data_dict) @@ -83,21 +83,22 @@ class PushTestCase(unittest.TestCase): catalog_id = 'same-catalog-id' full_dataset = self.full_catalog.datasets[0] full_dataset_id = full_dataset['identifier'] - push_dataset_to_ckan(self.full_catalog, catalog_id, 'oficina-de-muestra', full_dataset_id, - self.portal_url, self.apikey) + push_dataset_to_ckan(self.full_catalog, 'oficina-de-muestra', full_dataset_id, + self.portal_url, self.apikey, catalog_id=catalog_id,) justice_dataset = self.justice_catalog.datasets[0] justice_dataset_id = justice_dataset['identifier'] - push_dataset_to_ckan(self.justice_catalog, catalog_id, 'oficina-de-muestra', justice_dataset_id, - self.portal_url, self.apikey) + push_dataset_to_ckan(self.justice_catalog, 'oficina-de-muestra', justice_dataset_id, + self.portal_url, self.apikey, catalog_id=catalog_id,) # Switch them and update full_dataset['distribution'], justice_dataset['distribution'] = \ justice_dataset['distribution'], full_dataset['distribution'] - full_package_id = push_dataset_to_ckan(self.full_catalog, catalog_id, 'oficina-de-muestra', full_dataset_id, - self.portal_url, self.apikey) - justice_package_id = push_dataset_to_ckan(self.justice_catalog, catalog_id, 'oficina-de-muestra', - justice_dataset_id, self.portal_url, self.apikey) + full_package_id = push_dataset_to_ckan(self.full_catalog,'oficina-de-muestra', full_dataset_id, + self.portal_url, self.apikey, catalog_id=catalog_id,) + justice_package_id = push_dataset_to_ckan(self.justice_catalog, 'oficina-de-muestra', + justice_dataset_id, self.portal_url, + self.apikey, catalog_id=catalog_id,) # Switch them back full_dataset['distribution'], justice_dataset['distribution'] = \ justice_dataset['distribution'], full_dataset['distribution'] diff --git a/tests/test_ckan_utils.py b/tests/test_ckan_utils.py index dca112c..3f20984 100644 --- a/tests/test_ckan_utils.py +++ b/tests/test_ckan_utils.py @@ -22,18 +22,25 @@ class DatasetConversionTestCase(unittest.TestCase): cls.dataset_id = cls.dataset.get('identifier') 
cls.distributions = cls.dataset['distribution'] + def test_catalog_id_is_prepended_to_dataset_id_if_passed(self): + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + self.assertEqual(self.catalog_id + '_' + self.dataset_id, package['id']) + + def test_dataset_id_is_preserved_if_catlog_id_is_not_passed(self): + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes) + self.assertEqual(self.dataset_id, package['id']) + def test_replicated_plain_attributes_are_corrext(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) plain_replicated_attributes = [('title', 'title'), ('notes', 'description'), ('url', 'landingPage')] for fst, snd in plain_replicated_attributes: self.assertEqual(self.dataset.get(snd), package.get(fst)) self.assertEqual('owner', package['owner_org']) - self.assertEqual(self.catalog_id+'_'+self.dataset_id, package['id']) def test_dataset_nested_replicated_attributes_stay_the_same(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) contact_point_nested = [('maintainer', 'fn'), ('maintainer_email', 'hasEmail')] for fst, snd in contact_point_nested: @@ -44,7 +51,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(self.dataset.get('publisher').get(snd), package.get(fst)) def test_dataset_array_attributes_are_correct(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) groups = [group['name'] for group in package.get('groups', [])] super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] try: @@ -67,8 +74,8 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords + theme_labels, tags) def test_themes_are_preserved_if_not_demoted(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', - self.catalog.themes, demote_themes=False) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, + catalog_id=self.catalog_id, demote_themes=False) groups = [group['name'] for group in package.get('groups', [])] super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] themes = self.dataset.get('theme', []) @@ -85,8 +92,8 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords, tags) def test_superThemes_dont_impact_groups_if_not_demoted(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', - self.catalog.themes, demote_superThemes=False) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, + catalog_id=self.catalog_id, demote_superThemes=False) groups = [group['name'] for group in package.get('groups', [])] tags = [tag['name'] for tag in package['tags']] keywords = self.dataset.get('keyword', []) @@ -105,8 +112,8 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords + theme_labels, tags) def test_preserve_themes_and_superThemes(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', - self.catalog.themes, False, False) + package = 
map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, + self.catalog_id, False, False) groups = [group['name'] for group in package.get('groups', [])] tags = [tag['name'] for tag in package['tags']] keywords = self.dataset.get('keyword', []) @@ -121,7 +128,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords, tags) def test_dataset_extra_attributes_are_correct(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) # extras are included in dataset if package['extras']: for extra in package['extras']: @@ -137,7 +144,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(dataset_value, extra_value) def test_dataset_extra_attributes_are_complete(self): - package = map_dataset_to_package(self.dataset, self.catalog_id, 'owner', self.catalog.themes) + package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) # dataset attributes are included in extras extra_attrs = ['issued', 'modified', 'accrualPeriodicity', 'temporal', 'language', 'spatial', 'superTheme'] for key in extra_attrs: @@ -148,8 +155,20 @@ class DatasetConversionTestCase(unittest.TestCase): resulting_dict = {'key': key, 'value': value} self.assertTrue(resulting_dict in package['extras']) + def test_catalog_id_is_prefixed_in_resource_id_if_passed(self): + resources = map_distributions_to_resources(self.distributions, self.catalog_id) + for resource in resources: + distribution = next(x for x in self.dataset['distribution'] if x['title'] == resource['name']) + self.assertEqual(self.catalog_id + '_' + distribution['identifier'], resource['id']) + + def test_resource_id_is_preserved_if_catalog_id_is_not_passed(self): + resources = map_distributions_to_resources(self.distributions) + for resource in resources: + distribution = next(x for x in self.dataset['distribution'] if x['title'] == resource['name']) + self.assertEqual(distribution['identifier'], resource['id']) + def test_resources_replicated_attributes_stay_the_same(self): - resources = map_distributions_to_resources(self.distributions, self.catalog_id+'_'+self.dataset_id) + resources = map_distributions_to_resources(self.distributions, self.catalog_id) for resource in resources: distribution = next(x for x in self.dataset['distribution'] if x['title'] == resource['name']) replicated_attributes = [('url', 'downloadURL'), @@ -163,7 +182,6 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(distribution.get(snd), resource.get(fst)) else: self.assertIsNone(resource.get(fst)) - self.assertEqual(self.catalog_id+'_'+self.dataset_id+'_'+distribution['identifier'], resource['id']) def test_resources_transformed_attributes_are_correct(self): resources = map_distributions_to_resources(self.distributions, self.catalog_id+'_'+self.dataset_id) diff --git a/tests/test_federation.py b/tests/test_federation.py index 0dc245d..e4a1d2e 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -47,8 +47,8 @@ class PushDatasetTestCase(unittest.TestCase): else: return [] mock_portal.return_value.call_action = mock_call_action - res_id = push_dataset_to_ckan(self.catalog, self.catalog_id, 'owner', - self.dataset['identifier'], 'portal', 'key') + res_id = push_dataset_to_ckan(self.catalog, 'owner', self.dataset['identifier'], + 'portal', 'key', catalog_id=self.catalog_id) self.assertEqual(self.catalog_id + 
'_' + self.dataset_id, res_id) @patch('pydatajson.federation.RemoteCKAN', autospec=True) @@ -61,10 +61,25 @@ class PushDatasetTestCase(unittest.TestCase): else: return [] mock_portal.return_value.call_action = mock_call_action - res_id = push_dataset_to_ckan(self.catalog, self.catalog_id, 'owner', - self.dataset['identifier'], 'portal', 'key') + res_id = push_dataset_to_ckan(self.catalog, 'owner', self.dataset['identifier'], + 'portal', 'key', catalog_id=self.catalog_id) self.assertEqual(self.catalog_id + '_' + self.dataset_id, res_id) + @patch('pydatajson.federation.RemoteCKAN', autospec=True) + def test_dataset_id_is_preserved_if_catalog_id_is_not_passed(self, mock_portal): + def mock_call_action(action, data_dict=None): + if action == 'package_update': + return {'id': data_dict['id']} + if action == 'package_create': + self.fail('should not be called') + else: + return [] + + mock_portal.return_value.call_action = mock_call_action + res_id = push_dataset_to_ckan(self.catalog, 'owner', self.dataset['identifier'], + 'portal', 'key') + self.assertEqual(self.dataset_id, res_id) + @patch('pydatajson.federation.RemoteCKAN', autospec=True) def test_tags_are_passed_correctly(self, mock_portal): themes = self.dataset['theme'] @@ -87,8 +102,8 @@ class PushDatasetTestCase(unittest.TestCase): return [] mock_portal.return_value.call_action = mock_call_action - res_id = push_dataset_to_ckan(self.catalog, self.catalog_id, 'owner', - self.dataset['identifier'], 'portal', 'key') + res_id = push_dataset_to_ckan(self.catalog, 'owner', self.dataset['identifier'], + 'portal', 'key', catalog_id=self.catalog_id) self.assertEqual(self.catalog_id + '_' + self.dataset_id, res_id) @patch('pydatajson.federation.RemoteCKAN', autospec=True) @@ -103,8 +118,8 @@ class PushDatasetTestCase(unittest.TestCase): else: return [] mock_portal.return_value.call_action = mock_call_action - push_dataset_to_ckan(self.catalog, self.catalog_id, 'owner', - self.dataset['identifier'], 'portal', 'key') + push_dataset_to_ckan(self.catalog, 'owner', self.dataset['identifier'], + 'portal', 'key', catalog_id=self.catalog_id) @patch('pydatajson.federation.RemoteCKAN', autospec=True) def test_dataset_without_license_sets_notspecified(self, mock_portal): @@ -119,8 +134,8 @@ class PushDatasetTestCase(unittest.TestCase): return [] mock_portal.return_value.call_action = mock_call_action - push_dataset_to_ckan(self.minimum_catalog, self.minimum_catalog_id, 'owner', - self.minimum_dataset['identifier'], 'portal', 'key') + push_dataset_to_ckan(self.minimum_catalog, 'owner', self.minimum_dataset['identifier'], + 'portal', 'key', catalog_id=self.minimum_catalog_id) class RemoveDatasetTestCase(unittest.TestCase):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 3 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "flake8", "pytest" ], "pre_install": [ "mkdir tests/temp" ], "python": "3.6", "reqs_path": [ "requirements.txt", "requirements_dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 argh==0.27.2 asn1crypto==1.5.1 attrs==22.2.0 Babel==2.11.0 bumpversion==0.5.3 certifi==2021.5.30 cffi==1.15.1 chardet==3.0.4 ckanapi==4.0 CommonMark==0.5.4 coverage==4.1 cryptography==2.1.4 distlib==0.3.9 docopt==0.6.2 docutils==0.18.1 et-xmlfile==1.1.0 filelock==3.4.1 flake8==2.6.0 idna==2.6 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 isodate==0.6.0 jdcal==1.4.1 Jinja2==3.0.3 jsonschema==2.6.0 MarkupSafe==2.0.1 mccabe==0.5.3 multidict==5.2.0 nose==1.3.7 openpyxl==2.4.11 packaging==21.3 pathtools==0.1.2 pkginfo==1.10.0 platformdirs==2.4.0 pluggy==0.13.1 pockets==0.9.1 py==1.11.0 pycodestyle==2.0.0 pycparser==2.21 -e git+https://github.com/datosgobar/pydatajson.git@c71794df9f7e570d86394a7af9f038aeb85b64d4#egg=pydatajson pyflakes==1.2.3 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.6.1 pytz==2025.2 PyYAML==3.11 recommonmark==0.4.0 requests==2.18.4 requests-toolbelt==1.0.0 rfc3987==1.3.7 six==1.11.0 snowballstemmer==2.2.0 Sphinx==1.5.2 sphinx-rtd-theme==0.2.4 sphinxcontrib-napoleon==0.6.1 tomli==1.2.3 tox==2.9.1 tqdm==4.64.1 twine==1.9.1 typing_extensions==4.1.1 unicodecsv==0.14.1 Unidecode==0.4.21 urllib3==1.22 vcrpy==1.11.1 virtualenv==20.17.1 watchdog==0.8.3 wrapt==1.16.0 yarl==1.7.2 zipp==3.6.0
name: pydatajson channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - argh==0.27.2 - asn1crypto==1.5.1 - attrs==22.2.0 - babel==2.11.0 - bumpversion==0.5.3 - cffi==1.15.1 - chardet==3.0.4 - ckanapi==4.0 - commonmark==0.5.4 - coverage==4.1 - cryptography==2.1.4 - distlib==0.3.9 - docopt==0.6.2 - docutils==0.18.1 - et-xmlfile==1.1.0 - filelock==3.4.1 - flake8==2.6.0 - idna==2.6 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - isodate==0.6.0 - jdcal==1.4.1 - jinja2==3.0.3 - jsonschema==2.6.0 - markupsafe==2.0.1 - mccabe==0.5.3 - multidict==5.2.0 - nose==1.3.7 - openpyxl==2.4.11 - packaging==21.3 - pathtools==0.1.2 - pkginfo==1.10.0 - platformdirs==2.4.0 - pluggy==0.13.1 - pockets==0.9.1 - py==1.11.0 - pycodestyle==2.0.0 - pycparser==2.21 - pyflakes==1.2.3 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.6.1 - pytz==2025.2 - pyyaml==3.11 - recommonmark==0.4.0 - requests==2.18.4 - requests-toolbelt==1.0.0 - rfc3987==1.3.7 - six==1.11.0 - snowballstemmer==2.2.0 - sphinx==1.5.2 - sphinx-rtd-theme==0.2.4 - sphinxcontrib-napoleon==0.6.1 - tomli==1.2.3 - tox==2.9.1 - tqdm==4.64.1 - twine==1.9.1 - typing-extensions==4.1.1 - unicodecsv==0.14.1 - unidecode==0.04.21 - urllib3==1.22 - vcrpy==1.11.1 - virtualenv==20.17.1 - watchdog==0.8.3 - wrapt==1.16.0 - yarl==1.7.2 - zipp==3.6.0 prefix: /opt/conda/envs/pydatajson
[ "tests/test_ckan_integration.py::PushTestCase::test_dataset_is_created_correctly", "tests/test_ckan_integration.py::PushTestCase::test_dataset_is_updated_correctly", "tests/test_ckan_integration.py::PushTestCase::test_resources_swapped_correctly", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_catalog_id_is_prepended_to_dataset_id_if_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_array_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_complete", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_id_is_preserved_if_catlog_id_is_not_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_nested_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_replicated_plain_attributes_are_corrext", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resource_id_is_preserved_if_catalog_id_is_not_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_superThemes_dont_impact_groups_if_not_demoted", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_themes_are_preserved_if_not_demoted", "tests/test_federation.py::PushDatasetTestCase::test_dataset_id_is_preserved_if_catalog_id_is_not_passed", "tests/test_federation.py::PushDatasetTestCase::test_dataset_without_license_sets_notspecified", "tests/test_federation.py::PushDatasetTestCase::test_id_is_created_correctly", "tests/test_federation.py::PushDatasetTestCase::test_id_is_updated_correctly", "tests/test_federation.py::PushDatasetTestCase::test_licenses_are_interpreted_correctly", "tests/test_federation.py::PushDatasetTestCase::test_tags_are_passed_correctly" ]
[]
[ "tests/test_ckan_integration.py::RemoveTestCase::test_empty_query_result", "tests/test_ckan_integration.py::RemoveTestCase::test_remove_dataset_by_filter_out", "tests/test_ckan_integration.py::RemoveTestCase::test_remove_dataset_by_filter_out_and_organization", "tests/test_ckan_integration.py::RemoveTestCase::test_remove_dataset_by_id", "tests/test_ckan_integration.py::RemoveTestCase::test_remove_dataset_by_organization", "tests/test_ckan_integration.py::RemoveTestCase::test_remove_dataset_by_publisher_and_organization", "tests/test_ckan_integration.py::RemoveTestCase::test_remove_dataset_by_title", "tests/test_ckan_integration.py::RemoveTestCase::test_with_no_parametres", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_catalog_id_is_prefixed_in_resource_id_if_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_preserve_themes_and_superThemes", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_extra_attributes_are_created_correctly", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_transformed_attributes_are_correct", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_change_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_microseconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_seconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_timezones_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_timezones_are_handled_correctly", "tests/test_federation.py::RemoveDatasetTestCase::test_empty_search_doesnt_call_purge", "tests/test_federation.py::RemoveDatasetTestCase::test_filter_in_datasets", "tests/test_federation.py::RemoveDatasetTestCase::test_filter_in_out_datasets", "tests/test_federation.py::RemoveDatasetTestCase::test_query_one_dataset", "tests/test_federation.py::RemoveDatasetTestCase::test_query_over_500_datasets", "tests/test_federation.py::RemoveDatasetTestCase::test_remove_through_filters_and_organization" ]
[]
MIT License
2,294
[ "pydatajson/ckan_utils.py", "pydatajson/federation.py", "docs/MANUAL.md" ]
[ "pydatajson/ckan_utils.py", "pydatajson/federation.py", "docs/MANUAL.md" ]
cloudant__python-cloudant-365
981731500a3a11a2e49c5894d6946e65cd04b113
2018-03-14 14:36:37
8ef838717cf2de07b82ff0c83e6c538043b854f1
emlaver: Update copyright for `_client_session.py`
diff --git a/CHANGES.md b/CHANGES.md index 27f5b9b..ab805d4 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,7 +1,8 @@ # Unreleased -- [NEW] Moved `create_query_index` and other query related methods to `CouchDatabase` as the `_index`/`_find` API is available in CouchDB 2.x. - [NEW] Added functionality to test if a key is in a database as in `key in db`, overriding dict `__contains__` and checking in the remote database. +- [NEW] Moved `create_query_index` and other query related methods to `CouchDatabase` as the `_index`/`_find` API is available in CouchDB 2.x. +- [NEW] Support IAM authentication in replication documents. - [IMPROVED] Added support for IAM API key in `cloudant_bluemix` method. - [IMPROVED] Updated Travis CI and unit tests to run against CouchDB 2.1.1. diff --git a/src/cloudant/_client_session.py b/src/cloudant/_client_session.py index 93df457..0e8fd38 100644 --- a/src/cloudant/_client_session.py +++ b/src/cloudant/_client_session.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (c) 2015, 2017 IBM Corp. All rights reserved. +# Copyright (c) 2015, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -195,6 +195,15 @@ class IAMSession(ClientSession): self._token_url = os.environ.get( 'IAM_TOKEN_URL', 'https://iam.bluemix.net/identity/token') + @property + def get_api_key(self): + """ + Get IAM API key. + + :return: IAM API key. + """ + return self._api_key + def login(self): """ Perform IAM cookie based user login. diff --git a/src/cloudant/client.py b/src/cloudant/client.py index 59d25b2..de88112 100755 --- a/src/cloudant/client.py +++ b/src/cloudant/client.py @@ -98,6 +98,15 @@ class CouchDB(dict): if connect_to_couch and self._DATABASE_CLASS == CouchDatabase: self.connect() + @property + def is_iam_authenticated(self): + """ + Show if a client has authenticated using an IAM API key. + + :return: True if client is IAM authenticated. False otherwise. + """ + return self._use_iam + def connect(self): """ Starts up an authentication session for the client using cookie @@ -107,10 +116,12 @@ class CouchDB(dict): self.session_logout() if self.admin_party: + self._use_iam = False self.r_session = ClientSession( timeout=self._timeout ) elif self._use_basic_auth: + self._use_iam = False self.r_session = BasicSession( self._user, self._auth_token, diff --git a/src/cloudant/replicator.py b/src/cloudant/replicator.py index c9b6a40..7f24407 100644 --- a/src/cloudant/replicator.py +++ b/src/cloudant/replicator.py @@ -51,8 +51,7 @@ class Replicator(object): :param str repl_id: Optional replication id. Generated internally if not explicitly set. :param dict user_ctx: Optional user to act as. Composed internally - if not explicitly set and not in CouchDB Admin Party - mode. + if not explicitly set. :param bool create_target: Specifies whether or not to create the target, if it does not already exist. 
:param bool continuous: If set to True then the replication will be @@ -60,32 +59,50 @@ class Replicator(object): :returns: Replication document as a Document instance """ + if source_db is None: + raise CloudantReplicatorException(101) + + if target_db is None: + raise CloudantReplicatorException(102) data = dict( _id=repl_id if repl_id else str(uuid.uuid4()), **kwargs ) - if source_db is None: - raise CloudantReplicatorException(101) + # replication source + data['source'] = {'url': source_db.database_url} - if not source_db.admin_party: - data['source'].update( - {'headers': {'Authorization': source_db.creds['basic_auth']}} - ) + if source_db.admin_party: + pass # no credentials required + elif source_db.client.is_iam_authenticated: + data['source'].update({'auth': { + 'iam': {'api_key': source_db.client.r_session.get_api_key} + }}) + else: + data['source'].update({'headers': { + 'Authorization': source_db.creds['basic_auth'] + }}) + + # replication target - if target_db is None: - raise CloudantReplicatorException(102) data['target'] = {'url': target_db.database_url} - if not target_db.admin_party: - data['target'].update( - {'headers': {'Authorization': target_db.creds['basic_auth']}} - ) - - if not data.get('user_ctx'): - if (target_db and not target_db.admin_party or - self.database.creds): - data['user_ctx'] = self.database.creds['user_ctx'] + if target_db.admin_party: + pass # no credentials required + elif target_db.client.is_iam_authenticated: + data['target'].update({'auth': { + 'iam': {'api_key': target_db.client.r_session.get_api_key} + }}) + else: + data['target'].update({'headers': { + 'Authorization': target_db.creds['basic_auth'] + }}) + + # add user context delegation + + if not data.get('user_ctx') and self.database.creds and \ + self.database.creds.get('user_ctx'): + data['user_ctx'] = self.database.creds['user_ctx'] return self.database.create_document(data, throw_on_exists=True)
Support IAM authentication in replication documents.

_Parent Issue: https://github.com/cloudant/cloudant-sync/issues/31_

Currently `db.creds['basic_auth']` returns `None` when using an IAM client. As a result, all replication documents created using IAM clients _don't_ contain source/target database credentials.

_Work Required:_

* Expose the IAM key via the client session ([here](https://github.com/cloudant/python-cloudant/blob/2.7.0/src/cloudant/_client_session.py#L194)).
* Rework the [Replicator](https://github.com/cloudant/python-cloudant/blob/2.7.0/src/cloudant/replicator.py#L77-L93) class to correctly inject the IAM key into the replication document (when databases are backed by an IAM client).
* Enable replicator tests for IAM clients (currently disabled).
* Add more unit tests as appropriate.
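Illustrative only, not part of the record above: a sketch of the replication document shape this work targets when both source and target databases are backed by an IAM-authenticated client, matching the `auth.iam.api_key` form used in the patch. The `_id`, database URLs, and key values are placeholders, not real credentials.

```python
# Sketch of the expected replication document for IAM-backed clients.
replication_doc = {
    "_id": "rep-example",  # placeholder replication id
    "source": {
        "url": "https://account.cloudant.com/source_db",  # placeholder database URL
        "auth": {"iam": {"api_key": "<source IAM API key>"}},
    },
    "target": {
        "url": "https://account.cloudant.com/target_db",
        "auth": {"iam": {"api_key": "<target IAM API key>"}},
    },
}
# For cookie/basic-auth clients the existing
# {"headers": {"Authorization": "<basic auth string>"}} form is still used instead.
```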
cloudant/python-cloudant
diff --git a/tests/unit/replicator_mock_tests.py b/tests/unit/replicator_mock_tests.py new file mode 100644 index 0000000..96589f9 --- /dev/null +++ b/tests/unit/replicator_mock_tests.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python +# Copyright (C) 2018 IBM Corp. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +_replicator_mock_tests_ + +replicator module - Mock unit tests for the Replicator class +""" + +import mock +import unittest + +from cloudant.database import CouchDatabase +from cloudant.replicator import Replicator + +from tests.unit.iam_auth_tests import MOCK_API_KEY + + +class ReplicatorDocumentValidationMockTests(unittest.TestCase): + """ + Replicator document validation tests + """ + + def setUp(self): + self.repl_id = 'rep_test' + + self.server_url = 'http://localhost:5984' + self.user_ctx = { + 'name': 'foo', + 'roles': ['erlanger', 'researcher'] + } + + self.source_db = 'source_db' + self.target_db = 'target_db' + + def setUpClientMocks(self, admin_party=False, iam_api_key=None): + m_client = mock.MagicMock() + type(m_client).server_url = mock.PropertyMock( + return_value=self.server_url) + + type(m_client).admin_party = mock.PropertyMock( + return_value=admin_party) + + iam_authenticated = False + + if iam_api_key is not None: + iam_authenticated = True + + m_session = mock.MagicMock() + type(m_session).get_api_key = mock.PropertyMock( + return_value=iam_api_key) + + type(m_client).r_session = mock.PropertyMock( + return_value=m_session) + + type(m_client).is_iam_authenticated = mock.PropertyMock( + return_value=iam_authenticated) + + return m_client + + def test_using_admin_party_source_and_target(self): + m_admin_party_client = self.setUpClientMocks(admin_party=True) + + m_replicator = mock.MagicMock() + type(m_replicator).creds = mock.PropertyMock(return_value=None) + m_admin_party_client.__getitem__.return_value = m_replicator + + # create source/target databases + src = CouchDatabase(m_admin_party_client, self.source_db) + tgt = CouchDatabase(m_admin_party_client, self.target_db) + + # trigger replication + rep = Replicator(m_admin_party_client) + rep.create_replication(src, tgt, repl_id=self.repl_id) + + kcall = m_replicator.create_document.call_args_list + self.assertEquals(len(kcall), 1) + args, kwargs = kcall[0] + self.assertEquals(len(args), 1) + + expected_doc = { + '_id': self.repl_id, + 'source': {'url': '/'.join((self.server_url, self.source_db))}, + 'target': {'url': '/'.join((self.server_url, self.target_db))} + } + + self.assertDictEqual(args[0], expected_doc) + self.assertTrue(kwargs['throw_on_exists']) + + def test_using_basic_auth_source_and_target(self): + test_basic_auth_header = 'abc' + + m_basic_auth_client = self.setUpClientMocks() + + m_replicator = mock.MagicMock() + m_basic_auth_client.__getitem__.return_value = m_replicator + m_basic_auth_client.basic_auth_str.return_value = test_basic_auth_header + + # create source/target databases + src = CouchDatabase(m_basic_auth_client, self.source_db) + tgt = 
CouchDatabase(m_basic_auth_client, self.target_db) + + # trigger replication + rep = Replicator(m_basic_auth_client) + rep.create_replication( + src, tgt, repl_id=self.repl_id, user_ctx=self.user_ctx) + + kcall = m_replicator.create_document.call_args_list + self.assertEquals(len(kcall), 1) + args, kwargs = kcall[0] + self.assertEquals(len(args), 1) + + expected_doc = { + '_id': self.repl_id, + 'user_ctx': self.user_ctx, + 'source': { + 'headers': {'Authorization': test_basic_auth_header}, + 'url': '/'.join((self.server_url, self.source_db)) + }, + 'target': { + 'headers': {'Authorization': test_basic_auth_header}, + 'url': '/'.join((self.server_url, self.target_db)) + } + } + + self.assertDictEqual(args[0], expected_doc) + self.assertTrue(kwargs['throw_on_exists']) + + def test_using_iam_auth_source_and_target(self): + m_iam_auth_client = self.setUpClientMocks(iam_api_key=MOCK_API_KEY) + + m_replicator = mock.MagicMock() + m_iam_auth_client.__getitem__.return_value = m_replicator + + # create source/target databases + src = CouchDatabase(m_iam_auth_client, self.source_db) + tgt = CouchDatabase(m_iam_auth_client, self.target_db) + + # trigger replication + rep = Replicator(m_iam_auth_client) + rep.create_replication( + src, tgt, repl_id=self.repl_id, user_ctx=self.user_ctx) + + kcall = m_replicator.create_document.call_args_list + self.assertEquals(len(kcall), 1) + args, kwargs = kcall[0] + self.assertEquals(len(args), 1) + + expected_doc = { + '_id': self.repl_id, + 'user_ctx': self.user_ctx, + 'source': { + 'auth': {'iam': {'api_key': MOCK_API_KEY}}, + 'url': '/'.join((self.server_url, self.source_db)) + }, + 'target': { + 'auth': {'iam': {'api_key': MOCK_API_KEY}}, + 'url': '/'.join((self.server_url, self.target_db)) + } + } + + self.assertDictEqual(args[0], expected_doc) + self.assertTrue(kwargs['throw_on_exists']) diff --git a/tests/unit/replicator_tests.py b/tests/unit/replicator_tests.py index cd79a7a..504500b 100644 --- a/tests/unit/replicator_tests.py +++ b/tests/unit/replicator_tests.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (c) 2015, 2016, 2017 IBM Corp. All rights reserved. +# Copyright (c) 2015, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -161,7 +161,6 @@ class ReplicatorTests(UnitTestDbBase): ) self.replication_ids.append(repl_id['_id']) - @skip_if_not_cookie_auth @flaky(max_runs=3) def test_create_replication(self): """ @@ -180,7 +179,7 @@ class ReplicatorTests(UnitTestDbBase): # Test that the replication document was created expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx'] # If Admin Party mode then user_ctx will not be in the key list - if self.client.admin_party: + if self.client.admin_party or self.client.is_iam_authenticated: expected_keys.pop() self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys)) self.assertEqual(repl_doc['_id'], repl_id) @@ -238,7 +237,7 @@ class ReplicatorTests(UnitTestDbBase): # Test that the replication document was created expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx'] # If Admin Party mode then user_ctx will not be in the key list - if self.client.admin_party: + if self.client.admin_party or self.client.is_iam_authenticated: expected_keys.pop() self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys)) self.assertEqual(repl_doc['_id'], repl_id) @@ -305,7 +304,6 @@ class ReplicatorTests(UnitTestDbBase): match = [repl_id for repl_id in all_repl_ids if repl_id in repl_ids] self.assertEqual(set(repl_ids), set(match)) - @skip_if_not_cookie_auth def test_retrieve_replication_state(self): """ Test that the replication state can be retrieved for a replication @@ -347,7 +345,6 @@ class ReplicatorTests(UnitTestDbBase): ) self.assertIsNone(repl_state) - @skip_if_not_cookie_auth def test_stop_replication(self): """ Test that a replication can be stopped. @@ -359,7 +356,16 @@ class ReplicatorTests(UnitTestDbBase): self.target_db, repl_id ) - self.replicator.stop_replication(repl_id) + max_retry = 3 + while True: + try: + max_retry -= 1 + self.replicator.stop_replication(repl_id) + break + except requests.HTTPError as err: + self.assertEqual(err.response.status_code, 409) + if max_retry == 0: + self.fail('Failed to stop replication: {0}'.format(err)) try: # The .fetch() will fail since the replication has been stopped # and the replication document has been removed from the db. @@ -383,7 +389,6 @@ class ReplicatorTests(UnitTestDbBase): 'Replication with id {} not found.'.format(repl_id) ) - @skip_if_not_cookie_auth def test_follow_replication(self): """ Test that follow_replication(...) properly iterates updated
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 4 }
2.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "mock", "sphinx", "sphinx_rtd_theme", "pylint", "flaky", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 astroid==2.11.7 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 -e git+https://github.com/cloudant/python-cloudant.git@981731500a3a11a2e49c5894d6946e65cd04b113#egg=cloudant dill==0.3.4 docutils==0.18.1 flaky==3.8.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 Jinja2==3.0.3 lazy-object-proxy==1.7.1 MarkupSafe==2.0.1 mccabe==0.7.0 mock==5.2.0 nose==1.3.7 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pylint==2.13.9 pyparsing==3.1.4 pytest==7.0.1 pytz==2025.2 requests==2.27.1 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typed-ast==1.5.5 typing_extensions==4.1.1 urllib3==1.26.20 wrapt==1.16.0 zipp==3.6.0
name: python-cloudant channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - astroid==2.11.7 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - dill==0.3.4 - docutils==0.18.1 - flaky==3.8.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - jinja2==3.0.3 - lazy-object-proxy==1.7.1 - markupsafe==2.0.1 - mccabe==0.7.0 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pylint==2.13.9 - pyparsing==3.1.4 - pytest==7.0.1 - pytz==2025.2 - requests==2.27.1 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typed-ast==1.5.5 - typing-extensions==4.1.1 - urllib3==1.26.20 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/python-cloudant
[ "tests/unit/replicator_mock_tests.py::ReplicatorDocumentValidationMockTests::test_using_iam_auth_source_and_target" ]
[]
[ "tests/unit/replicator_mock_tests.py::ReplicatorDocumentValidationMockTests::test_using_admin_party_source_and_target", "tests/unit/replicator_mock_tests.py::ReplicatorDocumentValidationMockTests::test_using_basic_auth_source_and_target", "tests/unit/replicator_tests.py::CloudantReplicatorExceptionTests::test_raise_using_invalid_code", "tests/unit/replicator_tests.py::CloudantReplicatorExceptionTests::test_raise_with_proper_code_and_args", "tests/unit/replicator_tests.py::CloudantReplicatorExceptionTests::test_raise_without_args", "tests/unit/replicator_tests.py::CloudantReplicatorExceptionTests::test_raise_without_code" ]
[]
Apache License 2.0
2,296
[ "src/cloudant/client.py", "src/cloudant/replicator.py", "src/cloudant/_client_session.py", "CHANGES.md" ]
[ "src/cloudant/client.py", "src/cloudant/replicator.py", "src/cloudant/_client_session.py", "CHANGES.md" ]
PlasmaPy__PlasmaPy-306
51072c3dcf32a5210423eb519bcdedb960131105
2018-03-15 12:38:24
24113f1659d809930288374f6b1f95dc573aff47
pep8speaks: Hello @hzxusx! Thanks for submitting your pull request. - In the file [`plasmapy/classes/plasma.py`](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/classes/plasma.py), following are the PEP8 issues : > [Line 99:58](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/classes/plasma.py#L99): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment - In the file [`plasmapy/examples/plot_magnetic_statics.py`](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py), following are the PEP8 issues : > [Line 21:47](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L21): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 21:49](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L21): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 21:78](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L21): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 21:80](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L21): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 26:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L26): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (3) > [Line 38:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L38): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 39:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L39): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 51:41](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L51): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 51:43](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L51): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 51:60](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L51): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 51:62](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L51): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 65:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L65): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 66:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L66): 
[E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 70:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L70): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (3) > [Line 80:41](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L80): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 81:30](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L81): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 81:32](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L81): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 81:59](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L81): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 81:61](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L81): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 88:49](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L88): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 88:51](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L88): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 88:68](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L88): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 88:70](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L88): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 101:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L101): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 102:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L102): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 109:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/examples/plot_magnetic_statics.py#L109): [W391](https://duckduckgo.com/?q=pep8%20W391) blank line at end of file - In the file [`plasmapy/physics/magnetostatics.py`](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py), following are the PEP8 issues : > [Line 2:85](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L2): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 18:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L18): 
[E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 28:8](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L28): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 38:47](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L38): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 41:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L41): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 43:46](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L43): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 46:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L46): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 56:100](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L56): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (104 > 99 characters) > [Line 59:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L59): [E305](https://duckduckgo.com/?q=pep8%20E305) expected 2 blank lines after class or function definition, found 1 > [Line 60:61](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L60): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 62:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L62): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 0 > [Line 85:23](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L85): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 96:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L96): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 103:15](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L103): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 126:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L126): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 139:39](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L139): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 140:26](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L140): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 140:41](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L140): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 
140:55](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L140): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 145:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L145): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 146:5](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L146): [E301](https://duckduckgo.com/?q=pep8%20E301) expected 1 blank line, found 0 > [Line 152:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L152): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 155:5](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L155): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (3) > [Line 156:96](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L156): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 157:84](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L157): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 164:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L164): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 176:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L176): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 180:100](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L180): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (102 > 99 characters) > [Line 185:5](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L185): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (2) > [Line 189:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L189): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 195:15](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L195): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 202:31](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L202): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 206:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L206): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 214:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L214): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 
217:71](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L217): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 226:59](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L226): [E225](https://duckduckgo.com/?q=pep8%20E225) missing whitespace around operator > [Line 230:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L230): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (3) > [Line 246:47](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L246): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 247:38](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L247): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 247:57](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L247): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 247:71](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L247): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ':' > [Line 255:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L255): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 272:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L272): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 278:100](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L278): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (137 > 99 characters) > [Line 285:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L285): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 293:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L293): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 296:15](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L296): [W291](https://duckduckgo.com/?q=pep8%20W291) trailing whitespace > [Line 297:100](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L297): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (102 > 99 characters) > [Line 309:87](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L309): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 311:38](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L311): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 
313:75](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L313): [E261](https://duckduckgo.com/?q=pep8%20E261) at least two spaces before inline comment > [Line 315:100](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L315): [E501](https://duckduckgo.com/?q=pep8%20E501) line too long (106 > 99 characters) > [Line 319:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L319): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 319:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/magnetostatics.py#L319): [W391](https://duckduckgo.com/?q=pep8%20W391) blank line at end of file - In the file [`plasmapy/physics/tests/test_magnetostatics.py`](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py), following are the PEP8 issues : > [Line 16:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L16): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 20:5](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L20): [E301](https://duckduckgo.com/?q=pep8%20E301) expected 1 blank line, found 0 > [Line 28:5](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L28): [E301](https://duckduckgo.com/?q=pep8%20E301) expected 1 blank line, found 0 > [Line 30:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L30): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 39:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L39): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (3) > [Line 41:43](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L41): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 41:45](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L41): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 41:62](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L41): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 41:64](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L41): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 42:30](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L42): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ',' > [Line 42:31](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L42): [E231](https://duckduckgo.com/?q=pep8%20E231) missing 
whitespace after ',' > [Line 43:30](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L43): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ',' > [Line 43:31](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L43): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 53:24](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L53): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 53:26](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L53): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 56:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L56): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 62:24](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L62): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 62:26](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L62): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 65:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L65): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 69:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L69): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 71:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L71): [E303](https://duckduckgo.com/?q=pep8%20E303) too many blank lines (3) > [Line 73:35](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L73): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ',' > [Line 73:36](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L73): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 74:35](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L74): [E203](https://duckduckgo.com/?q=pep8%20E203) whitespace before ',' > [Line 74:36](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L74): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 75:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L75): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 84:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L84): 
[W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 86:34](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L86): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 86:36](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L86): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 87:34](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L87): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 87:45](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L87): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 91:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L91): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 93:37](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L93): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 93:39](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L93): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 95:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L95): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 99:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L99): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 106:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L106): [E302](https://duckduckgo.com/?q=pep8%20E302) expected 2 blank lines, found 1 > [Line 108:34](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L108): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 108:36](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L108): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 109:34](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L109): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 109:36](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L109): [E231](https://duckduckgo.com/?q=pep8%20E231) missing whitespace after ',' > [Line 111:14](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L111): [E741](https://duckduckgo.com/?q=pep8%20E741) ambiguous variable name 'I' > [Line 
112:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L112): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 121:1](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L121): [W293](https://duckduckgo.com/?q=pep8%20W293) blank line contains whitespace > [Line 126:30](https://github.com/PlasmaPy/PlasmaPy/blob/73683fc97e9fd169cf6a4e1694404d24a03c9b2c/plasmapy/physics/tests/test_magnetostatics.py#L126): [W292](https://duckduckgo.com/?q=pep8%20W292) no newline at end of file PlasmaPy follows the [PEP8 style guide](https://www.python.org/dev/peps/pep-0008/), which may be checked locally using [pycodestyle](http://pycodestyle.pycqa.org/en/latest/intro.html) StanczakDominik: Pep 8 errors. Pep 8 errors. Pep 8 errors. I feel your pain :D I'll be able to review this in about 24h from now. We might want to no merge this before the (soon-ish) 0.1 release, but we can keep it reviewed, tested and mergeable at a moment's notice if you're okay with that! hzxusx: Thanks for your advice @StanczakDominik ! I fixed most of them. Still don't know how to plot with units. StanczakDominik: Yeah, sorry, turned out I couldn't get much done over the weekend :( I'll look at the new changes and go through any notes I left on the previous pass as time allows. In the meantime, [this](http://docs.astropy.org/en/stable/api/astropy.visualization.quantity_support.html?highlight=matplotlib) is what I had intended to link you as to plotting Quantities! I haven't had the opportunity to use it myself yet, but it should work. not7cd: It's over a month since there was any change. Squash it and merge before there will be to many conflicts to resolve. StanczakDominik: All right, I'm gonna merge this now! I think I'll squash and rebase on top of master, though, as there's a whole lot of exploratory commits here. not7cd: It's still open codecov[bot]: # [Codecov](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=h1) Report > Merging [#306](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=desc) into [master](https://codecov.io/gh/PlasmaPy/PlasmaPy/commit/87515b4a27052001e11213cc97341dbd9a2f3590?src=pr&el=desc) will **increase** coverage by `0.05%`. > The diff coverage is `94.57%`. 
[![Impacted file tree graph](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306/graphs/tree.svg?width=650&src=pr&token=LzZ5mnXVad&height=150)](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #306 +/- ## ========================================= + Coverage 92.85% 92.9% +0.05% ========================================= Files 67 69 +2 Lines 6434 6655 +221 ========================================= + Hits 5974 6183 +209 - Misses 460 472 +12 ``` | [Impacted Files](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [plasmapy/physics/\_\_init\_\_.py](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306/diff?src=pr&el=tree#diff-cGxhc21hcHkvcGh5c2ljcy9fX2luaXRfXy5weQ==) | `100% <100%> (ø)` | :arrow_up: | | [plasmapy/physics/tests/test\_magnetostatics.py](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306/diff?src=pr&el=tree#diff-cGxhc21hcHkvcGh5c2ljcy90ZXN0cy90ZXN0X21hZ25ldG9zdGF0aWNzLnB5) | `100% <100%> (ø)` | | | [plasmapy/classes/sources/plasma3d.py](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306/diff?src=pr&el=tree#diff-cGxhc21hcHkvY2xhc3Nlcy9zb3VyY2VzL3BsYXNtYTNkLnB5) | `90.47% <42.85%> (-9.53%)` | :arrow_down: | | [plasmapy/physics/magnetostatics.py](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306/diff?src=pr&el=tree#diff-cGxhc21hcHkvcGh5c2ljcy9tYWduZXRvc3RhdGljcy5weQ==) | `93.79% <93.79%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=footer). Last update [87515b4...5f44f14](https://codecov.io/gh/PlasmaPy/PlasmaPy/pull/306?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). hzxusx: I passed my defense yesterday and finally have time to do this. It's now ready to merge except 8 lines more coverage misses. Is it strictly request? If it is, how should I write a test for __repr__ function properly? ritiek: @hzxusx You should be able to add some by calling `repr()` with passing the object as argument and simply asserting its return string. not7cd: @StanczakDominik, do I understand that you commited this PR manualy? StanczakDominik: @hzxusx congratulations on your defense! :tada: The test for `__repr__` would be nice but I can probably get that done here while looking at the code. @not7cd if I remember correctly, I started and got into some trouble with my new Linux config and rebasing and figured I'd do it once I got things setup better. Which I happen to do, now. :> StanczakDominik: Hey @hzxusx, I managed to clean up the tree a bit! I solved a few nasty rebase conflicts, but I squashed your changes to one commit in the process. If you don't mind, I'd like to push it to your branch and add a few of my own fixes. 
If you're fine with that, then once I do (not done yet, I'm just working locally for now) I'm going to ask you to do: ```bash git fetch upstream # upstream being the name of your PlasmaPy/PlasmaPy remote instead of your own fork git checkout WireClass git reset upstream/WireClass # review the changes via # git diff ``` StanczakDominik: Current proposed state of the branch after squashing can be seen at https://github.com/StanczakDominik/PlasmaPy/tree/WireClass StanczakDominik: I still might have further review comments if we squash this tree, this is just all I'm able to do as of today. StanczakDominik: Okay, I'm going to assume I can push my changes here, given that @not7cd wanted to start basing some work off of this.
diff --git a/docs/physics/index.rst b/docs/physics/index.rst index ecf12287..342c187e 100644 --- a/docs/physics/index.rst +++ b/docs/physics/index.rst @@ -24,6 +24,7 @@ We thus have: distribution quantum relativity + magnetostatics The subpackage makes heavy use of `astropy.units.Quantity` for handling conversions between different unit systems. This is especially important for electron volts, commonly used in plasma physics to denote temperature, although diff --git a/docs/physics/magnetostatics.rst b/docs/physics/magnetostatics.rst new file mode 100644 index 00000000..4682bce5 --- /dev/null +++ b/docs/physics/magnetostatics.rst @@ -0,0 +1,11 @@ +.. _magnetostatics: + +************************************************** +Magnetostatics (`plasmapy.physics.magnetostatics`) +************************************************** + +.. currentmodule:: plasmapy.physics.magnetostatics + +.. automodapi:: plasmapy.physics.magnetostatics + :no-heading: + diff --git a/plasmapy/classes/sources/plasma3d.py b/plasmapy/classes/sources/plasma3d.py index a2789cb1..24fcab63 100644 --- a/plasmapy/classes/sources/plasma3d.py +++ b/plasmapy/classes/sources/plasma3d.py @@ -5,6 +5,9 @@ import numpy as np import astropy.units as u +import itertools + +from plasmapy.physics.magnetostatics import MagnetoStatics from plasmapy.constants import (m_p, m_e, @@ -115,3 +118,13 @@ def is_datasource_for(cls, **kwargs): else: match = False return match + + def add_magnetostatic(self, *mstats: MagnetoStatics): + # for each MagnetoStatic argument + for mstat in mstats: + # loop over 3D-index (ix,iy,iz) + for point_index in itertools.product(*[list(range(n)) for n in self.domain_shape]): + # get coordinate + p = self.grid[(slice(None),)+point_index] # function as [:, *index] + # calculate magnetic field at this point and add back + self.magnetic_field[(slice(None),)+point_index] += mstat.magnetic_field(p) diff --git a/plasmapy/examples/plot_magnetic_statics.py b/plasmapy/examples/plot_magnetic_statics.py new file mode 100644 index 00000000..5e5631fd --- /dev/null +++ b/plasmapy/examples/plot_magnetic_statics.py @@ -0,0 +1,110 @@ +# coding: utf-8 +""" +Magnetostatic Fields +===================== + +An example of using PlasmaPy's `Magnetostatic` class in `physics` subpackage. +""" + +import plasmapy as pp +from plasmapy.physics import magnetostatics +from plasmapy.classes.sources import Plasma3D +import numpy as np +import astropy.units as u +import matplotlib.pyplot as plt +import itertools + +############################################################ +# Some common magnetostatic fields can be generated and added to a plasma object. 
+# A dipole + +dipole = magnetostatics.MagneticDipole(np.array([0, 0, 1])*u.A*u.m*u.m, np.array([0, 0, 0])*u.m) +print(dipole) + +############################################################ +# initialize a a plasma, where the magnetic field will be calculated on + +plasma = Plasma3D(domain_x=np.linspace(-2, 2, 30) * u.m, + domain_y=np.linspace(0, 0, 1) * u.m, + domain_z=np.linspace(-2, 2, 20) * u.m) + +############################################################ +# add the dipole field to it +plasma.add_magnetostatic(dipole) + +X, Z = plasma.grid[0, :, 0, :], plasma.grid[2, :, 0, :] +U = plasma.magnetic_field[0, :, 0, :].value.T # because grid uses 'ij' indexing +W = plasma.magnetic_field[2, :, 0, :].value.T # because grid uses 'ij' indexing + + +############################################################ +plt.figure() +plt.axis('square') +plt.xlim(-2, 2) +plt.ylim(-2, 2) +plt.title('Dipole field in x-z plane, generated by a dipole pointing in the z direction') +plt.streamplot(plasma.x.value, plasma.z.value, U, W) + +############################################################ +# A circular current-carring wire + +cw = magnetostatics.CircularWire(np.array([0, 0, 1]), np.array([0, 0, 0])*u.m, 1*u.m, 1*u.A) +print(cw) + +############################################################ +# initialize a a plasma, where the magnetic field will be calculated on +plasma = Plasma3D(domain_x=np.linspace(-2, 2, 30) * u.m, + domain_y=np.linspace(0, 0, 1) * u.m, + domain_z=np.linspace(-2, 2, 20) * u.m) + +############################################################ +# add the circular coil field to it +plasma.add_magnetostatic(cw) + +X, Z = plasma.grid[0, :, 0, :], plasma.grid[2, :, 0, :] +U = plasma.magnetic_field[0, :, 0, :].value.T # because grid uses 'ij' indexing +W = plasma.magnetic_field[2, :, 0, :].value.T # because grid uses 'ij' indexing + +plt.figure() +plt.axis('square') +plt.xlim(-2, 2) +plt.ylim(-2, 2) +plt.title('Circular coil field in x-z plane, generated by a circular coil in the x-y plane') +plt.streamplot(plasma.x.value, plasma.z.value, U, W) + +############################################################ +# a circular wire can be described as parametric equation and converted to GeneralWire + +gw_cw = cw.to_GeneralWire() + +# the calculated magnetic field is close +print(gw_cw.magnetic_field([0, 0, 0]) - cw.magnetic_field([0, 0, 0])) + + +############################################################ +# A infinite straight wire + + +iw = magnetostatics.InfiniteStraightWire(np.array([0, 1, 0]), np.array([0, 0, 0])*u.m, 1*u.A) +print(iw) + +############################################################ +# initialize a a plasma, where the magnetic field will be calculated on +plasma = Plasma3D(domain_x=np.linspace(-2, 2, 30) * u.m, + domain_y=np.linspace(0, 0, 1) * u.m, + domain_z=np.linspace(-2, 2, 20) * u.m) + +# add the infinite straight wire field to it +plasma.add_magnetostatic(iw) + +X, Z = plasma.grid[0, :, 0, :], plasma.grid[2, :, 0, :] +U = plasma.magnetic_field[0, :, 0, :].value.T # because grid uses 'ij' indexing +W = plasma.magnetic_field[2, :, 0, :].value.T # because grid uses 'ij' indexing + +plt.figure() +plt.title('Dipole field in x-z plane, generated by a infinite straight wire ' + 'pointing in the y direction') +plt.axis('square') +plt.xlim(-2, 2) +plt.ylim(-2, 2) +plt.streamplot(plasma.x.value, plasma.z.value, U, W) diff --git a/plasmapy/physics/__init__.py b/plasmapy/physics/__init__.py index ce6cbd9d..05d2a29b 100644 --- a/plasmapy/physics/__init__.py +++ 
b/plasmapy/physics/__init__.py @@ -36,3 +36,5 @@ from .relativity import Lorentz_factor from . import transport + +from . import magnetostatics diff --git a/plasmapy/physics/magnetostatics.py b/plasmapy/physics/magnetostatics.py new file mode 100644 index 00000000..9bfda4a6 --- /dev/null +++ b/plasmapy/physics/magnetostatics.py @@ -0,0 +1,421 @@ +""" +Define MagneticStatics class to calculate common static magnetic fields +as first raised in issue #100. +""" + +import abc + +import numpy as np +from astropy import units as u, constants +from scipy.special import roots_legendre + + +class MagnetoStatics(abc.ABC): + """Abstract class for all kinds of magnetic static fields""" + + @abc.abstractmethod + def magnetic_field(self, p: u.m) -> u.T: + """ + Calculate magnetic field generated by this wire at position `p` + + Parameters + ---------- + p : `astropy.units.Quantity` + three-dimensional position vector + + Returns + ------- + B : `astropy.units.Quantity` + magnetic field at the specified positon + + """ + + +class MagneticDipole(MagnetoStatics): + """ + Simple magnetic dipole - two nearby opposite point charges. + + Parameters + ---------- + moment: `astropy.units.Quantity` + Magnetic moment vector, in units of A * m^2 + p0: `astropy.units.Quantity` + Position of the dipole + + """ + + @u.quantity_input() + def __init__(self, moment: u.A * u.m**2, p0: u.m): + self.moment = moment.to(u.A*u.m*u.m).value + self.p0 = p0.to(u.m).value + + def __repr__(self): + return "{name}(moment={moment}, p0={p0})".format( + name=self.__class__.__name__, + moment=self.moment, + p0=self.p0 + ) + + def magnetic_field(self, p: u.m) -> u.T: + r""" + Calculate magnetic field generated by this wire at position `p` + + Parameters + ---------- + p : `astropy.units.Quantity` + three-dimensional position vector + + Returns + ------- + B : `astropy.units.Quantity` + magnetic field at the specified positon + + """ + r = p - self.p0 + m = self.moment + B = constants.mu0.value/4/np.pi \ + * (3*r*np.dot(m, r)/np.linalg.norm(r)**5 - m/np.linalg.norm(r)**3) + return B*u.T + + +class Wire(MagnetoStatics): + """Abstract wire class for concrete wires to be inherited from.""" + + +class GeneralWire(Wire): + r""" + General wire class described by its parametric vector equation + + Parameters + ---------- + parametric_eq: Callable + A vector-valued (with units of position) function of a single real + parameter. + t1: float + lower bound of the parameter, smaller than t2 + t2: float + upper bound of the parameter, larger than t1 + current: `astropy.units.Quantity` + electric current + + """ + + @u.quantity_input() + def __init__(self, parametric_eq, + t1, + t2, + current: + u.A): + if callable(parametric_eq): + self.parametric_eq = parametric_eq + else: + raise ValueError("Argument parametric_eq should be a callable") + if t1 < t2: + self.t1 = t1 + self.t2 = t2 + else: + raise ValueError(f"t1={t1} is not smaller than t2={t2}") + self.current = current.to(u.A).value + + def magnetic_field(self, p: u.m, n: int = 1000) -> u.T: + r""" + Calculate magnetic field generated by this wire at position `p` + + Parameters + ---------- + p : `astropy.units.Quantity` + three-dimensional position vector + n : int, optional + Number of segments for Wire calculation + (defaults to 1000) + + Returns + ------- + B : `astropy.units.Quantity` + magnetic field at the specified positon + + Notes + ----- + For simplicity, we segment the wire into n equal pieces, + and assume each segment is straight. Default n is 1000. + + .. 
math:: + + \vec B + \approx \frac{\mu_0 I}{4\pi} \sum_{i=1}^{n} + \frac{[\vec l(t_{i}) - \vec l(t_{i-1})] \times + \left[\vec p - \frac{\vec l(t_{i}) + \vec l(t_{i-1})}{2}\right]} + {\left|\vec p - \frac{\vec l(t_{i}) + \vec l(t_{i-1})}{2}\right|^3}, + \quad \text{where}\, t_i = t_{\min}+i/n*(t_{\max}-t_{\min}) + + """ + + p1 = self.parametric_eq(self.t1) + step = (self.t2 - self.t1) / n + t = self.t1 + B = 0 + for i in range(n): + t = t + step + p2 = self.parametric_eq(t) + dl = p2 - p1 + p1 = p2 + R = p - (p2 + p1) / 2 + B += np.cross(dl, R)/np.linalg.norm(R)**3 + B = B*constants.mu0.value/4/np.pi*self.current + return B*u.T + + +class FiniteStraightWire(Wire): + """ + Finite length straight wire class. + + p1 to p2 direction is the possitive current direction. + + Parameters + ---------- + p1: `astropy.units.Quantity` + three-dimensional Cartesian coordinate of one end of the straight wire + p2: `astropy.units.Quantity` + three-dimensional Cartesian coordinate of another end of the straight wire + current: `astropy.units.Quantity` + electric current + + """ + + @u.quantity_input() + def __init__(self, p1: u.m, p2: u.m, current: u.A): + self.p1 = p1.to(u.m).value + self.p2 = p2.to(u.m).value + if np.all(p1 == p2): + raise ValueError("p1, p2 should not be the same point.") + self.current = current.to(u.A).value + + def __repr__(self): + return "{name}(p1={p1}, p2={p2}, current={current})".format( + name=self.__class__.__name__, + p1=self.p1, + p2=self.p2, + current=self.current + ) + + def magnetic_field(self, p) -> u.T: + r""" + Calculate magnetic field generated by this wire at position `p` + + Parameters + ---------- + p : `astropy.units.Quantity` + three-dimensional position vector + + Returns + ------- + B : `astropy.units.Quantity` + magnetic field at the specified positon + + Notes + ----- + Let :math:`P_f` be the foot of perpendicular, :math:`\theta_1`(:math:`\theta_2`) be the + angles between :math:`\overrightarrow{PP_1}`(:math:`\overrightarrow{PP_2}`) + and :math:`\overrightarrow{P_2P_1}`. + + .. math: + \vec B = \frac{(\overrightarrow{P_2P_1}\times\overrightarrow{PP_f})^0} + {|\overrightarrow{PP_f}|} + \frac{\mu_0 I}{4\pi} (\cos\theta_1 - \cos\theta_2) + + """ + # foot of perpendicular + p1, p2 = self.p1, self.p2 + p2_p1 = p2 - p1 + ratio = np.dot(p - p1, p2_p1)/np.dot(p2_p1, p2_p1) + pf = p1 + p2_p1*ratio + + # angles: theta_1 = <p - p1, p2 - p1>, theta_2 = <p - p2, p2 - p1> + cos_theta_1 = np.dot(p - p1, p2_p1)/np.linalg.norm(p - p1)/np.linalg.norm(p2_p1) + cos_theta_2 = np.dot(p - p2, p2_p1)/np.linalg.norm(p - p2)/np.linalg.norm(p2_p1) + + B_unit = np.cross(p2_p1, p - pf) + B_unit = B_unit/np.linalg.norm(B_unit) + + B = B_unit/np.linalg.norm(p-pf)*(cos_theta_1 - cos_theta_2) \ + * constants.mu0.value/4/np.pi*self.current + + return B*u.T + + def to_GeneralWire(self): + """Convert this `Wire` into a `GeneralWire`.""" + p1, p2 = self.p1, self.p2 + return GeneralWire(lambda t: p1+(p2-p1)*t, 0, 1, self.current*u.A) + + +class InfiniteStraightWire(Wire): + """ + Infinite straight wire class. 
+ + Parameters + ---------- + direction: + three-dimensional direction vector of the wire, also the positive current direction + p0: `astropy.units.Quantity` + one point on the wire + current: `astropy.units.Quantity` + electric current + + """ + + @u.quantity_input() + def __init__(self, direction, p0: u.m, current: u.A): + self.direction = direction/np.linalg.norm(direction) + self.p0 = p0.to(u.m).value + self.current = current.to(u.A).value + + def __repr__(self): + return "{name}(direction={direction}, p0={p0}, current={current})".format( + name=self.__class__.__name__, + direction=self.direction, + p0=self.p0, + current=self.current + ) + + def magnetic_field(self, p) -> u.T: + r""" + Calculate magnetic field generated by this wire at position `p` + + Parameters + ---------- + p : `astropy.units.Quantity` + three-dimensional position vector + + Returns + ------- + B : `astropy.units.Quantity` + magnetic field at the specified positon + + Notes + ----- + .. math: + \vec B = \frac{\mu_0 I}{2\pi r}*(\vec l^0\times \vec{PP_0})^0, + \text{where}\, \vec l^0\, \text{is the unit vector of current direction}, + r\, \text{is the perpendicular distance between} P_0 \text{and the infinite wire} + + """ + r = np.cross(self.direction, p - self.p0) + B_unit = r / np.linalg.norm(r) + r = np.linalg.norm(r) + + return B_unit/r*constants.mu0.value/2/np.pi*self.current*u.T + + +class CircularWire(Wire): + """ + Circular wire(coil) class + + Parameters + ---------- + normal: + three-dimensional normal vector of the circular coil + center: `astropy.units.Quantity` + three-dimensional position vector of the circular coil's center + radius: `astropy.units.Quantity` + radius of the circular coil + current: `astropy.units.Quantity` + electric current + + """ + + @u.quantity_input() + def __init__(self, normal, center: u.m, radius: u.m, + current: u.A, n=300): + self.normal = normal/np.linalg.norm(normal) + self.center = center.to(u.m).value + if radius > 0: + self.radius = radius.to(u.m).value + else: + raise ValueError("Radius should bu larger than 0") + self.current = current.to(u.A).value + + # parametric equation + # find other two axises in the disc plane + z = np.array([0, 0, 1]) + axis_x = np.cross(z, self.normal) + axis_y = np.cross(self.normal, axis_x) + + if np.linalg.norm(axis_x) == 0: + axis_x = np.array([1, 0, 0]) + axis_y = np.array([0, 1, 0]) + else: + axis_x = axis_x/np.linalg.norm(axis_x) + axis_y = axis_y/np.linalg.norm(axis_y) + + self.axis_x = axis_x + self.axis_y = axis_y + + def curve(t): + if isinstance(t, np.ndarray): + t = np.expand_dims(t, 0) + axis_x_mat = np.expand_dims(axis_x, 1) + axis_y_mat = np.expand_dims(axis_y, 1) + return self.radius*(np.matmul(axis_x_mat, np.cos(t)) + + np.matmul(axis_y_mat, np.sin(t))) \ + + np.expand_dims(self.center, 1) + else: + return self.radius*(np.cos(t)*axis_x + np.sin(t)*axis_y) + self.center + self.curve = curve + + self.roots_legendre = roots_legendre(n) + self.n = n + + def __repr__(self): + return "{name}(normal={normal}, center={center}, \ +radius={radius}, current={current})".format( + name=self.__class__.__name__, + normal=self.normal, + center=self.center, + radius=self.radius, + current=self.current + ) + + def magnetic_field(self, p) -> u.T: + r""" + Calculate magnetic field generated by this wire at position `p` + + Parameters + ---------- + p : `astropy.units.Quantity` + three-dimensional position vector + + Returns + ------- + B : `astropy.units.Quantity` + magnetic field at the specified positon + + Notes + ----- + .. 
math: + \vec B + = \frac{\mu_0 I}{4\pi} + \int \frac{d\vec l\times(\vec p - \vec l(t))}{|\vec p - \vec l(t)|^3}\\ + = \frac{\mu_0 I}{4\pi} \int_{-\pi}^{\pi} {(-r\sin\theta \hat x + r\cos\theta \hat y)} + \times \frac{\vec p - \vec l(t)}{|\vec p - \vec l(t)|^3} d\theta + + We use n points Gauss-Legendre quadrature to compute the integral. The default n is 300. + + """ + + x, w = self.roots_legendre + t = x*np.pi + pt = self.curve(t) + dl = self.radius*( + - np.matmul(np.expand_dims(self.axis_x, 1), np.expand_dims(np.sin(t), 0)) + + np.matmul(np.expand_dims(self.axis_y, 1), np.expand_dims(np.cos(t), 0))) # (3, n) + + r = np.expand_dims(p, 1) - pt # (3, n) + r_norm_3 = np.linalg.norm(r, axis=0)**3 + ft = np.cross(dl, r, axisa=0, axisb=0)/np.expand_dims(r_norm_3, 1) # (n, 3) + + return np.pi*np.matmul(np.expand_dims(w, 0), ft).squeeze(0) \ + * constants.mu0.value/4/np.pi*self.current*u.T + + def to_GeneralWire(self): + """Convert this `Wire` into a `GeneralWire`.""" + return GeneralWire(self.curve, -np.pi, np.pi, self.current*u.A)
Functions to get magnetostatic fields from known solutions (infinite wires, circular loops) In addition to the Biot-Savart law solver proposed in #99, we should have a way of calculating the magnetic fields resulting from simple current-carrying wire setups. This would include the magnetic field from an infinite wire (which has a simple exact solution) and the magnetic field resulting from a circular current-carrying loop (which has a complicated exact solution) for boundary conditions at infinity. There might be other analytical solutions worth including, but these two are probably the most important ones. This could perhaps go in the `physics` subpackage in a file called `magnetostatics.py`.
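For context on the "simple exact solution" the issue mentions: an infinite straight wire carrying current I produces a field of magnitude B = μ0·I / (2π·r) at perpendicular distance r, directed along the wire direction crossed with the radial vector — the same closed form the `InfiniteStraightWire` class in the patch above implements. Below is a minimal standalone sketch of that formula; the function name and argument layout are illustrative only, not part of the PlasmaPy API.

```python
import numpy as np

MU_0 = 4e-7 * np.pi  # vacuum permeability in T*m/A


def infinite_wire_field(current, direction, p0, p):
    """Exact field of an infinite straight wire carrying `current` amperes.

    `direction` is a vector along the wire, `p0` a point on the wire and
    `p` the observation point (all in metres); the result is in tesla.
    """
    direction = np.asarray(direction, dtype=float)
    direction = direction / np.linalg.norm(direction)
    rel = np.asarray(p, dtype=float) - np.asarray(p0, dtype=float)
    # Perpendicular vector from the wire to the observation point
    perp = rel - np.dot(rel, direction) * direction
    r = np.linalg.norm(perp)
    # B = mu0*I / (2*pi*r), directed along (wire direction) x (radial unit vector)
    b_hat = np.cross(direction, perp / r)
    return MU_0 * current / (2 * np.pi * r) * b_hat


# 1 A along z, observed 1 m away on the x axis: ~2e-7 T in the +y direction
print(infinite_wire_field(1.0, [0, 0, 1], [0, 0, 0], [1, 0, 0]))
```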
PlasmaPy/PlasmaPy
diff --git a/plasmapy/classes/sources/tests/test_plasmablob.py b/plasmapy/classes/sources/tests/test_plasmablob.py index f8a910e5..0b94f37e 100644 --- a/plasmapy/classes/sources/tests/test_plasmablob.py +++ b/plasmapy/classes/sources/tests/test_plasmablob.py @@ -2,10 +2,114 @@ import numpy as np import astropy.units as u -from plasmapy.classes.sources import plasmablob +from plasmapy.classes.sources import plasma3d, plasmablob +from plasmapy.physics import magnetostatics from plasmapy.utils.exceptions import InvalidParticleError - [email protected]('grid_dimensions, expected_size', [ + ((100, 1, 1), 100), # Test 1D setup + ((128, 128, 1), 16384), # 2D + ((64, 64, 64), 262144), # 3D +]) +def test_Plasma3D_setup(grid_dimensions, expected_size): + r"""Function to test basic setup of the Plasma3D object. + + Tests that a Plasma3D object initiated with a particular + specification behaves in the correct way. + + Parameters + ---------- + grid_dimensions : tuple of ints + Grid size of the Plasma3D object to test. Must be a tuple of + length 3, indicating length of the grid in x, y, and z + directions respectively. Directions not needed should have a + length of 1. + + expected_size : int + Product of grid dimensions. + + Examples + -------- + >>> test_Plasma3D_setup((10, 10, 10), 1000) + >>> test_Plasma3D_setup((100, 10, 1), 1000) + """ + x, y, z = grid_dimensions + test_plasma = plasma3d.Plasma3D(domain_x=np.linspace(0, 1, x) * u.m, + domain_y=np.linspace(0, 1, y) * u.m, + domain_z=np.linspace(0, 1, z) * u.m) + + # Basic grid setup + assert test_plasma.x.size == x + assert test_plasma.y.size == y + assert test_plasma.z.size == z + assert test_plasma.grid.size == 3 * expected_size + + # Core variable units and shapes + assert test_plasma.density.size == expected_size + assert test_plasma.density.si.unit == u.kg / u.m ** 3 + + assert test_plasma.momentum.size == 3 * expected_size + assert test_plasma.momentum.si.unit == u.kg / (u.m ** 2 * u.s) + + assert test_plasma.pressure.size == expected_size + assert test_plasma.pressure.si.unit == u.Pa + + assert test_plasma.magnetic_field.size == 3 * expected_size + assert test_plasma.magnetic_field.si.unit == u.T + + assert test_plasma.electric_field.size == 3 * expected_size + assert test_plasma.electric_field.si.unit == u.V / u.m + + +# @pytest.mark.parametrize([()]) +def test_Plasma3D_derived_vars(): + r"""Function to test derived variables of the Plasma3D class. + + Tests the shapes, units and values of variables derived from core + variables. The core variables are set with arbitrary uniform + values. + """ + test_plasma = plasma3d.Plasma3D(domain_x=np.linspace(0, 1, 64) * u.m, + domain_y=np.linspace(0, 1, 64) * u.m, + domain_z=np.linspace(0, 1, 1) * u.m) + + # Set an arbitrary uniform values throughout the plasma + test_plasma.density[...] = 2.0 * u.kg / u.m ** 3 + test_plasma.momentum[...] = 10.0 * u.kg / (u.m ** 2 * u.s) + test_plasma.pressure[...] = 1 * u.Pa + test_plasma.magnetic_field[...] = 0.01 * u.T + test_plasma.electric_field[...] 
= 0.01 * u.V / u.m + + # Test derived variable units and shapes + assert test_plasma.velocity.shape == test_plasma.momentum.shape + assert (test_plasma.velocity == 5.0 * u.m / u.s).all() + + assert test_plasma.magnetic_field_strength.shape == \ + test_plasma.magnetic_field.shape[1:] + assert test_plasma.magnetic_field_strength.si.unit == u.T + assert np.allclose(test_plasma.magnetic_field_strength.value, 0.017320508) + + assert test_plasma.electric_field_strength.shape == \ + test_plasma.electric_field.shape[1:] + assert test_plasma.electric_field_strength.si.unit == u.V / u.m + + assert test_plasma.alfven_speed.shape == test_plasma.density.shape + assert test_plasma.alfven_speed.unit.si == u.m / u.s + assert np.allclose(test_plasma.alfven_speed.value, 10.92548431) + +def test_Plasma3D_add_magnetostatics(): + r"""Function to test add_magnetostatic function + """ + dipole = magnetostatics.MagneticDipole(np.array([0, 0, 1])*u.A*u.m*u.m, np.array([0, 0, 0])*u.m) + cw = magnetostatics.CircularWire(np.array([0, 0, 1]), np.array([0, 0, 0])*u.m, 1*u.m, 1*u.A) + gw_cw = cw.to_GeneralWire() + iw = magnetostatics.InfiniteStraightWire(np.array([0, 1, 0]), np.array([0, 0, 0])*u.m, 1*u.A) + plasma = plasma3d.Plasma3D(domain_x=np.linspace(-2, 2, 30) * u.m, + domain_y=np.linspace(0, 0, 1) * u.m, + domain_z=np.linspace(-2, 2, 20) * u.m) + + plasma.add_magnetostatic(dipole, cw, gw_cw, iw) + class Test_PlasmaBlobRegimes: def test_intermediate_coupling(self): r""" diff --git a/plasmapy/physics/tests/test_magnetostatics.py b/plasmapy/physics/tests/test_magnetostatics.py new file mode 100644 index 00000000..ceb2a162 --- /dev/null +++ b/plasmapy/physics/tests/test_magnetostatics.py @@ -0,0 +1,154 @@ +import numpy as np +import pytest +from astropy import units as u, constants + +from plasmapy.physics.magnetostatics import (MagnetoStatics, + MagneticDipole, + Wire, + GeneralWire, + FiniteStraightWire, + InfiniteStraightWire, + CircularWire) + + +mu0_4pi = constants.mu0/4/np.pi + + +class Test_MagneticDipole: + def setup_method(self): + self.moment = np.array([0, 0, 1])*u.A*u.m*u.m + self.p0 = np.array([0, 0, 0])*u.m + + def test_value1(self): + "Test a known solution" + p = np.array([1, 0, 0]) + B1 = MagneticDipole(self.moment, self.p0).magnetic_field(p) + B1_expected = np.array([0, 0, -1])*1e-7*u.T + assert np.all(np.isclose(B1.value, B1_expected.value)) + assert B1.unit == u.T + + def test_value2(self): + "Test a known solution" + p = np.array([0, 0, 1]) + B2 = MagneticDipole(self.moment, self.p0).magnetic_field(p) + B2_expected = np.array([0, 0, 2])*1e-7*u.T + assert np.all(np.isclose(B2.value, B2_expected.value)) + assert B2.unit == u.T + + +class Test_GeneralWire: + def setup_method(self): + self.cw = CircularWire(np.array([0, 0, 1]), np.array([0, 0, 0])*u.m, 1*u.m, 1*u.A) + p1 = np.array([0., 0., 0.])*u.m + p2 = np.array([0., 0., 1.])*u.m + self.fw = FiniteStraightWire(p1, p2, 1*u.A) + + def test_not_callable(self): + "Test that `GeneralWire` raises `ValueError` if its first argument is not callale" + with pytest.raises(ValueError): + GeneralWire("wire", 0, 1, 1*u.A) + + def test_close_cw(self): + "Test if the GeneralWire is close to the CircularWire it converted from" + gw_cw = self.cw.to_GeneralWire() + p = np.array([0, 0, 0]) + B_cw = self.cw.magnetic_field(p) + B_gw_cw = gw_cw.magnetic_field(p) + + assert np.all(np.isclose(B_cw.value, B_gw_cw.value)) + assert B_cw.unit == B_gw_cw.unit + + def test_close_fw(self): + "Test if the GeneralWire is close to the FiniteWire it converted from" + gw_fw = 
self.fw.to_GeneralWire() + p = np.array([1, 0, 0]) + B_fw = self.fw.magnetic_field(p) + B_gw_fw = gw_fw.magnetic_field(p) + + assert np.all(np.isclose(B_fw.value, B_gw_fw.value)) + assert B_fw.unit == B_gw_fw.unit + + def test_value_error(self): + "Test GeneralWire raise ValueError when argument t1>t2" + with pytest.raises(ValueError) as e: + gw_cw = GeneralWire(lambda t: [0,0,t], 2, 1, 1.*u.A) + + +class Test_FiniteStraightWire: + def setup_method(self): + self.p1 = np.array([0., 0., -1.])*u.m + self.p2 = np.array([0., 0., 1.])*u.m + self.current = 1*u.A + + def test_same_point(self): + "Test that `FintiteStraightWire` raises `ValueError` if p1 == p2 " + with pytest.raises(ValueError): + FiniteStraightWire(self.p1, self.p1, self.current) + + def test_value1(self): + "Test a known solution" + fw = FiniteStraightWire(self.p1, self.p2, self.current) + B1 = fw.magnetic_field([1, 0, 0]) + B1_expected = np.array([0, np.sqrt(2), 0])*1e-7*u.T + assert np.all(np.isclose(B1.value, B1_expected.value)) + assert B1.unit == u.T + + def test_repr(self): + "Test __repr__ function" + fw = FiniteStraightWire(self.p1, self.p2, self.current) + assert repr(fw) == r"FiniteStraightWire(p1=[ 0. 0. -1.], p2=[0. 0. 1.], current=1.0)" + + + +class Test_InfiniteStraightWire: + def setup_method(self): + self.direction = np.array([0, 1, 0]) + self.p0 = np.array([0, 0, 0])*u.m + self.current = 1*u.A + + def test_value1(self): + "Test a known solution" + iw = InfiniteStraightWire(self.direction, self.p0, self.current) + B1 = iw.magnetic_field([1, 0, 0]) + B1_expected = np.array([0, 0, -2])*1e-7*u.T + assert np.all(np.isclose(B1.value, B1_expected.value)) + assert B1.unit == u.T + + def test_repr(self): + "Test __repr__ function" + iw = InfiniteStraightWire(self.direction, self.p0, self.current) + assert repr(iw) == r"InfiniteStraightWire(direction=[0. 1. 0.], p0=[0. 0. 0.], current=1.0)" + +class Test_CircularWire: + def setup_method(self): + self.normalz = np.array([0, 0, 1]) + self.normalx = np.array([1, 0, 0]) + self.center = np.array([0, 0, 0])*u.m + self.radius = 1*u.m + self.current = 1*u.A + + def test_negative_radius(self): + "Test that `FintiteStraightWire` raises `ValueError` if radius < 0" + with pytest.raises(ValueError): + CircularWire(self.normalz, self.center, -1.*u.m, self.current) + + def test_value1(self): + "Test a known solution" + cw = CircularWire(self.normalz, self.center, self.radius, self.current) + B1 = cw.magnetic_field([0, 0, 1]) + B1_expected = np.array([0, 0, 1])*2*np.pi/2**1.5*1e-7*u.T + assert np.all(np.isclose(B1.value, B1_expected.value)) + assert B1.unit == u.T + + def test_value2(self): + "Test a known solution" + cw = CircularWire(self.normalx, self.center, self.radius, self.current) + B2 = cw.magnetic_field([1, 0, 0]) + B2_expected = np.array([1, 0, 0])*2*np.pi/2**1.5*1e-7*u.T + assert np.all(np.isclose(B2.value, B2_expected.value)) + assert B2.unit == u.T + + def test_repr(self): + "Test __repr__ function" + cw = CircularWire(self.normalz, self.center, self.radius, self.current) + assert repr(cw) == r"CircularWire(normal=[0. 0. 1.], center=[0. 0. 0.], radius=1.0, current=1.0)"
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 3 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "coverage" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asteval==0.9.26 astropy==4.1 attrs==22.2.0 certifi==2021.5.30 colorama==0.4.5 coverage==6.2 cycler==0.11.0 Cython==3.0.12 future==1.0.0 importlib-metadata==4.8.3 iniconfig==1.1.1 kiwisolver==1.3.1 lmfit==1.0.3 matplotlib==3.3.4 mpmath==1.3.0 numpy==1.19.5 packaging==21.3 Pillow==8.4.0 -e git+https://github.com/PlasmaPy/PlasmaPy.git@51072c3dcf32a5210423eb519bcdedb960131105#egg=plasmapy pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 roman==3.3 scipy==1.5.4 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 uncertainties==3.1.7 zipp==3.6.0
name: PlasmaPy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - asteval==0.9.26 - astropy==4.1 - attrs==22.2.0 - colorama==0.4.5 - coverage==6.2 - cycler==0.11.0 - cython==3.0.12 - future==1.0.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - kiwisolver==1.3.1 - lmfit==1.0.3 - matplotlib==3.3.4 - mpmath==1.3.0 - numpy==1.19.5 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - roman==3.3 - scipy==1.5.4 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - uncertainties==3.1.7 - zipp==3.6.0 prefix: /opt/conda/envs/PlasmaPy
[ "plasmapy/classes/sources/tests/test_plasmablob.py::test_Plasma3D_setup[grid_dimensions0-100]", "plasmapy/classes/sources/tests/test_plasmablob.py::test_Plasma3D_setup[grid_dimensions1-16384]", "plasmapy/classes/sources/tests/test_plasmablob.py::test_Plasma3D_setup[grid_dimensions2-262144]", "plasmapy/classes/sources/tests/test_plasmablob.py::test_Plasma3D_derived_vars", "plasmapy/classes/sources/tests/test_plasmablob.py::test_Plasma3D_add_magnetostatics", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_invalid_particle", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_electron_temperature", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_electron_density", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_ionization", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_composition", "plasmapy/physics/tests/test_magnetostatics.py::Test_MagneticDipole::test_value1", "plasmapy/physics/tests/test_magnetostatics.py::Test_MagneticDipole::test_value2", "plasmapy/physics/tests/test_magnetostatics.py::Test_GeneralWire::test_not_callable", "plasmapy/physics/tests/test_magnetostatics.py::Test_GeneralWire::test_close_cw", "plasmapy/physics/tests/test_magnetostatics.py::Test_GeneralWire::test_close_fw", "plasmapy/physics/tests/test_magnetostatics.py::Test_GeneralWire::test_value_error", "plasmapy/physics/tests/test_magnetostatics.py::Test_FiniteStraightWire::test_same_point", "plasmapy/physics/tests/test_magnetostatics.py::Test_FiniteStraightWire::test_value1", "plasmapy/physics/tests/test_magnetostatics.py::Test_FiniteStraightWire::test_repr", "plasmapy/physics/tests/test_magnetostatics.py::Test_InfiniteStraightWire::test_value1", "plasmapy/physics/tests/test_magnetostatics.py::Test_InfiniteStraightWire::test_repr", "plasmapy/physics/tests/test_magnetostatics.py::Test_CircularWire::test_negative_radius", "plasmapy/physics/tests/test_magnetostatics.py::Test_CircularWire::test_value1", "plasmapy/physics/tests/test_magnetostatics.py::Test_CircularWire::test_value2", "plasmapy/physics/tests/test_magnetostatics.py::Test_CircularWire::test_repr" ]
[ "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlobRegimes::test_intermediate_coupling", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlobRegimes::test_strongly_coupled", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlobRegimes::test_weakly_coupled", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlobRegimes::test_thermal_kinetic_energy_dominant", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlobRegimes::test_fermi_quantum_energy_dominant", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlobRegimes::test_both_fermi_and_thermal_energy_important", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_coupling", "plasmapy/classes/sources/tests/test_plasmablob.py::Test_PlasmaBlob::test_quantum_theta" ]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,297
[ "plasmapy/classes/sources/plasma3d.py", "plasmapy/physics/magnetostatics.py", "docs/physics/index.rst", "plasmapy/examples/plot_magnetic_statics.py", "docs/physics/magnetostatics.rst", "plasmapy/physics/__init__.py" ]
[ "plasmapy/classes/sources/plasma3d.py", "plasmapy/physics/magnetostatics.py", "docs/physics/index.rst", "plasmapy/examples/plot_magnetic_statics.py", "docs/physics/magnetostatics.rst", "plasmapy/physics/__init__.py" ]
datosgobar__pydatajson-131
ef99387305d7cd46831c715c7a443f4b056baeb4
2018-03-16 17:55:35
adb85a7de7dfa073ddf9817a5fe2d125f9ce4e54
diff --git a/pydatajson/ckan_utils.py b/pydatajson/ckan_utils.py index f71570d..9724f44 100644 --- a/pydatajson/ckan_utils.py +++ b/pydatajson/ckan_utils.py @@ -1,6 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- import json +import re +import sys from datetime import time from dateutil import parser, tz from .helpers import title_to_name @@ -14,7 +16,7 @@ def append_attribute_to_extra(package, dataset, attribute, serialize=False): package['extras'].append({'key': attribute, 'value': value}) -def map_dataset_to_package(dataset, owner_org, theme_taxonomy, catalog_id=None, +def map_dataset_to_package(catalog, dataset, owner_org, catalog_id=None, demote_superThemes=True, demote_themes=True): package = dict() package['extras'] = [] @@ -66,7 +68,8 @@ def map_dataset_to_package(dataset, owner_org, theme_taxonomy, catalog_id=None, if themes and demote_themes: package['tags'] = package.get('tags', []) for theme in themes: - label = next(x['label'] for x in theme_taxonomy if x['id'] == theme) + label = catalog.get_theme(identifier=theme)['label'] + label = re.sub(r'[^\wá-úÁ-ÚñÑ .-]+', '', label, flags=re.UNICODE) package['tags'].append({'name': label}) else: package['groups'] = package.get('groups', []) + [{'name': title_to_name(theme, decode=False)} diff --git a/pydatajson/federation.py b/pydatajson/federation.py index 2c31623..9573040 100644 --- a/pydatajson/federation.py +++ b/pydatajson/federation.py @@ -29,9 +29,8 @@ def push_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier, portal_u """ dataset = catalog.get_dataset(dataset_origin_identifier) ckan_portal = RemoteCKAN(portal_url, apikey=apikey) - theme_taxonomy = catalog.themes - package = map_dataset_to_package(dataset, owner_org, theme_taxonomy, catalog_id, + package = map_dataset_to_package(catalog, dataset, owner_org, catalog_id, demote_superThemes, demote_themes) # Get license id diff --git a/pydatajson/search.py b/pydatajson/search.py index 0ab6391..1c3d25a 100644 --- a/pydatajson/search.py +++ b/pydatajson/search.py @@ -280,7 +280,7 @@ def get_theme(catalog, identifier=None, label=None): # filtra por id (preferentemente) o label if identifier: - filtered_themes = [theme for theme in themes if theme["id"] == identifier] + filtered_themes = [theme for theme in themes if theme["id"].lower() == identifier.lower()] if len(filtered_themes) > 1: raise ThemeIdRepeated([x["id"] for x in filtered_themes])
Make the handling of themes by push_dataset_to_ckan() more robust This is an issue that refers to issue 24 of monitoreo-apertura: https://github.com/datosgobar/monitoreo-apertura/issues/24 I am opening it here because it seems more appropriate to discuss it in this repo. -------------------------------------------------------------------------------------------------- **Context** An attempt was made to federate the time-series dataset to the andino.datos.gob.ar instance, as follows: ``` catalog_modernizacion = DataJson("http://infra.datos.gob.ar/catalog/modernizacion/data.json") catalog_modernizacion.push_dataset_to_ckan( "modernizacion", "ministerio-de-modernizacion", "7", portal_url, apikey ) ``` and it returned the following response: ``` --------------------------------------------------------------------------- StopIteration Traceback (most recent call last) <ipython-input-8-b4c2dae3c9ec> in <module>() 1 catalog_modernizacion.push_dataset_to_ckan( 2 "modernizacion", "ministerio-de-modernizacion", "7", ----> 3 portal_url, apikey 4 ) /Users/abenassi/github/pydatajson/pydatajson/federation.pyc in push_dataset_to_ckan(catalog, catalog_id, owner_org, dataset_origin_identifier, portal_url, apikey) 49 theme_taxonomy = catalog.themes 50 for theme in themes: ---> 51 label = next(x['label'] for x in theme_taxonomy if x['id'] == theme) 52 package['tags'].append({'name': label}) 53 StopIteration: ``` **Proposal** Investigate what caused the operation to stop. The problem may be that it requires creating "themes" that the target instance does not have. If that is the case, the best approach would be to incorporate automatic creation of missing themes into the federation operation.
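The StopIteration in the traceback comes from `next(...)` finding no theme whose `id` matches the requested identifier exactly; the patch above addresses this by routing the lookup through `catalog.get_theme()` with a case-insensitive comparison in `pydatajson/search.py`. The following is a minimal sketch of that lookup logic only — the helper name and the sample taxonomy are invented for illustration and are not part of the pydatajson API.

```python
def get_theme_label(theme_taxonomy, identifier):
    """Look up a theme label, matching the id case-insensitively.

    Raises a descriptive KeyError instead of letting a bare StopIteration
    escape when the theme is missing from the taxonomy.
    """
    matches = [theme for theme in theme_taxonomy
               if theme["id"].lower() == identifier.lower()]
    if not matches:
        raise KeyError(
            "Theme '{}' not found in the catalog taxonomy".format(identifier))
    return matches[0]["label"]


# Sample taxonomy invented for illustration only
taxonomy = [
    {"id": "ECON", "label": "Economia"},
    {"id": "AGRI", "label": "Agroganaderia"},
]
print(get_theme_label(taxonomy, "econ"))  # finds "Economia" despite the case mismatch
```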
datosgobar/pydatajson
diff --git a/tests/samples/full_data.json b/tests/samples/full_data.json index b349578..e80673c 100644 --- a/tests/samples/full_data.json +++ b/tests/samples/full_data.json @@ -193,7 +193,7 @@ "id": "convocatorias" }, { - "label": "Compras", + "label": "Adquisición", "description": "Datasets sobre compras realizadas.", "id": "compras" }, @@ -213,7 +213,7 @@ "id": "normativa" }, { - "label": "Proveedores", + "label": "Proveeduría", "description": "Datasets sobre proveedores del Estado.", "id": "proveedores" } diff --git a/tests/test_ckan_utils.py b/tests/test_ckan_utils.py index 3f20984..f90406e 100644 --- a/tests/test_ckan_utils.py +++ b/tests/test_ckan_utils.py @@ -1,6 +1,10 @@ +# -*- coding: utf-8 -*- + import unittest import os import json +import re +import sys from dateutil import parser, tz from .context import pydatajson from pydatajson.ckan_utils import map_dataset_to_package, map_distributions_to_resources, convert_iso_string_to_utc @@ -23,15 +27,15 @@ class DatasetConversionTestCase(unittest.TestCase): cls.distributions = cls.dataset['distribution'] def test_catalog_id_is_prepended_to_dataset_id_if_passed(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id) self.assertEqual(self.catalog_id + '_' + self.dataset_id, package['id']) def test_dataset_id_is_preserved_if_catlog_id_is_not_passed(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner') self.assertEqual(self.dataset_id, package['id']) def test_replicated_plain_attributes_are_corrext(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id) plain_replicated_attributes = [('title', 'title'), ('notes', 'description'), ('url', 'landingPage')] @@ -40,7 +44,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual('owner', package['owner_org']) def test_dataset_nested_replicated_attributes_stay_the_same(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id) contact_point_nested = [('maintainer', 'fn'), ('maintainer_email', 'hasEmail')] for fst, snd in contact_point_nested: @@ -51,7 +55,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(self.dataset.get('publisher').get(snd), package.get(fst)) def test_dataset_array_attributes_are_correct(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id) groups = [group['name'] for group in package.get('groups', [])] super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] try: @@ -65,7 +69,8 @@ class DatasetConversionTestCase(unittest.TestCase): themes = self.dataset.get('theme', []) theme_labels = [] for theme in themes: - label = next(x['label'] for x in self.catalog.themes if x['id'] == theme) + label = self.catalog.get_theme(identifier=theme)['label'] + label = re.sub(r'[^\w .-]+', '', label, flags=re.UNICODE) theme_labels.append(label) try: @@ -74,7 +79,7 @@ class 
DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords + theme_labels, tags) def test_themes_are_preserved_if_not_demoted(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id, demote_themes=False) groups = [group['name'] for group in package.get('groups', [])] super_themes = [title_to_name(s_theme.lower()) for s_theme in self.dataset.get('superTheme')] @@ -92,7 +97,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords, tags) def test_superThemes_dont_impact_groups_if_not_demoted(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id, demote_superThemes=False) groups = [group['name'] for group in package.get('groups', [])] tags = [tag['name'] for tag in package['tags']] @@ -100,7 +105,8 @@ class DatasetConversionTestCase(unittest.TestCase): themes = self.dataset.get('theme', []) theme_labels = [] for theme in themes: - label = next(x['label'] for x in self.catalog.themes if x['id'] == theme) + label = self.catalog.get_theme(identifier=theme)['label'] + label = re.sub(r'[^\wá-úÁ-ÚñÑ .-]+', '', label, flags=re.UNICODE) theme_labels.append(label) try: self.assertItemsEqual([], groups) @@ -112,7 +118,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords + theme_labels, tags) def test_preserve_themes_and_superThemes(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', self.catalog_id, False, False) groups = [group['name'] for group in package.get('groups', [])] tags = [tag['name'] for tag in package['tags']] @@ -128,7 +134,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertCountEqual(keywords, tags) def test_dataset_extra_attributes_are_correct(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id) # extras are included in dataset if package['extras']: for extra in package['extras']: @@ -144,7 +150,7 @@ class DatasetConversionTestCase(unittest.TestCase): self.assertEqual(dataset_value, extra_value) def test_dataset_extra_attributes_are_complete(self): - package = map_dataset_to_package(self.dataset, 'owner', self.catalog.themes, catalog_id=self.catalog_id) + package = map_dataset_to_package(self.catalog, self.dataset, 'owner', catalog_id=self.catalog_id) # dataset attributes are included in extras extra_attrs = ['issued', 'modified', 'accrualPeriodicity', 'temporal', 'language', 'spatial', 'superTheme'] for key in extra_attrs: diff --git a/tests/test_federation.py b/tests/test_federation.py index e4a1d2e..e6804b9 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -1,6 +1,9 @@ +# -*- coding: utf-8 -*- + import unittest import os import re +import sys try: from mock import patch, MagicMock except ImportError: @@ -83,10 +86,10 @@ class PushDatasetTestCase(unittest.TestCase): @patch('pydatajson.federation.RemoteCKAN', autospec=True) def test_tags_are_passed_correctly(self, mock_portal): themes = self.dataset['theme'] - theme_taxonomy = self.catalog.themes keywords = [kw for kw in self.dataset['keyword']] for theme in themes: - label = next(x['label'] for x 
in theme_taxonomy if x['id'] == theme) + label = self.catalog.get_theme(identifier=theme)['label'] + label = re.sub(r'[^\w .-]+', '', label, flags=re.UNICODE) keywords.append(label) def mock_call_action(action, data_dict=None):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 3 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "mkdir tests/temp" ], "python": "3.6", "reqs_path": [ "requirements.txt", "requirements_dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 argh==0.27.2 asn1crypto==1.5.1 attrs==22.2.0 Babel==2.11.0 bumpversion==0.5.3 certifi==2021.5.30 cffi==1.15.1 chardet==3.0.4 ckanapi==4.0 CommonMark==0.5.4 coverage==4.1 cryptography==2.1.4 distlib==0.3.9 docopt==0.6.2 docutils==0.18.1 et-xmlfile==1.1.0 filelock==3.4.1 flake8==2.6.0 idna==2.6 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 isodate==0.6.0 jdcal==1.4.1 Jinja2==3.0.3 jsonschema==2.6.0 MarkupSafe==2.0.1 mccabe==0.5.3 multidict==5.2.0 nose==1.3.7 openpyxl==2.4.11 packaging==21.3 pathtools==0.1.2 pkginfo==1.10.0 platformdirs==2.4.0 pluggy==0.13.1 pockets==0.9.1 py==1.11.0 pycodestyle==2.0.0 pycparser==2.21 -e git+https://github.com/datosgobar/pydatajson.git@ef99387305d7cd46831c715c7a443f4b056baeb4#egg=pydatajson pyflakes==1.2.3 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.6.1 pytz==2025.2 PyYAML==3.11 recommonmark==0.4.0 requests==2.18.4 requests-toolbelt==1.0.0 rfc3987==1.3.7 six==1.11.0 snowballstemmer==2.2.0 Sphinx==1.5.2 sphinx-rtd-theme==0.2.4 sphinxcontrib-napoleon==0.6.1 tomli==1.2.3 tox==2.9.1 tqdm==4.64.1 twine==1.9.1 typing_extensions==4.1.1 unicodecsv==0.14.1 Unidecode==0.4.21 urllib3==1.22 vcrpy==1.11.1 virtualenv==20.17.1 watchdog==0.8.3 wrapt==1.16.0 yarl==1.7.2 zipp==3.6.0
name: pydatajson channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - argh==0.27.2 - asn1crypto==1.5.1 - attrs==22.2.0 - babel==2.11.0 - bumpversion==0.5.3 - cffi==1.15.1 - chardet==3.0.4 - ckanapi==4.0 - commonmark==0.5.4 - coverage==4.1 - cryptography==2.1.4 - distlib==0.3.9 - docopt==0.6.2 - docutils==0.18.1 - et-xmlfile==1.1.0 - filelock==3.4.1 - flake8==2.6.0 - idna==2.6 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - isodate==0.6.0 - jdcal==1.4.1 - jinja2==3.0.3 - jsonschema==2.6.0 - markupsafe==2.0.1 - mccabe==0.5.3 - multidict==5.2.0 - nose==1.3.7 - openpyxl==2.4.11 - packaging==21.3 - pathtools==0.1.2 - pkginfo==1.10.0 - platformdirs==2.4.0 - pluggy==0.13.1 - pockets==0.9.1 - py==1.11.0 - pycodestyle==2.0.0 - pycparser==2.21 - pyflakes==1.2.3 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.6.1 - pytz==2025.2 - pyyaml==3.11 - recommonmark==0.4.0 - requests==2.18.4 - requests-toolbelt==1.0.0 - rfc3987==1.3.7 - six==1.11.0 - snowballstemmer==2.2.0 - sphinx==1.5.2 - sphinx-rtd-theme==0.2.4 - sphinxcontrib-napoleon==0.6.1 - tomli==1.2.3 - tox==2.9.1 - tqdm==4.64.1 - twine==1.9.1 - typing-extensions==4.1.1 - unicodecsv==0.14.1 - unidecode==0.04.21 - urllib3==1.22 - vcrpy==1.11.1 - virtualenv==20.17.1 - watchdog==0.8.3 - wrapt==1.16.0 - yarl==1.7.2 - zipp==3.6.0 prefix: /opt/conda/envs/pydatajson
[ "tests/test_ckan_utils.py::DatasetConversionTestCase::test_catalog_id_is_prepended_to_dataset_id_if_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_array_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_complete", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_extra_attributes_are_correct", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_id_is_preserved_if_catlog_id_is_not_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_dataset_nested_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_preserve_themes_and_superThemes", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_replicated_plain_attributes_are_corrext", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_superThemes_dont_impact_groups_if_not_demoted", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_themes_are_preserved_if_not_demoted" ]
[]
[ "tests/test_ckan_utils.py::DatasetConversionTestCase::test_catalog_id_is_prefixed_in_resource_id_if_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resource_id_is_preserved_if_catalog_id_is_not_passed", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_extra_attributes_are_created_correctly", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_replicated_attributes_stay_the_same", "tests/test_ckan_utils.py::DatasetConversionTestCase::test_resources_transformed_attributes_are_correct", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_change_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_dates_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_microseconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_seconds_are_handled_correctly", "tests/test_ckan_utils.py::DatetimeConversionTests::test_datetimes_without_timezones_stay_the_same", "tests/test_ckan_utils.py::DatetimeConversionTests::test_timezones_are_handled_correctly", "tests/test_federation.py::PushDatasetTestCase::test_dataset_id_is_preserved_if_catalog_id_is_not_passed", "tests/test_federation.py::PushDatasetTestCase::test_dataset_without_license_sets_notspecified", "tests/test_federation.py::PushDatasetTestCase::test_id_is_created_correctly", "tests/test_federation.py::PushDatasetTestCase::test_id_is_updated_correctly", "tests/test_federation.py::PushDatasetTestCase::test_licenses_are_interpreted_correctly", "tests/test_federation.py::PushDatasetTestCase::test_tags_are_passed_correctly", "tests/test_federation.py::RemoveDatasetTestCase::test_empty_search_doesnt_call_purge", "tests/test_federation.py::RemoveDatasetTestCase::test_filter_in_datasets", "tests/test_federation.py::RemoveDatasetTestCase::test_filter_in_out_datasets", "tests/test_federation.py::RemoveDatasetTestCase::test_query_one_dataset", "tests/test_federation.py::RemoveDatasetTestCase::test_query_over_500_datasets", "tests/test_federation.py::RemoveDatasetTestCase::test_remove_through_filters_and_organization" ]
[]
MIT License
2,298
[ "pydatajson/search.py", "pydatajson/ckan_utils.py", "pydatajson/federation.py" ]
[ "pydatajson/search.py", "pydatajson/ckan_utils.py", "pydatajson/federation.py" ]
G-Node__python-odml-251
8953343f0f4616c0a71087d406b5f6d4a2036748
2018-03-16 21:34:58
eeff5922987b064681d1328f81af317d8171808f
diff --git a/odml/doc.py b/odml/doc.py index 8d75f2b..ea15912 100644 --- a/odml/doc.py +++ b/odml/doc.py @@ -34,10 +34,13 @@ class BaseDocument(base.sectionable, Document): print(e) self._id = str(uuid.uuid4()) self._author = author - self._date = date # date must be a datetime self._version = version self._repository = repository + # Make sure date is properly parsed into a datetime object + self._date = None + self.date = date + @property def id(self): """ diff --git a/odml/dtypes.py b/odml/dtypes.py index 39d1e8d..e86bf8d 100644 --- a/odml/dtypes.py +++ b/odml/dtypes.py @@ -1,5 +1,5 @@ import sys -import datetime +import datetime as dt from enum import Enum self = sys.modules[__name__].__dict__ @@ -12,6 +12,10 @@ try: except NameError: unicode = str +FORMAT_DATE = "%Y-%m-%d" +FORMAT_DATETIME = "%Y-%m-%d %H:%M:%S" +FORMAT_TIME = "%H:%M:%S" + class DType(str, Enum): string = 'string' @@ -44,11 +48,11 @@ def default_values(dtype): return default_dtype_value[dtype] if dtype == 'datetime': - return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + return dt.datetime.now().replace(microsecond=0) if dtype == 'date': - return datetime.datetime.now().strftime('%Y-%m-%d') + return dt.datetime.now().date() if dtype == 'time': - return datetime.datetime.now().strftime('%H:%M:%S') + return dt.datetime.now().replace(microsecond=0).time() return '' # Maybe return None ? @@ -65,9 +69,9 @@ def infer_dtype(value): if dtype == 'string' and '\n' in value: dtype = 'text' return dtype - else: - # If unable to infer a dtype of given value, return defalt as *string* - return 'string' + + # If unable to infer a dtype of given value, return default as *string* + return 'string' def valid_type(dtype): @@ -109,14 +113,15 @@ def set(value, dtype=None): if isinstance(value, str): return str_set(value) else: - if type(value) in (str, unicode): + if isinstance(value, (str, unicode)): return str_set(value) return self.get(dtype + "_set", str_set)(value) def int_get(string): - if not string: - return 0 + if string is None or string == "": + return default_values("int") + try: return int(string) except ValueError: @@ -125,14 +130,20 @@ def int_get(string): def float_get(string): - if not string: - return 0.0 + if string is None or string == "": + return default_values("float") + return float(string) def str_get(string): + # Do not stringify empty list or dict but make sure boolean False gets through. 
+ if string in [None, "", [], {}]: + return default_values("string") + if sys.version_info < (3, 0): return unicode(string) + return str(string) @@ -144,71 +155,65 @@ string_set = str_get def time_get(string): - if not string: - return None - if type(string) is datetime.time: - return datetime.datetime.strptime(string.strftime('%H:%M:%S'), - '%H:%M:%S').time() - else: - return datetime.datetime.strptime(string, '%H:%M:%S').time() + if string is None or string == "": + return default_values("time") + if isinstance(string, dt.time): + return dt.datetime.strptime(string.strftime(FORMAT_TIME), FORMAT_TIME).time() -def time_set(value): - if not value: - return None - if type(value) is datetime.time: - return value.strftime("%H:%M:%S") - return value.isoformat() + return dt.datetime.strptime(string, FORMAT_TIME).time() + + +time_set = time_get def date_get(string): - if not string: - return None - if type(string) is datetime.date: - return datetime.datetime.strptime(string.isoformat(), - '%Y-%m-%d').date() - else: - return datetime.datetime.strptime(string, '%Y-%m-%d').date() + if string is None or string == "": + return default_values("date") + + if isinstance(string, dt.date): + return dt.datetime.strptime(string.isoformat(), FORMAT_DATE).date() + + return dt.datetime.strptime(string, FORMAT_DATE).date() -date_set = time_set +date_set = date_get def datetime_get(string): - if not string: - return None - if type(string) is datetime.datetime: - return datetime.datetime.strptime(string.strftime('%Y-%m-%d %H:%M:%S'), - '%Y-%m-%d %H:%M:%S') - else: - return datetime.datetime.strptime(string, '%Y-%m-%d %H:%M:%S') + if string is None or string == "": + return default_values("datetime") + if isinstance(string, dt.datetime): + return dt.datetime.strptime(string.strftime(FORMAT_DATETIME), FORMAT_DATETIME) -def datetime_set(value): - if not value: - return None - if type(value) is datetime.datetime: - return value.strftime('%Y-%m-%d %H:%M:%S') - else: - return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S') + return dt.datetime.strptime(string, FORMAT_DATETIME) + + +datetime_set = datetime_get def boolean_get(string): - if string is None: - return None - if type(string) in (unicode, str): + if string in [None, "", [], {}]: + return default_values("boolean") + + if isinstance(string, (unicode, str)): string = string.lower() + truth = ["true", "1", True, "t"] # be kind, spec only accepts True / False if string in truth: return True + false = ["false", "0", False, "f"] if string in false: return False + # disallow any values that cannot be interpreted as boolean. raise ValueError # Alias boolean_set to boolean_get. Both perform same function. + boolean_set = boolean_get bool_get = boolean_get bool_set = boolean_set diff --git a/odml/tools/dict_parser.py b/odml/tools/dict_parser.py index 0d2cdf0..3dd95a9 100644 --- a/odml/tools/dict_parser.py +++ b/odml/tools/dict_parser.py @@ -83,7 +83,13 @@ class DictWriter: if isinstance(tag, tuple): prop_dict[attr] = list(tag) elif (tag == []) or tag: # Even if 'value' is empty, allow '[]' - prop_dict[attr] = tag + # Custom odML tuples require special handling + # for save loading from file. 
+ if attr == "value" and prop.dtype and \ + prop.dtype.endswith("-tuple") and len(prop.value) > 0: + prop_dict["value"] = "(%s)" % ";".join(prop.value[0]) + else: + prop_dict[attr] = tag props_seq.append(prop_dict) @@ -178,17 +184,13 @@ class DictReader: for _property in props_list: prop_attrs = {} - values = [] for i in _property: attr = self.is_valid_attribute(i, odmlfmt.Property) - if attr == 'value': - values = _property['value'] if attr: prop_attrs[attr] = _property[attr] prop = odmlfmt.Property.create(**prop_attrs) - prop.value = values odml_props.append(prop) return odml_props diff --git a/odml/tools/odmlparser.py b/odml/tools/odmlparser.py index 1caa7fa..641a52a 100644 --- a/odml/tools/odmlparser.py +++ b/odml/tools/odmlparser.py @@ -5,6 +5,7 @@ A generic odML parsing module. Parses odML files and documents. """ +import datetime import json import yaml @@ -67,11 +68,21 @@ class ODMLWriter: if self.parser == 'YAML': string_doc = yaml.dump(odml_output, default_flow_style=False) elif self.parser == 'JSON': - string_doc = json.dumps(odml_output, indent=4) + string_doc = json.dumps(odml_output, indent=4, + cls=JSONDateTimeSerializer) return string_doc +# Required to serialize datetime values with JSON. +class JSONDateTimeSerializer(json.JSONEncoder): + def default(self, o): + if isinstance(o, (datetime.datetime, datetime.date, datetime.time)): + return str(o) + + return json.JSONEncoder.default(self, o) + + class ODMLReader: """ A reader to parse odML files or strings into odml documents, diff --git a/odml/tools/xmlparser.py b/odml/tools/xmlparser.py index a0d48c1..f2ea862 100644 --- a/odml/tools/xmlparser.py +++ b/odml/tools/xmlparser.py @@ -83,7 +83,11 @@ class XMLWriter: if val is None: continue if isinstance(fmt, format.Property.__class__) and k == "value": - ele = E(k, to_csv(val)) + # Custom odML tuples require special handling for save loading from file. + if e.dtype and e.dtype.endswith("-tuple") and len(val) > 0: + ele = E(k, "(%s)" % ";".join(val[0])) + else: + ele = E(k, to_csv(val)) cur.append(ele) else: if isinstance(val, list):
[dtype] Return defined default values Currently the "get" methods in `dtype.py` return magic numbers as default values. Make them return the already defined default values instead.
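A minimal sketch of the requested pattern, assuming the existing default_values helper in odml/dtypes.py; the simplified int_get below is illustrative rather than the library's actual implementation.

```python
from odml.dtypes import default_values


def int_get(string):
    # Fall back to the shared, defined default instead of a hard-coded 0.
    if string is None or string == "":
        return default_values("int")
    return int(string)
```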
G-Node/python-odml
diff --git a/test/test_dtypes.py b/test/test_dtypes.py index 6e90e5e..bbc3d35 100644 --- a/test/test_dtypes.py +++ b/test/test_dtypes.py @@ -1,8 +1,7 @@ +import datetime import unittest import odml.dtypes as typ -import odml -import datetime class TestTypes(unittest.TestCase): @@ -11,42 +10,150 @@ class TestTypes(unittest.TestCase): pass def test_date(self): + self.assertIsInstance(typ.date_get(None), datetime.date) + self.assertIsInstance(typ.date_get(""), datetime.date) + + re = "^[0-9]{4}-(0[1-9]|1[0-2])-([0-2][0-9]|3[0-1])$" + self.assertRegexpMatches(typ.date_get(None).strftime(typ.FORMAT_DATE), re) + self.assertRegexpMatches(typ.date_get("").strftime(typ.FORMAT_DATE), re) + date = datetime.date(2011, 12, 1) date_string = '2011-12-01' self.assertEqual(date, typ.date_get(date_string)) - self.assertEqual(typ.date_set(date), date_string) + self.assertEqual(date, typ.date_get(date)) + + with self.assertRaises(TypeError): + _ = typ.date_get([]) + with self.assertRaises(TypeError): + _ = typ.date_get({}) + with self.assertRaises(TypeError): + _ = typ.date_get(False) + + # Test fail on datetime.datetime + with self.assertRaises(ValueError): + _ = typ.date_get(datetime.datetime.now()) + + # Test fail on datetime.time + with self.assertRaises(TypeError): + _ = typ.date_get(datetime.datetime.now().time()) + + # Test fail on invalid string format + with self.assertRaises(ValueError): + _ = typ.date_get("11.11.1111") def test_time(self): + self.assertIsInstance(typ.time_get(None), datetime.time) + self.assertIsInstance(typ.time_get(""), datetime.time) + + re = "^[0-5][0-9]:[0-5][0-9]:[0-5][0-9]$" + self.assertRegexpMatches(typ.time_get(None).strftime(typ.FORMAT_TIME), re) + self.assertRegexpMatches(typ.time_get("").strftime(typ.FORMAT_TIME), re) + time = datetime.time(12, 34, 56) time_string = '12:34:56' self.assertEqual(time, typ.time_get(time_string)) - self.assertEqual(typ.time_set(time), time_string) + self.assertEqual(time, typ.time_get(time)) + + with self.assertRaises(TypeError): + _ = typ.time_get([]) + with self.assertRaises(TypeError): + _ = typ.time_get({}) + with self.assertRaises(TypeError): + _ = typ.time_get(False) + + # Test fail on datetime.datetime + with self.assertRaises(TypeError): + _ = typ.time_get(datetime.datetime.now()) + + # Test fail on datetime.date + with self.assertRaises(TypeError): + _ = typ.time_get(datetime.datetime.now().date()) + + # Test fail on invalid string format + with self.assertRaises(ValueError): + _ = typ.time_get("11-11-11") def test_datetime(self): + self.assertIsInstance(typ.datetime_get(None), datetime.datetime) + self.assertIsInstance(typ.datetime_get(""), datetime.datetime) + + re = "^[0-9]{4}-(0[1-9]|1[0-2])-([0-2][0-9]|3[0-1]) " \ + "[0-5][0-9]:[0-5][0-9]:[0-5][0-9]$" + self.assertRegexpMatches(typ.datetime_get(None).strftime(typ.FORMAT_DATETIME), re) + self.assertRegexpMatches(typ.datetime_get("").strftime(typ.FORMAT_DATETIME), re) + date = datetime.datetime(2011, 12, 1, 12, 34, 56) date_string = '2011-12-01 12:34:56' self.assertEqual(date, typ.datetime_get(date_string)) - self.assertEqual(typ.datetime_set(date), date_string) + self.assertEqual(date, typ.datetime_get(date)) + + with self.assertRaises(TypeError): + _ = typ.datetime_get([]) + with self.assertRaises(TypeError): + _ = typ.datetime_get({}) + with self.assertRaises(TypeError): + _ = typ.datetime_get(False) + + # Test fail on datetime.time + with self.assertRaises(TypeError): + _ = typ.datetime_get(datetime.datetime.now().time()) + + # Test fail on datetime.date + with 
self.assertRaises(TypeError): + _ = typ.datetime_get(datetime.datetime.now().date()) + + # Test fail on invalid string format + with self.assertRaises(ValueError): + _ = typ.datetime_get("11.11.1111 12:12:12") def test_int(self): - p = odml.Property("test", value="123456789012345678901", dtype="int") - self.assertEqual(p.value[0], 123456789012345678901) - p = odml.Property("test", value="-123456789012345678901", dtype="int") - self.assertEqual(p.value[0], -123456789012345678901) - p = odml.Property("test", value="123.45", dtype="int") - self.assertEqual(p.value[0], 123) + self.assertEqual(typ.default_values("int"), typ.int_get(None)) + self.assertEqual(typ.default_values("int"), typ.int_get("")) + + self.assertIsInstance(typ.int_get(11), int) + self.assertIsInstance(typ.int_get(1.1), int) + self.assertIsInstance(typ.int_get("11"), int) + self.assertEqual(typ.int_get("123456789012345678901"), 123456789012345678901) + self.assertEqual(typ.int_get("-123456789012345678901"), -123456789012345678901) + self.assertEqual(typ.int_get("123.45"), 123) + + with self.assertRaises(TypeError): + _ = typ.int_get([]) + with self.assertRaises(TypeError): + _ = typ.int_get({}) + with self.assertRaises(ValueError): + _ = typ.int_get("fail") + + def test_float(self): + self.assertEqual(typ.default_values("float"), typ.float_get(None)) + self.assertEqual(typ.default_values("float"), typ.float_get("")) + + self.assertIsInstance(typ.float_get(1), float) + self.assertIsInstance(typ.float_get("1.1"), float) + self.assertEqual(typ.float_get(123.45), 123.45) + + with self.assertRaises(TypeError): + _ = typ.float_get([]) + with self.assertRaises(TypeError): + _ = typ.float_get({}) + with self.assertRaises(ValueError): + _ = typ.float_get("fail") def test_str(self): - s = odml.Property(name='Name', value='Sherin') - self.assertEqual(s.value[0], 'Sherin') - self.assertEqual(s.dtype, 'string') + self.assertEqual(typ.default_values("string"), typ.str_get(None)) + self.assertEqual(typ.default_values("string"), typ.str_get("")) + self.assertEqual(typ.default_values("string"), typ.str_get([])) + self.assertEqual(typ.default_values("string"), typ.str_get({})) - s.value = 'Jerin' - self.assertEqual(s.value[0], 'Jerin') - self.assertEqual(s.dtype, 'string') + # Make sure boolean values are properly converted to string. + self.assertEqual(typ.str_get(False), 'False') + self.assertEqual(typ.str_get(True), 'True') def test_bool(self): - self.assertEqual(None, typ.boolean_get(None)) + self.assertEqual(typ.default_values("boolean"), typ.boolean_get(None)) + self.assertEqual(typ.default_values("boolean"), typ.boolean_get("")) + self.assertEqual(typ.default_values("boolean"), typ.boolean_get([])) + self.assertEqual(typ.default_values("boolean"), typ.boolean_get({})) true_values = [True, "TRUE", "true", "T", "t", "1", 1] for val in true_values: @@ -64,18 +171,17 @@ class TestTypes(unittest.TestCase): typ.boolean_get(2.1) def test_tuple(self): - # Success test - t = odml.Property(name="Location", value='(39.12; 67.19)', dtype='2-tuple') - tuple_value = t.value[0] # As the formed tuple is a list of list - self.assertEqual(tuple_value[0], '39.12') - self.assertEqual(tuple_value[1], '67.19') + self.assertIs(typ.tuple_get(""), None) + self.assertIs(typ.tuple_get(None), None) - # Failure test. More tuple values then specified. 
- with self.assertRaises(ValueError): - t = odml.Property(name="Public-Key", value='(5689; 1254; 687)', - dtype='2-tuple') + self.assertEqual(typ.tuple_get("(39.12; 67.19)"), ["39.12", "67.19"]) + + # Test fail on missing parenthesis. + with self.assertRaises(AssertionError): + _ = typ.tuple_get("fail") + # Test fail on mismatching element count and count number. + with self.assertRaises(AssertionError): + _ = typ.tuple_get("(1; 2; 3)", 2) def test_dtype_none(self): - t = odml.Property(name="Record", value={'name': 'Marie'}) - self.assertEqual(t.dtype, 'string') - self.assertEqual(t.value[0], "{'name': 'Marie'}") + self.assertEqual(typ.get({'name': 'Marie'}), "{'name': 'Marie'}") diff --git a/test/test_infer_type.py b/test/test_infer_type.py index 8909f85..7f27bc4 100644 --- a/test/test_infer_type.py +++ b/test/test_infer_type.py @@ -11,51 +11,51 @@ class TestInferType(unittest.TestCase): p = Property("test", value="somestring") assert(p.dtype == "string") if sys.version_info < (3, 0): - assert (type(p.value[0]) == unicode) + assert isinstance(p.value[0], unicode) else: - assert (type(p.value[0]) == str) + assert isinstance(p.value[0], str) def test_text(self): p = Property("test", value="some\nstring") assert(p.dtype == "text") if sys.version_info < (3, 0): - assert (type(p.value[0]) == unicode) + assert isinstance(p.value[0], unicode) else: - assert (type(p.value[0]) == str) + assert isinstance(p.value[0], str) def test_int(self): p = Property("test", value=111) assert(p.dtype == "int") - assert(type(p.value[0]) == int) + assert isinstance(p.value[0], int) def test_float(self): p = Property("test", value=3.14) assert(p.dtype == "float") - assert(type(p.value[0]) == float) + assert isinstance(p.value[0], float) def test_datetime(self): p = Property("test", value=dt.now()) assert(p.dtype == "datetime") - assert(type(p.value[0]) == dt) + assert isinstance(p.value[0], dt) def test_date(self): p = Property("test", dt.now().date()) assert(p.dtype == "date") - assert(type(p.value[0]) == date) + assert isinstance(p.value[0], date) def test_time(self): p = Property("test", value=dt.now().time()) assert(p.dtype == "time") - assert(type(p.value[0]) == time) + assert isinstance(p.value[0], time) def test_boolean(self): p = Property("test", True) assert(p.dtype == "boolean") - assert(type(p.value[0]) == bool) + assert isinstance(p.value[0], bool) p = Property("test", False) assert(p.dtype == "boolean") - assert(type(p.value[0]) == bool) + assert isinstance(p.value[0], bool) def test_read_write(self): doc = Document("author") @@ -79,37 +79,37 @@ class TestInferType(unittest.TestCase): p = new_sec.properties["strprop"] assert(p.dtype == "string") if sys.version_info < (3, 0): - assert(type(p.value[0]) == unicode) + assert isinstance(p.value[0], unicode) else: - assert(type(p.value[0]) == str) + assert isinstance(p.value[0], str) p = new_sec.properties["txtprop"] assert(p.dtype == "text") if sys.version_info < (3, 0): - assert(type(p.value[0]) == unicode) + assert isinstance(p.value[0], unicode) else: - assert(type(p.value[0]) == str) + assert isinstance(p.value[0], str) p = new_sec.properties["intprop"] assert(p.dtype == "int") - assert(type(p.value[0]) == int) + assert isinstance(p.value[0], int) p = new_sec.properties["floatprop"] assert(p.dtype == "float") - assert(type(p.value[0]) == float) + assert isinstance(p.value[0], float) p = new_sec.properties["datetimeprop"] assert(p.dtype == "datetime") - assert(type(p.value[0]) == dt) + assert isinstance(p.value[0], dt) p = new_sec.properties["dateprop"] 
assert(p.dtype == "date") - assert(type(p.value[0]) == date) + assert isinstance(p.value[0], date) p = new_sec.properties["timeprop"] assert(p.dtype == "time") - assert(type(p.value[0]) == time) + assert isinstance(p.value[0], time) p = new_sec.properties["boolprop"] assert(p.dtype == "boolean") - assert(type(p.value[0]) == bool) + assert isinstance(p.value[0], bool) diff --git a/test/test_property.py b/test/test_property.py index c122f97..cbcaade 100644 --- a/test/test_property.py +++ b/test/test_property.py @@ -73,6 +73,24 @@ class TestProperty(unittest.TestCase): p6 = Property("test", {"name": "Marie", "name":"Johanna"}) self.assertEqual(len(p6), 1) + # Test tuple dtype value. + t = Property(name="Location", value='(39.12; 67.19)', dtype='2-tuple') + tuple_value = t.value[0] # As the formed tuple is a list of list + self.assertEqual(tuple_value[0], '39.12') + self.assertEqual(tuple_value[1], '67.19') + + # Test invalid tuple length + with self.assertRaises(ValueError): + _ = Property(name="Public-Key", value='(5689; 1254; 687)', dtype='2-tuple') + + # Test missing tuple length. + with self.assertRaises(ValueError): + _ = Property(name="Public-Key", value='(5689; 1254; 687)', dtype='-tuple') + + # Test invalid tuple format. + with self.assertRaises(ValueError): + _ = Property(name="Public-Key", value='5689; 1254; 687', dtype='3-tuple') + def test_get_set_value(self): values = [1, 2, 3, 4, 5] p = Property("property", value=values) diff --git a/test/test_samplefile.py b/test/test_samplefile.py index 92ae8ec..3bd5ec6 100644 --- a/test/test_samplefile.py +++ b/test/test_samplefile.py @@ -197,7 +197,7 @@ class AttributeTest(unittest.TestCase): def test_conversion_int_to_float(self): p = odml.Property("test", "1", dtype="int") self.assertEqual(p.dtype, "int") - self.assertEqual(type(p.value[0]), int) + self.assertIsInstance(p.value[0], int) p.dtype = "float" # change dtype self.assertEqual(p.dtype, "float") self.assertEqual(p.value[0], 1.0)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 5 }
1.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libxml2-dev libxslt1-dev lib32z1-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work isodate==0.6.1 lxml==5.3.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/G-Node/python-odml.git@8953343f0f4616c0a71087d406b5f6d4a2036748#egg=odML packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 PyYAML==6.0.1 rdflib==5.0.0 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: python-odml channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - isodate==0.6.1 - lxml==5.3.1 - pyyaml==6.0.1 - rdflib==5.0.0 - six==1.17.0 prefix: /opt/conda/envs/python-odml
[ "test/test_dtypes.py::TestTypes::test_bool", "test/test_dtypes.py::TestTypes::test_date", "test/test_dtypes.py::TestTypes::test_datetime", "test/test_dtypes.py::TestTypes::test_float", "test/test_dtypes.py::TestTypes::test_int", "test/test_dtypes.py::TestTypes::test_str", "test/test_dtypes.py::TestTypes::test_time" ]
[]
[ "test/test_dtypes.py::TestTypes::test_dtype_none", "test/test_dtypes.py::TestTypes::test_tuple", "test/test_infer_type.py::TestInferType::test_boolean", "test/test_infer_type.py::TestInferType::test_date", "test/test_infer_type.py::TestInferType::test_datetime", "test/test_infer_type.py::TestInferType::test_float", "test/test_infer_type.py::TestInferType::test_int", "test/test_infer_type.py::TestInferType::test_read_write", "test/test_infer_type.py::TestInferType::test_string", "test/test_infer_type.py::TestInferType::test_text", "test/test_infer_type.py::TestInferType::test_time", "test/test_property.py::TestProperty::test_bool_conversion", "test/test_property.py::TestProperty::test_dtype", "test/test_property.py::TestProperty::test_get_set_value", "test/test_property.py::TestProperty::test_merge", "test/test_property.py::TestProperty::test_name", "test/test_property.py::TestProperty::test_parent", "test/test_property.py::TestProperty::test_path", "test/test_property.py::TestProperty::test_set_id", "test/test_property.py::TestProperty::test_str_to_int_convert", "test/test_property.py::TestProperty::test_value", "test/test_property.py::TestProperty::test_value_origin", "test/test_samplefile.py::SampleFileCreatorTest::test_samplefile", "test/test_samplefile.py::SampleFileOperationTest::test_find_key", "test/test_samplefile.py::SampleFileOperationTest::test_restore", "test/test_samplefile.py::SampleFileOperationTest::test_save", "test/test_samplefile.py::SampleFileOperationTest::test_xml_writer_version", "test/test_samplefile.py::AttributeTest::test_conversion_float_to_int", "test/test_samplefile.py::AttributeTest::test_conversion_int_to_float", "test/test_samplefile.py::AttributeTest::test_value_float", "test/test_samplefile.py::AttributeTest::test_value_int", "test/test_samplefile.py::CopyTest::test_dependence", "test/test_samplefile.py::CopyTest::test_independence", "test/test_samplefile.py::MiscTest::test_findall_related", "test/test_samplefile.py::MiscTest::test_get_property_by_path", "test/test_samplefile.py::MiscTest::test_get_section_by_path", "test/test_samplefile.py::MiscTest::test_paths", "test/test_samplefile.py::MiscTest::test_reorder_first", "test/test_samplefile.py::MiscTest::test_reorder_post", "test/test_samplefile.py::MiscTest::test_save_version", "test/test_samplefile.py::MiscTest::test_section_path" ]
[]
BSD 4-Clause "Original" or "Old" License
2,299
[ "odml/tools/dict_parser.py", "odml/doc.py", "odml/tools/odmlparser.py", "odml/tools/xmlparser.py", "odml/dtypes.py" ]
[ "odml/tools/dict_parser.py", "odml/doc.py", "odml/tools/odmlparser.py", "odml/tools/xmlparser.py", "odml/dtypes.py" ]
UBC-MDS__PyPunisher-72
859c2f19db06c3bb7b488645f65dd286a1ba2a65
2018-03-17 05:02:23
859c2f19db06c3bb7b488645f65dd286a1ba2a65
diff --git a/pypunisher/__init__.py b/pypunisher/__init__.py index 97d4631..0730e06 100644 --- a/pypunisher/__init__.py +++ b/pypunisher/__init__.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + """ PyPunisher ========== diff --git a/pypunisher/_checks.py b/pypunisher/_checks.py index a268a99..ad8a08f 100644 --- a/pypunisher/_checks.py +++ b/pypunisher/_checks.py @@ -1,8 +1,8 @@ -""" - - Checks - ~~~~~~ +#!/usr/bin/env python +""" +Checks +====== """ import numpy as np @@ -48,20 +48,21 @@ def input_checks(locals_): """ # Sort so that the order of the parameter name # are in a reliable (alphabetical) order. - param_a, param_b = sorted(k for k, p in locals_.items() if k != 'self') - locals_non_non = {k: v for k, v in locals_.items() - if v is not None and k != 'self'} + ALLOWED = ('min_change', 'n_features') + param_a, param_b = sorted(k for k, p in locals_.items() if k in ALLOWED) + locals_non_none = {k: v for k, v in locals_.items() + if v is not None and k in ALLOWED} - if len(locals_non_non) != 1: + if len(locals_non_none) != 1: raise TypeError( "At least one of `{a}` and `{b}` must be None.".format( a=param_a, b=param_b ) ) - # Unpack the single key and value pair - name, obj = tuple(locals_non_non.items())[0] - if obj is None and not isinstance(obj, (int, float)): + # Unpack the single key and value pair. + name, obj = tuple(locals_non_none.items())[0] + if not isinstance(obj, (int, float)): raise TypeError( "`{}` must be of type int or float.".format(name) ) diff --git a/pypunisher/metrics/__init__.py b/pypunisher/metrics/__init__.py index 0ef76a8..8fe54ef 100644 --- a/pypunisher/metrics/__init__.py +++ b/pypunisher/metrics/__init__.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + """ Metrics ======= diff --git a/pypunisher/metrics/criterion.py b/pypunisher/metrics/criterion.py index 91c5150..70e599b 100644 --- a/pypunisher/metrics/criterion.py +++ b/pypunisher/metrics/criterion.py @@ -1,8 +1,8 @@ -""" - - Information Criterion - ~~~~~~~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" + Information Criterion + ===================== """ from numpy import log, ndarray, pi from pypunisher._checks import model_check diff --git a/pypunisher/selection_engines/__init__.py b/pypunisher/selection_engines/__init__.py index 65479f8..aac2dd9 100644 --- a/pypunisher/selection_engines/__init__.py +++ b/pypunisher/selection_engines/__init__.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + """ Selection Engines ================= diff --git a/pypunisher/selection_engines/_utils.py b/pypunisher/selection_engines/_utils.py index 9315f08..013198a 100644 --- a/pypunisher/selection_engines/_utils.py +++ b/pypunisher/selection_engines/_utils.py @@ -1,8 +1,8 @@ -""" - - Utils - ~~~~~ +#!/usr/bin/env python +""" +Utils +===== """ def get_n_features(matrix, min_=2): @@ -47,8 +47,7 @@ def enforce_use_of_all_cpus(model): exists """ - if hasattr(model, 'n_jobs'): - setattr(model, 'n_jobs', -1) + setattr(model, 'n_jobs', -1) return model diff --git a/pypunisher/selection_engines/selection.py b/pypunisher/selection_engines/selection.py index 067a3f5..561aec7 100644 --- a/pypunisher/selection_engines/selection.py +++ b/pypunisher/selection_engines/selection.py @@ -1,8 +1,8 @@ -""" - - Forward and Backward Selection Algorithms - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" +Forward and Backward Selection Algorithms +========================================= """ from pypunisher.metrics.criterion import aic, bic from pypunisher._checks import model_check, array_check, input_checks @@ -93,6 +93,27 @@ class 
Selection(object): score = self._model.score(X_val, y_val) return score + @staticmethod + def _do_not_skip(kwargs): + """Check for skipping override by looking + for `_do_not_skip` in keyword arguments + If it is present, the loops in the algorithms + will be run to exhaustion. + + Args: + kwargs : dict + Keyword Args + + Returns: + Bool + If `_do_not_skip` is not present + or `_do_not_skip` is present and is True. + Otherwise, the value of `do_not_skip` + is returned. + + """ + return kwargs.get('_do_not_skip', True) + def _forward_break_criteria(self, S, min_change, best_j_score, j_score_dict, n_features): """Check if `forward()` should break @@ -128,7 +149,7 @@ class Selection(object): else: return False - def forward(self, min_change=0.5, n_features=None): + def forward(self, min_change=0.5, n_features=None, **kwargs): """Perform Forward Selection on a Sklearn model. Args: @@ -140,6 +161,10 @@ class Selection(object): Note: `min_change` must be None in order for `n_features` to operate. Floats will be regarded as proportions of the total that must lie on (0, 1). + kwargs : Keyword Args + Includes: + * `_do_not_skip`: for interal use only; it is + not recommended that users use this parameter. Returns: S : list @@ -150,14 +175,18 @@ class Selection(object): S = list() best_score = None itera = list(range(self._total_number_of_features)) + do_not_skip = self._do_not_skip(kwargs) - if n_features: + if n_features and do_not_skip: n_features = parse_n_features(n_features, total=len(itera)) for i in range(self._total_number_of_features): if self._verbose: print("Iteration: {}".format(i)) + if not do_not_skip: + continue + # 1. Find best feature, j, to add. j_score_dict = dict() for j in itera: @@ -182,7 +211,7 @@ class Selection(object): return S - def backward(self, n_features=0.5, min_change=None): + def backward(self, n_features=0.5, min_change=None, **kwargs): """Perform Backward Selection on a Sklearn model. Args: @@ -194,6 +223,14 @@ class Selection(object): min_change : int or float, optional The smallest change to be considered significant. `n_features` must be None for `min_change` to operate. + kwargs : Keyword Args + Includes: + * `_do_not_skip` : bool + Explore loop exhaustion. + **For internal use only**; Not intended for outside use. + * `_last_score_punt` : bool + Relax `defeated_last_iter_score` decision boundary. + **For internal use only**. Not intended for outside use. Returns: S : list @@ -205,8 +242,10 @@ class Selection(object): """ input_checks(locals()) S = list(range(self._total_number_of_features)) # start with all features + do_not_skip = self._do_not_skip(kwargs) + last_score_punt = kwargs.get('_last_score_punt', False) - if n_features: + if n_features and do_not_skip: n_features = parse_n_features(n_features, total=len(S)) last_iter_score = self._fit_and_score(S, feature=None, algorithm='backward') @@ -215,6 +254,9 @@ class Selection(object): if self._verbose: print("Iteration: {}".format(i)) + if not do_not_skip: + continue + # 1. Hunt for the least predictive feature. best = {'feature': None, 'score': None, 'defeated_last_iter_score': True} for j in S: @@ -228,13 +270,13 @@ class Selection(object): if isinstance(n_features, int): S.remove(to_drop) # blindly drop. last_iter_score = best_new_score - if len(S) == n_features: - break - else: + if not len(S) == n_features: continue # i.e., ignore criteria below. + else: + break # 2b. Halt if the change is not longer considered significant. 
- if isinstance(min_change, (int, float)): - if best['defeated_last_iter_score']: + else: + if best['defeated_last_iter_score'] or last_score_punt: if (best_new_score - last_iter_score) < min_change: break # there was a change, but it was not large enough. else: @@ -243,8 +285,4 @@ class Selection(object): else: break - # 2c. Halt if only one feature remains. - if len(S) == 1: - break - return S
Feedback on Milestone 2 Hi All, Nice work on milestone 2. I like your comprehensive design for the entire package. Here are my comments: 1. Good practice to state that installation requires Python 3.6 2. I like your coverage section detailing your test coverage, excellent 3. For __init__.py, lines 4 and 5, why not just use list.append() to add the version number? 4. For selection_engines/__init__.py, I like your comments on the issue in scipy 5. Please improve your Python programming style; you can refer to https://google.github.io/styleguide/pyguide.html. As in selection.py line 4, this is not professional, and the spacing you have between lines is not equal. 6. For the first line of your Python files, it is suggested that you include #!/usr/bin/env python in case the user is running your code on Linux (like me) 7. For _fit_and_score(self, S, feature, algorithm), what happens if the algorithm argument is an invalid value? Have you thought about that? 8. For the function backward(), S is not a good name for a list Regards Jason
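Regarding point 7, a hypothetical sketch of how the algorithm argument could be validated before use; this is not the package's actual code.

```python
def _validate_algorithm(algorithm):
    # Reject anything other than the two supported selection directions.
    allowed = ('forward', 'backward')
    if algorithm not in allowed:
        raise ValueError(
            "`algorithm` must be one of {}, got {!r}.".format(allowed, algorithm)
        )
```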
UBC-MDS/PyPunisher
diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..4265cc3 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,1 @@ +#!/usr/bin/env python diff --git a/tests/_defaults.py b/tests/_defaults.py index b9a31c4..ddfc126 100644 --- a/tests/_defaults.py +++ b/tests/_defaults.py @@ -1,8 +1,8 @@ -""" +#!/usr/bin/env python +""" Default Base for Testing Against - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - + ================================ """ from sklearn.linear_model import LinearRegression from tests._test_data import X_train, y_train, X_val, y_val diff --git a/tests/_test_data.py b/tests/_test_data.py index 5b74671..adc5f13 100644 --- a/tests/_test_data.py +++ b/tests/_test_data.py @@ -1,31 +1,41 @@ -""" - - Test Data - ~~~~~~~~~ - - Generate: y = x + e, where e ~ Uniform(0, 50) and - `x` is embedded as the middle column in a zero matrix. - That is, only ONE column is predictive of y, the rest are - trivial column vectors. +#!/usr/bin/env python +""" +Test Data +========= +Generate: y = x + e, where e ~ Uniform(0, 50) and +`x` is embedded as the middle column in a zero matrix. +That is, only ONE column is predictive of y, the rest are +trivial column vectors. + +X_train : 2D array + Training Features. +X_val : 2D array + Validation Features. +y_train : 1D array + Training labels. +y_val : 1D array + Validation Labels +true_best_feature : int, list + Denotes the best feature + that is actually predictive of the response. """ import numpy as np from sklearn.model_selection import train_test_split SEED = 99 - -features = 20 -obs = 501 -middle_feature = features // 2 +FEATURES = 20 +OBSERVATIONS = 501 +middle_feature = FEATURES // 2 np.random.seed(SEED) -X = np.zeros((obs, features)) -y = np.arange(obs) -X[:, middle_feature] = y + np.random.uniform(0, 50, size=obs) +X = np.zeros((OBSERVATIONS, FEATURES)) +y = np.arange(OBSERVATIONS) +X[:, middle_feature] = y + np.random.uniform(0, 50, size=OBSERVATIONS) X_train, X_val, y_train, y_val = train_test_split(X, y, random_state=SEED) -TRUE_BEST_FEATURE = middle_feature +true_best_feature = middle_feature # Visualize --- # import matplotlib.pyplot as plt diff --git a/tests/_wrappers.py b/tests/_wrappers.py index e4404f9..2dcf69d 100644 --- a/tests/_wrappers.py +++ b/tests/_wrappers.py @@ -1,8 +1,8 @@ -""" - - Wrapper Functions for Testing - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" +Wrapper Functions for Testing +============================= """ from copy import deepcopy from tests._defaults import DEFAULT_SELECTION_PARAMS diff --git a/tests/test_backward_selection.py b/tests/test_backward_selection.py index d44f603..cdb8b2a 100644 --- a/tests/test_backward_selection.py +++ b/tests/test_backward_selection.py @@ -1,8 +1,8 @@ -""" - - Tests Specific to Backward Selection - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" +Tests Specific to Backward Selection +==================================== """ import os import sys @@ -13,33 +13,88 @@ sys.path.insert(0, os.path.abspath(".")) sys.path.insert(0, os.path.abspath("../")) from tests._wrappers import backward +from tests._test_data import X_train + +# ----------------------------------------------------------------------------- +# Test `backward()` Params +# ----------------------------------------------------------------------------- -def test_backward_params(): + +def test_n_features_greater_than_zero_backward(): """ - Check parameters to `backward()` raise when expected. 
+ Check `backward()`'s `n_features` raises when + not greater than zero """ msg = "`n_features` must be greater than zero." with pytest.raises(ValueError, match=msg): backward(n_features=-0.5, min_change=None) + +def test_min_change_greater_than_zero_backward(): + """ + Check `backward()`'s `min_change` raises when + not greater than zero + """ msg = "`min_change` must be greater than zero." with pytest.raises(ValueError, match=msg): backward(n_features=None, min_change=-0.75) + +def test_min_change_fails_on_string_backward(): + """ + Check that backward raises when passed a string + for `min_change`. + """ + msg = "`min_change` must be of type int or float." + with pytest.raises(TypeError, match=msg): + backward(min_change='-0.75', n_features=None) + + +def test_n_features_fails_on_string_backward(): + """ + Check that backward raises when passed a string + for `n_features`. + """ + msg = "`n_features` must be of type int or float." + with pytest.raises(TypeError, match=msg): + backward(min_change=None, n_features='-0.75') + + +def test_both_non_none_backward(): + """ + Check `backward()` raise when at least one + of `min_change` or `n_features` are not None. + """ # Note: items in backticks (``) will be in alphabetical order. msg = "At least one of `min_change` and `n_features` must be None." with pytest.raises(TypeError, match=msg): backward(n_features=0.5, min_change=0.3) - - msg = "`criterion` must be one of: None, 'aic', 'bic'." - with pytest.raises(ValueError, match=msg): - backward(n_features=0.5, criterion='acc') + +def test_float_greater_than_one_raises_backward(): + """ + Test that float values not on (0, 1) raise. + """ msg = "^If a float, `n_features` must be on" with pytest.raises(ValueError, match=msg): backward(n_features=1.5) - + + +def test_min_features_requirement_backward(): + """ + Check that the requirement that at least + two features must be present. + """ msg = "less than 2 features present." with pytest.raises(IndexError, match=msg): - backward(X_train=ones((501, 1)), X_val=ones((501, 1))) \ No newline at end of file + backward(X_train=ones((501, 1)), X_val=ones((501, 1))) + + +# ----------------------------------------------------------------------------- +# Test Exhausting loop +# ----------------------------------------------------------------------------- + +def test_loop_exhaust(): + """Text Exhausting backward()'s loop.""" + backward(n_features=X_train.shape[-1], min_change=None, _do_not_skip=False) diff --git a/tests/test_criterion.py b/tests/test_criterion.py index 9e2b8ff..2773219 100644 --- a/tests/test_criterion.py +++ b/tests/test_criterion.py @@ -1,8 +1,8 @@ -""" - - Criterion Tests - ~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" +Criterion Tests +=============== """ import os import sys @@ -15,6 +15,7 @@ import statsmodels.api as sm from pypunisher.metrics.criterion import aic, bic from sklearn.linear_model import LinearRegression from tests._test_data import X_train, y_train +from tests._wrappers import forward, backward COMP_TOLERANCE = 200 # comparision tolerance between floats @@ -49,6 +50,22 @@ def test_metric_model_param(): metric(kind, X_train=X_train, y_train=y_train) +# ----------------------------------------------------------------------------- +# Test criterion through selection +# ----------------------------------------------------------------------------- + + +def test_selection_class_use_of_criterion(): + """Test Criterion through `forward()` and `backward().""" + + msg = "`criterion` must be one of: None, 'aic', 'bic'." 
+ with pytest.raises(ValueError, match=msg): + forward(min_change=0.5, criterion='acc') + + with pytest.raises(ValueError, match=msg): + backward(n_features=0.5, criterion='Santa') + + # ----------------------------------------------------------------------------- # `data` Param # ----------------------------------------------------------------------------- @@ -68,6 +85,7 @@ def test_metric_data_param(): else: metric(sk_model, X_train=kind, y_train=y_train) + # ----------------------------------------------------------------------------- # Metric output # ----------------------------------------------------------------------------- diff --git a/tests/test_forward_selection.py b/tests/test_forward_selection.py index fa62b01..93d27f0 100644 --- a/tests/test_forward_selection.py +++ b/tests/test_forward_selection.py @@ -1,8 +1,8 @@ -""" - - Tests Specific to Forward Selection - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" +Tests Specific to Forward Selection +=================================== """ import os import sys @@ -12,25 +12,68 @@ sys.path.insert(0, os.path.abspath(".")) sys.path.insert(0, os.path.abspath("../")) from tests._wrappers import forward +from tests._test_data import X_train + + +# ----------------------------------------------------------------------------- +# Test `forward()` Params +# ----------------------------------------------------------------------------- + + +def test_n_features_greater_than_zero_forward(): + """ + Check that `n_features` is required to be > 0. + """ + msg = "`n_features` must be greater than zero." + with pytest.raises(ValueError, match=msg): + forward(min_change=None, n_features=-0.75) -def test_forward_params(): +def test_min_change_greater_than_zero_forward(): """ - Check parameters to `forward()` raise when expected. + Check that `min_change` is required to be > 0. """ msg = "`min_change` must be greater than zero." with pytest.raises(ValueError, match=msg): forward(min_change=-0.5, n_features=None) - msg = "`n_features` must be greater than zero." - with pytest.raises(ValueError, match=msg): - forward(min_change=None, n_features=-0.75) +def test_n_features_fails_on_string_forward(): + """ + Check that forward raises when passed a string + for `n_features`. + """ + msg = "`n_features` must be of type int or float." + with pytest.raises(TypeError, match=msg): + forward(min_change=None, n_features='-0.75') + + +def test_min_change_fails_on_string_forward(): + """ + Check that forward raises when passed a string + for `min_change`. + """ + msg = "`min_change` must be of type int or float." + with pytest.raises(TypeError, match=msg): + forward(min_change='-0.75', n_features=None) + + +def test_both_non_none_forward(): + """ + Check `forward()` raise when at least one + of `min_change` or `n_features` are not None. + """ # Note: items in backticks (``) will be in alphabetical order. msg = "At least one of `min_change` and `n_features` must be None." with pytest.raises(TypeError, match=msg): forward(min_change=0.5, n_features=0.3) - - msg = "`criterion` must be one of: None, 'aic', 'bic'." - with pytest.raises(ValueError, match=msg): - forward(min_change=0.5, criterion='acc') + + +# ----------------------------------------------------------------------------- +# Test Exhausting loop +# ----------------------------------------------------------------------------- + +def test_loop_exhaust(): + """Text Exhausting forwards()'s loop.""" + # Should not raise. 
+ forward(n_features=X_train.shape[-1], min_change=None, _do_not_skip=False) diff --git a/tests/test_selection.py b/tests/test_selection.py index e3ee45a..2eb6098 100644 --- a/tests/test_selection.py +++ b/tests/test_selection.py @@ -1,8 +1,8 @@ -""" - - Run Tests Common to Forward and Backward Selection - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#!/usr/bin/env python +""" +Run Tests Common to Forward and Backward Selection +================================================== """ import os import sys @@ -14,7 +14,7 @@ sys.path.insert(0, os.path.abspath("../")) from pypunisher import Selection from tests._wrappers import forward, backward -from tests._test_data import TRUE_BEST_FEATURE +from tests._test_data import true_best_feature from tests._defaults import DEFAULT_SELECTION_PARAMS @@ -75,6 +75,18 @@ def test_sklearn_model_methods(): with pytest.raises(AttributeError): Selection(**d) +# ----------------------------------------------------------------------------- +# Test Multiple Features +# ----------------------------------------------------------------------------- + + +def test_passing_significant_change(): + """ + Test cases where there is a significant `min_change` + during backward selection. + """ + backward(n_features=None, min_change=1, _last_score_punt=True) + # ----------------------------------------------------------------------------- # Outputs: Run the Forward and Backward Selection Algorithms @@ -88,7 +100,7 @@ forward_output += forward(n_features=1, min_change=None) # Force the backward selection algorithm to # select the single feature it thinks is most predictive. # If implemented correctly, `backward()` should be able to -# identify `TRUE_BEST_FEATURE` as predictive. +# identify `true_best_feature` as predictive. backward_output = backward(n_features=1) # Run using the other parameter option @@ -142,7 +154,7 @@ def output_values(output): in the contrived data. """ msg = "The algorithm failed to select the predictive feature." - assert TRUE_BEST_FEATURE in output, msg + assert true_best_feature in output, msg def test_fsel_output_values(): @@ -206,4 +218,4 @@ def test_fsel_verbose_output(): def test_bsel_verbose_output(): backward_output = backward(n_features=2, min_change=None, verbose=True) - assert len(backward_output) >= 1 \ No newline at end of file + assert len(backward_output) >= 1
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 7 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==3.3.9 certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.8.0 dill==0.3.9 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 isort==6.0.1 joblib==1.4.2 mccabe==0.7.0 numpy==2.0.2 packaging==24.2 pandas==2.2.3 patsy==1.0.1 platformdirs==4.3.7 pluggy==1.5.0 pylint==3.3.6 -e git+https://github.com/UBC-MDS/PyPunisher.git@859c2f19db06c3bb7b488645f65dd286a1ba2a65#egg=pypunisher pytest==8.3.5 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.32.3 scikit-learn==1.6.1 scipy==1.13.1 six==1.17.0 statsmodels==0.14.4 threadpoolctl==3.6.0 tomli==2.2.1 tomlkit==0.13.2 tqdm==4.67.1 typing_extensions==4.13.0 tzdata==2025.2 urllib3==2.3.0
name: PyPunisher channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==3.3.9 - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.8.0 - dill==0.3.9 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - isort==6.0.1 - joblib==1.4.2 - mccabe==0.7.0 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - patsy==1.0.1 - platformdirs==4.3.7 - pluggy==1.5.0 - pylint==3.3.6 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.32.3 - scikit-learn==1.6.1 - scipy==1.13.1 - six==1.17.0 - statsmodels==0.14.4 - threadpoolctl==3.6.0 - tomli==2.2.1 - tomlkit==0.13.2 - tqdm==4.67.1 - typing-extensions==4.13.0 - tzdata==2025.2 - urllib3==2.3.0 prefix: /opt/conda/envs/PyPunisher
[ "tests/test_backward_selection.py::test_min_change_fails_on_string_backward", "tests/test_backward_selection.py::test_n_features_fails_on_string_backward", "tests/test_backward_selection.py::test_loop_exhaust", "tests/test_forward_selection.py::test_n_features_fails_on_string_forward", "tests/test_forward_selection.py::test_min_change_fails_on_string_forward", "tests/test_forward_selection.py::test_loop_exhaust", "tests/test_selection.py::test_passing_significant_change" ]
[]
[ "tests/test_backward_selection.py::test_n_features_greater_than_zero_backward", "tests/test_backward_selection.py::test_min_change_greater_than_zero_backward", "tests/test_backward_selection.py::test_both_non_none_backward", "tests/test_backward_selection.py::test_float_greater_than_one_raises_backward", "tests/test_backward_selection.py::test_min_features_requirement_backward", "tests/test_criterion.py::test_metric_model_param", "tests/test_criterion.py::test_selection_class_use_of_criterion", "tests/test_criterion.py::test_metric_data_param", "tests/test_criterion.py::test_metric_output", "tests/test_criterion.py::test_metric_output_value", "tests/test_forward_selection.py::test_n_features_greater_than_zero_forward", "tests/test_forward_selection.py::test_min_change_greater_than_zero_forward", "tests/test_forward_selection.py::test_both_non_none_forward", "tests/test_selection.py::test_input_types", "tests/test_selection.py::test_too_few_features", "tests/test_selection.py::test_sklearn_model_methods", "tests/test_selection.py::test_fsel_output_type", "tests/test_selection.py::test_bsel_output_type", "tests/test_selection.py::test_n_features", "tests/test_selection.py::test_fsel_output_values", "tests/test_selection.py::test_bsel_output_values", "tests/test_selection.py::test_fsel_aic_output", "tests/test_selection.py::test_fsel_bic_output", "tests/test_selection.py::test_bsel_aic_output", "tests/test_selection.py::test_bsel_bic_output", "tests/test_selection.py::test_fsel_min_change_output", "tests/test_selection.py::test_bsel_min_change_output", "tests/test_selection.py::test_fsel_verbose_output", "tests/test_selection.py::test_bsel_verbose_output" ]
[]
BSD 3-Clause "New" or "Revised" License
2,301
[ "pypunisher/metrics/criterion.py", "pypunisher/metrics/__init__.py", "pypunisher/selection_engines/__init__.py", "pypunisher/selection_engines/_utils.py", "pypunisher/selection_engines/selection.py", "pypunisher/_checks.py", "pypunisher/__init__.py" ]
[ "pypunisher/metrics/criterion.py", "pypunisher/metrics/__init__.py", "pypunisher/selection_engines/__init__.py", "pypunisher/selection_engines/_utils.py", "pypunisher/selection_engines/selection.py", "pypunisher/_checks.py", "pypunisher/__init__.py" ]
pypa__twine-322
5199edbdce443ff8e0942424157820854c1cebf4
2018-03-18 04:39:47
5199edbdce443ff8e0942424157820854c1cebf4
codecov[bot]: # [Codecov](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=h1) Report > Merging [#322](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=desc) into [master](https://codecov.io/gh/pypa/twine/commit/5199edbdce443ff8e0942424157820854c1cebf4?src=pr&el=desc) will **increase** coverage by `1.07%`. > The diff coverage is `75%`. [![Impacted file tree graph](https://codecov.io/gh/pypa/twine/pull/322/graphs/tree.svg?width=650&height=150&src=pr&token=NrvN4iFbj6)](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #322 +/- ## ========================================== + Coverage 68.51% 69.58% +1.07% ========================================== Files 12 12 Lines 578 582 +4 Branches 91 91 ========================================== + Hits 396 405 +9 + Misses 156 150 -6 - Partials 26 27 +1 ``` | [Impacted Files](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [twine/utils.py](https://codecov.io/gh/pypa/twine/pull/322/diff?src=pr&el=tree#diff-dHdpbmUvdXRpbHMucHk=) | `87.12% <0%> (+1.98%)` | :arrow_up: | | [twine/repository.py](https://codecov.io/gh/pypa/twine/pull/322/diff?src=pr&el=tree#diff-dHdpbmUvcmVwb3NpdG9yeS5weQ==) | `60.57% <100%> (+0.38%)` | :arrow_up: | | [twine/exceptions.py](https://codecov.io/gh/pypa/twine/pull/322/diff?src=pr&el=tree#diff-dHdpbmUvZXhjZXB0aW9ucy5weQ==) | `100% <100%> (ø)` | :arrow_up: | | [twine/commands/upload.py](https://codecov.io/gh/pypa/twine/pull/322/diff?src=pr&el=tree#diff-dHdpbmUvY29tbWFuZHMvdXBsb2FkLnB5) | `65.47% <66.66%> (+3.57%)` | :arrow_up: | | [twine/wininst.py](https://codecov.io/gh/pypa/twine/pull/322/diff?src=pr&el=tree#diff-dHdpbmUvd2luaW5zdC5weQ==) | `29.72% <0%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=footer). Last update [5199edb...2697917](https://codecov.io/gh/pypa/twine/pull/322?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/docs/changelog.rst b/docs/changelog.rst index 8c88876..4885afa 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,8 @@ Changelog ========= +* :bug:`322 major` Raise exception if attempting upload to deprecated legacy + PyPI URLs. * :feature:`320` Remove PyPI as default ``register`` package index. * :feature:`319` Support Metadata 2.1 (:pep:`566`), including Markdown for ``description`` fields. diff --git a/twine/commands/upload.py b/twine/commands/upload.py index 9766893..e2ebdba 100644 --- a/twine/commands/upload.py +++ b/twine/commands/upload.py @@ -21,7 +21,7 @@ import sys import twine.exceptions as exc from twine.package import PackageFile -from twine.repository import Repository, LEGACY_PYPI +from twine.repository import Repository, LEGACY_PYPI, LEGACY_TEST_PYPI from twine import utils @@ -98,11 +98,20 @@ def upload(dists, repository, sign, identity, username, password, comment, print("Uploading distributions to {0}".format(config["repository"])) - if config["repository"].startswith(LEGACY_PYPI): - print( - "Note: you are uploading to the old upload URL. It's recommended " - "to use the new URL \"{0}\" or to leave the URL unspecified and " - "allow twine to choose.".format(utils.DEFAULT_REPOSITORY)) + if config["repository"].startswith((LEGACY_PYPI, LEGACY_TEST_PYPI)): + raise exc.UploadToDeprecatedPyPIDetected( + "You're trying to upload to the legacy PyPI site '{0}'. " + "Uploading to those sites is deprecated. \n " + "The new sites are pypi.org and test.pypi.org. Try using " + "{1} (or {2}) to upload your packages instead. " + "These are the default URLs for Twine now. \n More at " + "https://packaging.python.org/guides/migrating-to-pypi-org/ " + ".".format( + config["repository"], + utils.DEFAULT_REPOSITORY, + utils.TEST_REPOSITORY + ) + ) username = utils.get_username(username, config) password = utils.get_password( diff --git a/twine/exceptions.py b/twine/exceptions.py index 5783a9d..af0401c 100644 --- a/twine/exceptions.py +++ b/twine/exceptions.py @@ -30,3 +30,9 @@ class PackageNotFound(Exception): This is only used when attempting to register a package. """ pass + + +class UploadToDeprecatedPyPIDetected(Exception): + """An upload attempt was detected to deprecated legacy PyPI + sites pypi.python.org or testpypi.python.org.""" + pass diff --git a/twine/repository.py b/twine/repository.py index 7d87d2d..74e1968 100644 --- a/twine/repository.py +++ b/twine/repository.py @@ -31,6 +31,7 @@ import twine KEYWORDS_TO_NOT_FLATTEN = set(["gpg_signature", "content"]) LEGACY_PYPI = 'https://pypi.python.org/' +LEGACY_TEST_PYPI = 'https://testpypi.python.org/' WAREHOUSE = 'https://upload.pypi.org/' OLD_WAREHOUSE = 'https://upload.pypi.io/' diff --git a/twine/utils.py b/twine/utils.py index 6f2ac30..5989de0 100644 --- a/twine/utils.py +++ b/twine/utils.py @@ -140,11 +140,15 @@ def normalize_repository_url(url): def check_status_code(response): - if (response.status_code == 500 and + """ + Shouldn't happen, thanks to the UploadToDeprecatedPyPIDetected + exception, but this is in case that breaks and it does. + """ + if (response.status_code == 410 and response.url.startswith(("https://pypi.python.org", "https://testpypi.python.org"))): print("It appears you're uploading to pypi.python.org (or " - "testpypi.python.org). You've received a 500 error response. " + "testpypi.python.org). You've received a 410 error response. " "Uploading to those sites is deprecated. The new sites are " "pypi.org and test.pypi.org. Try using " "https://upload.pypi.org/legacy/ "
notice obsolete repository settings and warn the user more usefully

Following up on @tdicola's and others' comments in https://github.com/pypa/twine/issues/200#issuecomment-346537836, #270, and #258, here's a feature request: if Twine notices that it's getting an obsolete `repository` setting (in particular `https://pypi.python.org/pypi`) from a `[repository]` flag in `.pypirc`, then:

> It would be really, really nice for twine to catch this case and give an actionable error since previous packaging instructions told people to add that repository line, etc.
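A minimal sketch of what a caller now hits after this change, assuming a `.pypirc` that still points at the legacy endpoint. The keyword arguments mirror the ones exercised in the new `test_deprecated_repo` test below; the distribution file name is made up for illustration and this is not the project's own example code.

```python
# Illustrative only: uploading against a .pypirc that still targets
# https://pypi.python.org/pypi/ now raises a hard, actionable error.
from twine import exceptions
from twine.commands import upload

try:
    upload.upload(
        dists=["dist/example-1.0-py2.py3-none-any.whl"],  # hypothetical artifact
        repository="pypi",          # resolved from the obsolete .pypirc stanza
        sign=None, identity=None, username=None, password=None,
        comment=None, cert=None, client_cert=None, sign_with=None,
        config_file=".pypirc", skip_existing=False, repository_url=None,
    )
except exceptions.UploadToDeprecatedPyPIDetected as err:
    # The message points users at upload.pypi.org / test.pypi.org instead.
    print(err)
```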
pypa/twine
diff --git a/tests/test_upload.py b/tests/test_upload.py index 12ad867..55c20df 100644 --- a/tests/test_upload.py +++ b/tests/test_upload.py @@ -20,7 +20,7 @@ import pretend import pytest from twine.commands import upload -from twine import package, cli +from twine import package, cli, exceptions import twine import helpers @@ -95,6 +95,40 @@ def test_get_config_old_format(tmpdir): ).format(pypirc) +def test_deprecated_repo(tmpdir): + with pytest.raises(exceptions.UploadToDeprecatedPyPIDetected) as err: + pypirc = os.path.join(str(tmpdir), ".pypirc") + dists = ["tests/fixtures/twine-1.5.0-py2.py3-none-any.whl"] + + with open(pypirc, "w") as fp: + fp.write(textwrap.dedent(""" + [pypi] + repository: https://pypi.python.org/pypi/ + username:foo + password:bar + """)) + + upload.upload(dists=dists, repository="pypi", sign=None, identity=None, + username=None, password=None, comment=None, + cert=None, client_cert=None, + sign_with=None, config_file=pypirc, skip_existing=False, + repository_url=None, + ) + + assert err.args[0] == ( + "You're trying to upload to the legacy PyPI site " + "'https://pypi.python.org/pypi/'. " + "Uploading to those sites is deprecated. \n " + "The new sites are pypi.org and test.pypi.org. Try using " + "https://upload.pypi.org/legacy/ " + "(or https://test.pypi.org/legacy/) " + "to upload your packages instead. " + "These are the default URLs for Twine now. \n " + "More at " + "https://packaging.python.org/guides/migrating-to-pypi-org/ ." + ) + + def test_skip_existing_skips_files_already_on_PyPI(monkeypatch): response = pretend.stub( status_code=400,
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_issue_reference", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 5 }
1.10
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "coverage", "pretend" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 idna==3.10 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work importlib-resources==5.4.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pkginfo==1.10.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work pretend==1.0.9 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 requests==2.27.1 requests-toolbelt==1.0.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tqdm==4.64.1 -e git+https://github.com/pypa/twine.git@5199edbdce443ff8e0942424157820854c1cebf4#egg=twine typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: twine channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - coverage==6.2 - idna==3.10 - importlib-resources==5.4.0 - pkginfo==1.10.0 - pretend==1.0.9 - requests==2.27.1 - requests-toolbelt==1.0.0 - tqdm==4.64.1 - urllib3==1.26.20 prefix: /opt/conda/envs/twine
[ "tests/test_upload.py::test_deprecated_repo" ]
[]
[ "tests/test_upload.py::test_ensure_wheel_files_uploaded_first", "tests/test_upload.py::test_ensure_if_no_wheel_files", "tests/test_upload.py::test_find_dists_expands_globs", "tests/test_upload.py::test_find_dists_errors_on_invalid_globs", "tests/test_upload.py::test_find_dists_handles_real_files", "tests/test_upload.py::test_get_config_old_format", "tests/test_upload.py::test_skip_existing_skips_files_already_on_PyPI", "tests/test_upload.py::test_skip_existing_skips_files_already_on_pypiserver", "tests/test_upload.py::test_skip_upload_respects_skip_existing", "tests/test_upload.py::test_values_from_env" ]
[]
Apache License 2.0
2,304
[ "twine/commands/upload.py", "twine/utils.py", "docs/changelog.rst", "twine/repository.py", "twine/exceptions.py" ]
[ "twine/commands/upload.py", "twine/utils.py", "docs/changelog.rst", "twine/repository.py", "twine/exceptions.py" ]
ntoll__uflash-45
8139fe11aa97c45334592cd8546ebb306ac41ccb
2018-03-18 20:29:55
8139fe11aa97c45334592cd8546ebb306ac41ccb
diff --git a/requirements.txt b/requirements.txt index 40fe650..04c8918 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,7 @@ pyflakes coverage sphinx pytest-cov +nudatus>=0.0.2 # Mock is bundled as part of unittest since Python 3.3 # mock_open can't read binary data in <= 3.4.2 diff --git a/uflash.py b/uflash.py index 7e1872f..4d0d0b3 100644 --- a/uflash.py +++ b/uflash.py @@ -21,6 +21,13 @@ import sys from subprocess import check_output import time +# nudatus is an optional dependancy +can_minify = True +try: + import nudatus +except ImportError: # pragma: no cover + can_minify = False + #: The magic start address in flash memory for a Python script. _SCRIPT_ADDR = 0x3e000 @@ -41,6 +48,7 @@ Documentation is here: https://uflash.readthedocs.io/en/latest/ #: MAJOR, MINOR, RELEASE, STATUS [alpha, beta, final], VERSION _VERSION = (1, 1, 1, ) +_MAX_SIZE = 8188 def get_version(): @@ -50,6 +58,15 @@ def get_version(): return '.'.join([str(i) for i in _VERSION]) +def get_minifier(): + """ + Report the minifier will be used when minify=True + """ + if can_minify: + return 'nudatus' + return None + + def strfunc(raw): """ Compatibility for 2 & 3 str() @@ -57,7 +74,7 @@ def strfunc(raw): return str(raw) if sys.version_info[0] == 2 else str(raw, 'utf-8') -def hexlify(script): +def hexlify(script, minify=False): """ Takes the byte content of a Python script and returns a hex encoded version of it. @@ -69,11 +86,15 @@ def hexlify(script): # Convert line endings in case the file was created on Windows. script = script.replace(b'\r\n', b'\n') script = script.replace(b'\r', b'\n') + if minify: + if not can_minify: + raise ValueError("No minifier is available") + script = nudatus.mangle(script.decode('utf-8')).encode('utf-8') # Add header, pad to multiple of 16 bytes. data = b'MP' + struct.pack('<H', len(script)) + script # Padding with null bytes in a 2/3 compatible way data = data + (b'\x00' * (16 - len(data) % 16)) - if len(data) > 8192: + if len(data) > _MAX_SIZE: # 'MP' = 2 bytes, script length is another 2 bytes. raise ValueError("Python script must be less than 8188 bytes.") # Convert to .hex format. @@ -252,7 +273,7 @@ def save_hex(hex_file, path): def flash(path_to_python=None, paths_to_microbits=None, - path_to_runtime=None, python_script=None): + path_to_runtime=None, python_script=None, minify=False): """ Given a path to or source of a Python file will attempt to create a hex file and then flash it onto the referenced BBC micro:bit. @@ -285,9 +306,9 @@ def flash(path_to_python=None, paths_to_microbits=None, if not path_to_python.endswith('.py'): raise ValueError('Python files must end in ".py".') with open(path_to_python, 'rb') as python_script: - python_hex = hexlify(python_script.read()) + python_hex = hexlify(python_script.read(), minify) elif python_script: - python_hex = hexlify(python_script) + python_hex = hexlify(python_script, minify) runtime = _RUNTIME # Load the hex for the runtime. 
@@ -374,6 +395,9 @@ def main(argv=None): parser.add_argument('-w', '--watch', action='store_true', help='Watch the source file for changes.') + parser.add_argument('-m', '--minify', + action='store_true', + help='Minify the source') parser.add_argument('--version', action='version', version='%(prog)s ' + get_version()) args = parser.parse_args(argv) @@ -402,7 +426,7 @@ def main(argv=None): else: try: flash(path_to_python=args.source, paths_to_microbits=args.target, - path_to_runtime=args.runtime) + path_to_runtime=args.runtime, minify=args.minify) except Exception as ex: error_message = ( "Error flashing {source} to {target}{runtime}: {error!s}"
Allow for script minification There should be a flag (`--minify` and `-m`) that causes uflash to "minify" the Python script using various heuristics so it'll fit into the limited amount of memory available. See: https://github.com/mu-editor/mu/pull/385 for an example of what I mean.
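For context, a rough sketch of how the optional minifier is wired in by the patch above: `nudatus.mangle()` takes and returns the script source as a string, and uflash calls it before hex-encoding when `minify=True`. The helper function and file name here are illustrative, not the shipped uflash code.

```python
# Sketch: shrink a MicroPython script with nudatus before it is hex-encoded
# for flashing (mirrors the hexlify(..., minify=True) path in the patch).
import nudatus

def minify_script(script_bytes):
    """Return a minified copy of the script, still as UTF-8 bytes."""
    source = script_bytes.decode('utf-8')
    return nudatus.mangle(source).encode('utf-8')

with open('my_script.py', 'rb') as fh:      # hypothetical input file
    minified = minify_script(fh.read())
print('%d bytes after minification' % len(minified))
```

From the command line the same behaviour is triggered with the new flag added to the parser, e.g. `uflash --minify my_script.py` (or `-m`).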
ntoll/uflash
diff --git a/tests/test_uflash.py b/tests/test_uflash.py index d3746a3..38c561f 100644 --- a/tests/test_uflash.py +++ b/tests/test_uflash.py @@ -44,6 +44,17 @@ def test_get_version(): assert result == '.'.join([str(i) for i in uflash._VERSION]) +def test_get_minifier(): + """ + When a minifier was loaded a string identifing it should be + returned, otherwise None + """ + with mock.patch('uflash.can_minify', False): + assert uflash.get_minifier() is None + with mock.patch('uflash.can_minify', True): + assert len(uflash.get_minifier()) > 0 + + def test_hexlify(): """ Ensure we get the expected .hex encoded result from a "good" call to the @@ -383,7 +394,7 @@ def test_flash_with_python_script(): with mock.patch('uflash.find_microbit', return_value='bar'): with mock.patch('uflash.hexlify') as mock_hexlify: uflash.flash(python_script=python_script) - mock_hexlify.assert_called_once_with(python_script) + mock_hexlify.assert_called_once_with(python_script, False) def test_flash_cannot_find_microbit(): @@ -409,6 +420,25 @@ def test_flash_wrong_python(): assert 'Will only run on Python ' in ex.value.args[0] +def test_hexlify_minify_without_minifier(): + """ + When minification but no minifier is available a ValueError + should be raised + """ + with pytest.raises(ValueError): + with mock.patch('uflash.can_minify', False): + uflash.hexlify(TEST_SCRIPT, minify=True) + + +def test_hexlify_minify(): + """ + Check mangle is called as expected + """ + with mock.patch('nudatus.mangle') as mangle: + uflash.hexlify(TEST_SCRIPT, minify=True) + mangle.assert_called_once_with(TEST_SCRIPT.decode('utf-8')) + + def test_main_no_args(): """ If there are no args into the main function, it simply calls flash with @@ -419,7 +449,8 @@ def test_main_no_args(): uflash.main() mock_flash.assert_called_once_with(path_to_python=None, paths_to_microbits=[], - path_to_runtime=None) + path_to_runtime=None, + minify=False) def test_main_first_arg_python(): @@ -431,7 +462,8 @@ def test_main_first_arg_python(): uflash.main(argv=['foo.py']) mock_flash.assert_called_once_with(path_to_python='foo.py', paths_to_microbits=[], - path_to_runtime=None) + path_to_runtime=None, + minify=False) def test_main_first_arg_help(capsys): @@ -557,7 +589,8 @@ def test_main_two_args(): mock_flash.assert_called_once_with( path_to_python='foo.py', paths_to_microbits=['/media/foo/bar'], - path_to_runtime=None) + path_to_runtime=None, + minify=False) def test_main_multiple_microbits(): @@ -572,7 +605,8 @@ def test_main_multiple_microbits(): path_to_python='foo.py', paths_to_microbits=[ '/media/foo/bar', '/media/foo/baz', '/media/foo/bob'], - path_to_runtime=None) + path_to_runtime=None, + minify=False) def test_main_runtime(): @@ -585,7 +619,8 @@ def test_main_runtime(): mock_flash.assert_called_once_with( path_to_python='foo.py', paths_to_microbits=['/media/foo/bar'], - path_to_runtime='baz.hex') + path_to_runtime='baz.hex', + minify=False) def test_main_named_args(): @@ -596,7 +631,8 @@ def test_main_named_args(): uflash.main(argv=['-r', 'baz.hex']) mock_flash.assert_called_once_with(path_to_python=None, paths_to_microbits=[], - path_to_runtime='baz.hex') + path_to_runtime='baz.hex', + minify=False) def test_main_watch_flag():
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "numpy>=1.16.0", "pandas>=1.0.0", "nudatus", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 iniconfig==2.1.0 nudatus==0.0.5 numpy==2.0.2 packaging==24.2 pandas==2.2.3 pluggy==1.5.0 pytest==8.3.5 python-dateutil==2.9.0.post0 pytz==2025.2 six==1.17.0 tomli==2.2.1 tzdata==2025.2 -e git+https://github.com/ntoll/uflash.git@8139fe11aa97c45334592cd8546ebb306ac41ccb#egg=uflash
name: uflash channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - nudatus==0.0.5 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - pluggy==1.5.0 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - pytz==2025.2 - six==1.17.0 - tomli==2.2.1 - tzdata==2025.2 prefix: /opt/conda/envs/uflash
[ "tests/test_uflash.py::test_get_minifier", "tests/test_uflash.py::test_flash_with_python_script", "tests/test_uflash.py::test_hexlify_minify_without_minifier", "tests/test_uflash.py::test_hexlify_minify", "tests/test_uflash.py::test_main_no_args", "tests/test_uflash.py::test_main_first_arg_python", "tests/test_uflash.py::test_main_two_args", "tests/test_uflash.py::test_main_multiple_microbits", "tests/test_uflash.py::test_main_runtime", "tests/test_uflash.py::test_main_named_args" ]
[]
[ "tests/test_uflash.py::test_get_version", "tests/test_uflash.py::test_hexlify", "tests/test_uflash.py::test_unhexlify", "tests/test_uflash.py::test_unhexlify_not_python", "tests/test_uflash.py::test_unhexlify_bad_unicode", "tests/test_uflash.py::test_hexlify_empty_script", "tests/test_uflash.py::test_embed_hex", "tests/test_uflash.py::test_embed_no_python", "tests/test_uflash.py::test_embed_no_runtime", "tests/test_uflash.py::test_extract", "tests/test_uflash.py::test_extract_sandwiched", "tests/test_uflash.py::test_extract_not_valid_hex", "tests/test_uflash.py::test_extract_no_python", "tests/test_uflash.py::test_find_microbit_posix_exists", "tests/test_uflash.py::test_find_microbit_posix_missing", "tests/test_uflash.py::test_find_microbit_nt_exists", "tests/test_uflash.py::test_find_microbit_nt_missing", "tests/test_uflash.py::test_find_microbit_unknown_os", "tests/test_uflash.py::test_save_hex", "tests/test_uflash.py::test_save_hex_no_hex", "tests/test_uflash.py::test_save_hex_path_not_to_hex_file", "tests/test_uflash.py::test_flash_no_args", "tests/test_uflash.py::test_flash_has_python_no_path_to_microbit", "tests/test_uflash.py::test_flash_with_path_to_multiple_microbits", "tests/test_uflash.py::test_flash_with_path_to_microbit", "tests/test_uflash.py::test_flash_with_path_to_runtime", "tests/test_uflash.py::test_flash_cannot_find_microbit", "tests/test_uflash.py::test_flash_wrong_python", "tests/test_uflash.py::test_main_first_arg_help", "tests/test_uflash.py::test_main_first_arg_version", "tests/test_uflash.py::test_main_first_arg_not_python", "tests/test_uflash.py::test_flash_raises", "tests/test_uflash.py::test_flash_raises_with_info", "tests/test_uflash.py::test_watch_raises", "tests/test_uflash.py::test_extract_raises", "tests/test_uflash.py::test_main_watch_flag", "tests/test_uflash.py::test_extract_command", "tests/test_uflash.py::test_extract_paths", "tests/test_uflash.py::test_extract_command_source_only", "tests/test_uflash.py::test_extract_command_no_source", "tests/test_uflash.py::test_watch_no_source", "tests/test_uflash.py::test_watch_file", "tests/test_uflash.py::test_hexlify_validates_script_length" ]
[]
MIT License
2,305
[ "requirements.txt", "uflash.py" ]
[ "requirements.txt", "uflash.py" ]
gmr__tornado-aws-9
549b844677b8112f7da3a57dd86e4a2e7178235f
2018-03-19 12:53:53
549b844677b8112f7da3a57dd86e4a2e7178235f
diff --git a/docs/history.rst b/docs/history.rst index 51124bb..5590628 100644 --- a/docs/history.rst +++ b/docs/history.rst @@ -3,6 +3,10 @@ Version History =============== +1.1.0 (2018-03-19) +------------------ + - Add ``close`` method to ``AWSClient`` (#9 from `31z4 <https://github.com/31z4>_`) + 1.0.0 (2018-01-19) ------------------ - Add new exception type ``tornado_aws.exceptions.RequestException`` (#5 from `nvllsvm <https://github.com/nvllsvm>_`) diff --git a/requires/installation.txt b/requires/installation.txt index c98ba2b..78ed743 100644 --- a/requires/installation.txt +++ b/requires/installation.txt @@ -1,1 +1,1 @@ -tornado>=4.0 +tornado>=4.0,<5.0 diff --git a/setup.py b/setup.py index d2addcf..daebb99 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ def read_requirements(name): setuptools.setup( name='tornado-aws', - version='1.0.0', + version='1.1.0', description=DESC, long_description=open('README.rst').read(), author='Gavin M. Roy', diff --git a/tornado_aws/__init__.py b/tornado_aws/__init__.py index 81ee2f7..4fb5d99 100644 --- a/tornado_aws/__init__.py +++ b/tornado_aws/__init__.py @@ -6,6 +6,6 @@ from tornado_aws.client import AWSClient from tornado_aws.client import AsyncAWSClient from tornado_aws.client import exceptions -__version__ = '1.0.0' +__version__ = '1.1.0' __all__ = ['AWSClient', 'AsyncAWSClient', 'exceptions'] diff --git a/tornado_aws/client.py b/tornado_aws/client.py index 95c8e90..775c40a 100644 --- a/tornado_aws/client.py +++ b/tornado_aws/client.py @@ -181,6 +181,10 @@ class AWSClient(object): headers, body, True) raise aws_error if aws_error else error + def close(self): + """Closes the underlying HTTP client, freeing any resources used.""" + self._client.close() + def _process_error(self, error): """Attempt to process the error coming from AWS. Returns ``True`` if the client should attempt to fetch credentials and the AWSError
AsyncHTTPClient(force_instance=True) could cause memory leaks

What's the reason for forcing a new instance of the `AsyncHTTPClient`? The Tornado documentation states:

> close() is generally only necessary when either the IOLoop is also being closed, or the force_instance=True argument was used when creating the AsyncHTTPClient

Not closing an `AsyncHTTPClient` instance potentially causes memory leaks, and I see evidence of this in my running Tornado application, which uses `AsyncAWSClient` with `AsyncHTTPClient.configure('tornado.curl_httpclient.CurlAsyncHTTPClient')`:

```
>>> import objgraph
>>> objgraph.show_most_common_types()
Curl 393010
list 33683
function 23579
```

With 393010 `Curl` instances the process eats about 14GB of memory, which is insane!

At the very least, the `close` method of the `AsyncHTTPClient` must be exposed so that one can close it explicitly. Additionally, `force_instance=True` could be removed if there was no particular reason for it.
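The patch above addresses the first request by exposing `close()` on the client (new in 1.1.0 per the history entry). A minimal caller-side sketch follows; the constructor arguments are assumed and abridged for illustration, only the `close()` call is the new API.

```python
# Illustrative only: release the force_instance AsyncHTTPClient explicitly
# when the client is no longer needed, instead of leaking curl handles.
from tornado_aws import AsyncAWSClient

client = AsyncAWSClient('s3')   # constructor arguments assumed/abridged
try:
    pass                        # issue client.fetch(...) calls as usual
finally:
    client.close()              # closes the underlying AsyncHTTPClient
```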
gmr/tornado-aws
diff --git a/tests/client_tests.py b/tests/client_tests.py index 0d02db9..42eed16 100644 --- a/tests/client_tests.py +++ b/tests/client_tests.py @@ -125,6 +125,20 @@ class AsyncClientConfigTestCase(ClientConfigTestCase): CLIENT = client.AsyncAWSClient +class ClientCloseTestCase(TestCase): + + def test_close_is_called(self): + with self.client_with_default_creds('s3') as obj: + with mock.patch.object(obj._client, 'close') as close: + obj.close() + close.assert_called_once() + + +class AsyncClientCloseTestCase(ClientCloseTestCase): + + CLIENT = client.AsyncAWSClient + + class AMZErrorTestCase(TestCase): def test_awz_error(self):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 5 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[curl]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "coverage", "codecov", "flake8", "futures", "mock", "pycurl", "pylint", "wheel", "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requires/installation.txt", "requires/testing.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7 attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 dill==0.3.4 flake8==5.0.4 futures==2.2.0 idna==3.10 importlib-metadata==4.2.0 iniconfig==1.1.1 isort==5.10.1 lazy-object-proxy==1.7.1 mccabe==0.7.0 mock==5.2.0 nose==1.3.7 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pycurl==7.45.6 pyflakes==2.5.0 pylint==2.13.9 pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 tornado==6.1 -e git+https://github.com/gmr/tornado-aws.git@549b844677b8112f7da3a57dd86e4a2e7178235f#egg=tornado_aws typed-ast==1.5.5 typing_extensions==4.1.1 urllib3==1.26.20 wrapt==1.16.0 zipp==3.6.0
name: tornado-aws channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==2.11.7 - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - dill==0.3.4 - flake8==5.0.4 - futures==2.2.0 - idna==3.10 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pycurl==7.45.6 - pyflakes==2.5.0 - pylint==2.13.9 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - tornado==6.1 - typed-ast==1.5.5 - typing-extensions==4.1.1 - urllib3==1.26.20 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/tornado-aws
[ "tests/client_tests.py::ClientCloseTestCase::test_close_is_called", "tests/client_tests.py::AsyncClientCloseTestCase::test_close_is_called" ]
[ "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_credentials_and_request_fail", "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_credentials_fail", "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_expired_credentials", "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_needs_credentials", "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_os_error", "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_refresh_failure", "tests/client_tests.py::AsyncClientFetchTestCase::test_fetch_success" ]
[ "tests/client_tests.py::ClientConfigTestCase::test_passed_in_values", "tests/client_tests.py::ClientConfigTestCase::test_with_valid_config_default_profile", "tests/client_tests.py::ClientConfigTestCase::test_with_valid_config_specified_profile", "tests/client_tests.py::AsyncClientConfigTestCase::test_passed_in_values", "tests/client_tests.py::AsyncClientConfigTestCase::test_with_valid_config_default_profile", "tests/client_tests.py::AsyncClientConfigTestCase::test_with_valid_config_specified_profile", "tests/client_tests.py::AMZErrorTestCase::test_awz_error", "tests/client_tests.py::AMZErrorTestCase::test_unsupported_payload", "tests/client_tests.py::XMLErrorTestCase::test_advertising_error", "tests/client_tests.py::XMLErrorTestCase::test_ec2_error", "tests/client_tests.py::XMLErrorTestCase::test_s3_error", "tests/client_tests.py::XMLErrorTestCase::test_simpledb_error", "tests/client_tests.py::XMLErrorTestCase::test_unparsable_error", "tests/client_tests.py::XMLErrorTestCase::test_unsupported_error", "tests/client_tests.py::ProcessErrorTestCase::test_599_bypasses_processing", "tests/client_tests.py::ProcessErrorTestCase::test_process_bogus_response", "tests/client_tests.py::ProcessErrorTestCase::test_process_dynamodb_error_creds", "tests/client_tests.py::ProcessErrorTestCase::test_process_error_awz_creds", "tests/client_tests.py::ProcessErrorTestCase::test_process_error_ec2_non_auth", "tests/client_tests.py::ProcessErrorTestCase::test_process_error_s3", "tests/client_tests.py::ProcessErrorTestCase::test_process_simpledb_non_auth", "tests/client_tests.py::ClientFetchTestCase::test_fetch_needs_credentials", "tests/client_tests.py::ClientFetchTestCase::test_fetch_no_headers", "tests/client_tests.py::ClientFetchTestCase::test_fetch_os_error", "tests/client_tests.py::ClientFetchTestCase::test_fetch_success", "tests/client_tests.py::ClientFetchTestCase::test_fetch_when_client_fails_credentials", "tests/client_tests.py::ClientFetchTestCase::test_fetch_when_client_needs_credentials", "tests/client_tests.py::ClientFetchTestCase::test_fetch_when_client_raises_error", "tests/client_tests.py::NoCurlAsyncTestCase::test_no_curl_raises_exception" ]
[]
BSD 3-Clause "New" or "Revised" License
2,307
[ "tornado_aws/client.py", "setup.py", "requires/installation.txt", "docs/history.rst", "tornado_aws/__init__.py" ]
[ "tornado_aws/client.py", "setup.py", "requires/installation.txt", "docs/history.rst", "tornado_aws/__init__.py" ]
dask__dask-3301
11a50f0d329bdaf1ea6b7f0cff9500f55699fd36
2018-03-19 16:23:38
48c4a589393ebc5b335cc5c7df291901401b0b15
jakirkham: LGTM. Thanks @martindurant. Were you running into some bad behavior because of this, or was it just slow?

martindurant: @jakirkham, this is specifically in response to #3248. Something more sophisticated might be useful in any more complicated case, to minimise the calls to astype.

jakirkham: Thanks for the info. Would expect that endianness would be preserved by `promote_types`. Though resolving conflicts between different endians is less clear (and may be at the crux of this issue). Didn't see anything that matched on the NumPy issue tracker, so raised it as an issue (https://github.com/numpy/numpy/issues/10767).
diff --git a/dask/array/core.py b/dask/array/core.py index a4990fe95..1d3d54f75 100644 --- a/dask/array/core.py +++ b/dask/array/core.py @@ -1208,7 +1208,12 @@ class Array(Base): @wraps(store) def store(self, target, **kwargs): - return store([self], [target], **kwargs) + r = store([self], [target], **kwargs) + + if kwargs.get("return_stored", False): + r = r[0] + + return r def to_hdf5(self, filename, datapath, **kwargs): """ Store array in HDF5 file @@ -2611,8 +2616,12 @@ def concatenate(seq, axis=0, allow_unknown_chunksizes=False): cum_dims = [0] + list(accumulate(add, [len(a.chunks[axis]) for a in seq])) - dt = reduce(np.promote_types, [a.dtype for a in seq]) - seq = [x.astype(dt) for x in seq] + seq_dtypes = [a.dtype for a in seq] + if len(set(seq_dtypes)) > 1: + dt = reduce(np.promote_types, seq_dtypes) + seq = [x.astype(dt) for x in seq] + else: + dt = seq_dtypes[0] names = [a.name for a in seq]
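Besides the `concatenate` dtype handling, the diff above also changes the single-source `Array.store` method so that `return_stored=True` hands back the stored array itself rather than a one-element list. A hedged usage sketch, with shapes and target made up and based on the behaviour asserted in the new `test_store_method_return` test:

```python
# Illustrative only: Array.store(..., return_stored=True) now unwraps its result.
import numpy as np
import dask.array as da

d = da.ones((10, 10), chunks=(2, 2))
target = np.zeros((10, 10))

stored = d.store(target, return_stored=True)
print(type(stored))   # a dask Array wrapping the filled-in target, not [Array]
```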
optimization of array.concatenate depends strongly on endianness I have encountered a dask optimization issue that I think is at the core of xgcm/xmitgcm#73. Basically, I am constructing a big dask array by concatenating many numpy memmaps, each created within a `from_delayed` function. Then I want to get back out a single value from this big array. In theory this should go very fast and use very little memory, as if I had accessed the original memmap. And indeed it does...unless the dtype is big endian! Here is how to reproduce this issue. First create some test data ```python import numpy as np import dask import dask.array # create some example binary data (8GB) # large dataset is necessary to see timing differences shape = (1, 50, 1000, 2000) nfiles = 10 dtype = np.dtype('f8') # note: this doesn't matter at this stage data = np.zeros(shape, dtype) filenames = ['data.{:04d}'.format(n) for n in range(nfiles)] for fname in filenames: data.tofile(fname) ``` Now define some functions to read it into dask arrays ```python def read_as_memmap(fname, dtype): return np.memmap(fname, dtype=dtype, shape=shape) def lazy_memmap(fname, dtype): return dask.array.from_delayed( dask.delayed(read_as_memmap)(fname, dtype), shape, dtype) def read_all_data(dtype): return dask.array.concatenate( [lazy_memmap(fname, dtype) for fname in filenames]) ``` Now perform a timing test on reading back a single value with default (little endian) datatype ```python dtype = np.dtype('f8') all_data = read_all_data(dtype) %timeit lazy_memmap(filenames[-1], dtype)[0, 0, 0].compute() %timeit all_data[-1, 0, 0, 0].compute() ``` On my machine I get ``` 100 loops, best of 3: 2.54 ms per loop 100 loops, best of 3: 2.3 ms per loop ``` basically identical and very fast, as we expect. The dask graph for `all_data[-1, 0, 0, 0]` looks like this: ![image](https://user-images.githubusercontent.com/1197350/37015504-abb9adee-20d5-11e8-8506-14fafe84dba0.png) Instead, if I repeat the test with a big-endian dtype ```python dtype = np.dtype('>f8') all_data = read_all_data(dtype) %timeit lazy_memmap(filenames[-1], dtype)[0, 0, 0].compute() %timeit all_data[-1, 0, 0, 0].compute() ``` I get this: ``` 100 loops, best of 3: 2.57 ms per loop 1 loop, best of 3: 929 ms per loop ``` The `ResourceProfiler` diagnostics also indicate much higher memory usage. Now the dask graph looks like this ![image](https://user-images.githubusercontent.com/1197350/37015518-bc1160ce-20d5-11e8-84d6-0f5a9ace8dd8.png) There appears to be an extra call to `astype` which is interfering with the optimization somehow. I'm using dask version 0.17.1.
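A small illustration of what appears to be the crux here, and what the patch above sidesteps by only promoting when the input dtypes actually differ. It assumes a little-endian host and that `np.promote_types` normalizes byte order (the point raised in the review comments and the linked NumPy issue), so it is a sketch rather than a definitive explanation.

```python
# Illustration only: promoting identical big-endian dtypes can still yield a
# "different" (native-order) dtype, which forces an astype on every chunk.
import numpy as np

big = np.dtype('>f8')
promoted = np.promote_types(big, big)
print(promoted)            # float64 (native byte order)
print(promoted == big)     # expected False on a little-endian machine
# Before the fix, concatenate always ran x.astype(promoted) on every input,
# so the big-endian case picked up the extra astype tasks seen in the graphs.
```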
dask/dask
diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py index cc233f4b4..87f420c25 100644 --- a/dask/array/tests/test_array_core.py +++ b/dask/array/tests/test_array_core.py @@ -335,6 +335,17 @@ def test_concatenate(): pytest.raises(ValueError, lambda: concatenate([a, b, c], axis=2)) [email protected]('dtypes', [(('>f8', '>f8'), '>f8'), + (('<f4', '<f8'), '<f8')]) +def test_concatenate_types(dtypes): + dts_in, dt_out = dtypes + arrs = [np.zeros(4, dtype=dt) for dt in dts_in] + darrs = [from_array(arr, chunks=(2,)) for arr in arrs] + + x = concatenate(darrs, axis=0) + assert x.dtype == dt_out + + def test_concatenate_unknown_axes(): dd = pytest.importorskip('dask.dataframe') pd = pytest.importorskip('pandas') @@ -1513,6 +1524,26 @@ def test_store_locks(): assert lock.acquire_count == nchunks +def test_store_method_return(): + d = da.ones((10, 10), chunks=(2, 2)) + a = d + 1 + + for compute in [False, True]: + for return_stored in [False, True]: + at = np.zeros(shape=(10, 10)) + r = a.store( + at, get=dask.threaded.get, + compute=compute, return_stored=return_stored + ) + + if return_stored: + assert isinstance(r, Array) + elif compute: + assert r is None + else: + assert isinstance(r, Delayed) + + @pytest.mark.xfail(reason="can't lock with multiprocessing") def test_store_multiprocessing_lock(): d = da.ones((10, 10), chunks=(2, 2))
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_media" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
1.21
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio", "numpydoc", "sphinx", "sphinx_rtd_theme", "cloudpickle", "pandas>=0.19.0", "distributed" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 click==8.0.4 cloudpickle==2.2.1 contextvars==2.4 coverage==6.2 -e git+https://github.com/dask/dask.git@11a50f0d329bdaf1ea6b7f0cff9500f55699fd36#egg=dask distributed==1.21.8 docutils==0.18.1 execnet==1.9.0 HeapDict==1.0.1 idna==3.10 imagesize==1.4.1 immutables==0.19 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==3.0.3 locket==1.0.0 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work msgpack==1.0.5 numpy==1.19.5 numpydoc==1.1.0 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 partd==1.2.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work Pygments==2.14.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 sortedcontainers==2.4.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tblib==1.7.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 toolz==0.12.0 tornado==6.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 zict==2.1.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - babel==2.11.0 - charset-normalizer==2.0.12 - click==8.0.4 - cloudpickle==2.2.1 - contextvars==2.4 - coverage==6.2 - distributed==1.21.8 - docutils==0.18.1 - execnet==1.9.0 - heapdict==1.0.1 - idna==3.10 - imagesize==1.4.1 - immutables==0.19 - jinja2==3.0.3 - locket==1.0.0 - markupsafe==2.0.1 - msgpack==1.0.5 - numpy==1.19.5 - numpydoc==1.1.0 - pandas==1.1.5 - partd==1.2.0 - psutil==7.0.0 - pygments==2.14.0 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sortedcontainers==2.4.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tblib==1.7.0 - tomli==1.2.3 - toolz==0.12.0 - tornado==6.1 - urllib3==1.26.20 - zict==2.1.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_array_core.py::test_concatenate_types[dtypes0]", "dask/array/tests/test_array_core.py::test_store_method_return" ]
[ "dask/array/tests/test_array_core.py::test_field_access", "dask/array/tests/test_array_core.py::test_field_access_with_shape", "dask/array/tests/test_array_core.py::test_matmul", "dask/array/tests/test_array_core.py::test_from_array_names" ]
[ "dask/array/tests/test_array_core.py::test_getem", "dask/array/tests/test_array_core.py::test_top", "dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules", "dask/array/tests/test_array_core.py::test_top_literals", "dask/array/tests/test_array_core.py::test_atop_literals", "dask/array/tests/test_array_core.py::test_concatenate3_on_scalars", "dask/array/tests/test_array_core.py::test_chunked_dot_product", "dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one", "dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions", "dask/array/tests/test_array_core.py::test_broadcast_dimensions", "dask/array/tests/test_array_core.py::test_Array", "dask/array/tests/test_array_core.py::test_uneven_chunks", "dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims", "dask/array/tests/test_array_core.py::test_keys", "dask/array/tests/test_array_core.py::test_Array_computation", "dask/array/tests/test_array_core.py::test_stack", "dask/array/tests/test_array_core.py::test_short_stack", "dask/array/tests/test_array_core.py::test_stack_scalars", "dask/array/tests/test_array_core.py::test_stack_promote_type", "dask/array/tests/test_array_core.py::test_stack_rechunk", "dask/array/tests/test_array_core.py::test_concatenate", "dask/array/tests/test_array_core.py::test_concatenate_types[dtypes1]", "dask/array/tests/test_array_core.py::test_concatenate_unknown_axes", "dask/array/tests/test_array_core.py::test_concatenate_rechunk", "dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings", "dask/array/tests/test_array_core.py::test_block_simple_row_wise", "dask/array/tests/test_array_core.py::test_block_simple_column_wise", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_row_wise", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_multiple_rows", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_column_wise", "dask/array/tests/test_array_core.py::test_block_mixed_1d_and_2d", "dask/array/tests/test_array_core.py::test_block_complicated", "dask/array/tests/test_array_core.py::test_block_nested", "dask/array/tests/test_array_core.py::test_block_3d", "dask/array/tests/test_array_core.py::test_block_with_mismatched_shape", "dask/array/tests/test_array_core.py::test_block_no_lists", "dask/array/tests/test_array_core.py::test_block_invalid_nesting", "dask/array/tests/test_array_core.py::test_block_empty_lists", "dask/array/tests/test_array_core.py::test_block_tuple", "dask/array/tests/test_array_core.py::test_binops", "dask/array/tests/test_array_core.py::test_broadcast_shapes", "dask/array/tests/test_array_core.py::test_elemwise_on_scalars", "dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays", "dask/array/tests/test_array_core.py::test_elemwise_differently_chunked", "dask/array/tests/test_array_core.py::test_elemwise_dtype", "dask/array/tests/test_array_core.py::test_operators", "dask/array/tests/test_array_core.py::test_operator_dtype_promotion", "dask/array/tests/test_array_core.py::test_T", "dask/array/tests/test_array_core.py::test_norm", "dask/array/tests/test_array_core.py::test_broadcast_to", "dask/array/tests/test_array_core.py::test_broadcast_to_array", "dask/array/tests/test_array_core.py::test_broadcast_to_scalar", "dask/array/tests/test_array_core.py::test_broadcast_to_chunks", "dask/array/tests/test_array_core.py::test_broadcast_arrays", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape0-v_shape0]", 
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape1-v_shape1]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape2-v_shape2]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape3-v_shape3]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape4-v_shape4]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape5-v_shape5]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape6-v_shape6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]", "dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]", "dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]", "dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]", "dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]", "dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]", "dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]", "dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]", "dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]", "dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]", "dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]", "dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]", "dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]", "dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]", "dask/array/tests/test_array_core.py::test_reshape[original_shape31-new_shape31-chunks31]", "dask/array/tests/test_array_core.py::test_reshape[original_shape32-new_shape32-chunks32]", "dask/array/tests/test_array_core.py::test_reshape[original_shape33-new_shape33-chunks33]", "dask/array/tests/test_array_core.py::test_reshape[original_shape34-new_shape34-chunks34]", 
"dask/array/tests/test_array_core.py::test_reshape_exceptions", "dask/array/tests/test_array_core.py::test_reshape_splat", "dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only", "dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions", "dask/array/tests/test_array_core.py::test_full", "dask/array/tests/test_array_core.py::test_map_blocks", "dask/array/tests/test_array_core.py::test_map_blocks2", "dask/array/tests/test_array_core.py::test_map_blocks_with_constants", "dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs", "dask/array/tests/test_array_core.py::test_map_blocks_with_chunks", "dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference", "dask/array/tests/test_array_core.py::test_from_function_requires_block_args", "dask/array/tests/test_array_core.py::test_repr", "dask/array/tests/test_array_core.py::test_slicing_with_ellipsis", "dask/array/tests/test_array_core.py::test_slicing_with_ndarray", "dask/array/tests/test_array_core.py::test_dtype", "dask/array/tests/test_array_core.py::test_blockdims_from_blockshape", "dask/array/tests/test_array_core.py::test_coerce", "dask/array/tests/test_array_core.py::test_bool", "dask/array/tests/test_array_core.py::test_store_delayed_target", "dask/array/tests/test_array_core.py::test_store", "dask/array/tests/test_array_core.py::test_store_regions", "dask/array/tests/test_array_core.py::test_store_compute_false", "dask/array/tests/test_array_core.py::test_store_locks", "dask/array/tests/test_array_core.py::test_to_dask_dataframe", "dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions", "dask/array/tests/test_array_core.py::test_dtype_complex", "dask/array/tests/test_array_core.py::test_astype", "dask/array/tests/test_array_core.py::test_arithmetic", "dask/array/tests/test_array_core.py::test_elemwise_consistent_names", "dask/array/tests/test_array_core.py::test_optimize", "dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays", "dask/array/tests/test_array_core.py::test_getter", "dask/array/tests/test_array_core.py::test_size", "dask/array/tests/test_array_core.py::test_nbytes", "dask/array/tests/test_array_core.py::test_itemsize", "dask/array/tests/test_array_core.py::test_Array_normalizes_dtype", "dask/array/tests/test_array_core.py::test_from_array_with_lock", "dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter", "dask/array/tests/test_array_core.py::test_from_array_no_asarray", "dask/array/tests/test_array_core.py::test_from_array_getitem", "dask/array/tests/test_array_core.py::test_from_array_minus_one", "dask/array/tests/test_array_core.py::test_asarray", "dask/array/tests/test_array_core.py::test_asanyarray", "dask/array/tests/test_array_core.py::test_from_func", "dask/array/tests/test_array_core.py::test_concatenate3_2", "dask/array/tests/test_array_core.py::test_map_blocks3", "dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks", "dask/array/tests/test_array_core.py::test_normalize_chunks", "dask/array/tests/test_array_core.py::test_raise_on_no_chunks", "dask/array/tests/test_array_core.py::test_chunks_is_immutable", "dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs", "dask/array/tests/test_array_core.py::test_long_slice", "dask/array/tests/test_array_core.py::test_ellipsis_slicing", "dask/array/tests/test_array_core.py::test_point_slicing", "dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice", "dask/array/tests/test_array_core.py::test_slice_with_floats", 
"dask/array/tests/test_array_core.py::test_slice_with_integer_types", "dask/array/tests/test_array_core.py::test_index_with_integer_types", "dask/array/tests/test_array_core.py::test_vindex_basic", "dask/array/tests/test_array_core.py::test_vindex_nd", "dask/array/tests/test_array_core.py::test_vindex_negative", "dask/array/tests/test_array_core.py::test_vindex_errors", "dask/array/tests/test_array_core.py::test_vindex_merge", "dask/array/tests/test_array_core.py::test_empty_array", "dask/array/tests/test_array_core.py::test_memmap", "dask/array/tests/test_array_core.py::test_to_npy_stack", "dask/array/tests/test_array_core.py::test_view", "dask/array/tests/test_array_core.py::test_view_fortran", "dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension", "dask/array/tests/test_array_core.py::test_broadcast_chunks", "dask/array/tests/test_array_core.py::test_chunks_error", "dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs", "dask/array/tests/test_array_core.py::test_dont_fuse_outputs", "dask/array/tests/test_array_core.py::test_dont_dealias_outputs", "dask/array/tests/test_array_core.py::test_timedelta_op", "dask/array/tests/test_array_core.py::test_to_delayed", "dask/array/tests/test_array_core.py::test_to_delayed_optimize_graph", "dask/array/tests/test_array_core.py::test_cumulative", "dask/array/tests/test_array_core.py::test_atop_names", "dask/array/tests/test_array_core.py::test_atop_new_axes", "dask/array/tests/test_array_core.py::test_atop_kwargs", "dask/array/tests/test_array_core.py::test_atop_chunks", "dask/array/tests/test_array_core.py::test_from_delayed", "dask/array/tests/test_array_core.py::test_A_property", "dask/array/tests/test_array_core.py::test_copy_mutate", "dask/array/tests/test_array_core.py::test_npartitions", "dask/array/tests/test_array_core.py::test_astype_gh1151", "dask/array/tests/test_array_core.py::test_elemwise_name", "dask/array/tests/test_array_core.py::test_map_blocks_name", "dask/array/tests/test_array_core.py::test_array_picklable", "dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks", "dask/array/tests/test_array_core.py::test_concatenate_axes", "dask/array/tests/test_array_core.py::test_atop_concatenate", "dask/array/tests/test_array_core.py::test_common_blockdim", "dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly", "dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks", "dask/array/tests/test_array_core.py::test_uneven_chunks_atop", "dask/array/tests/test_array_core.py::test_warn_bad_rechunking", "dask/array/tests/test_array_core.py::test_optimize_fuse_keys", "dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn", "dask/array/tests/test_array_core.py::test_map_blocks_delayed", "dask/array/tests/test_array_core.py::test_no_chunks", "dask/array/tests/test_array_core.py::test_no_chunks_2d", "dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks", "dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks", "dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d", "dask/array/tests/test_array_core.py::test_index_array_with_array_1d", "dask/array/tests/test_array_core.py::test_index_array_with_array_2d", "dask/array/tests/test_array_core.py::test_setitem_1d", "dask/array/tests/test_array_core.py::test_setitem_2d", "dask/array/tests/test_array_core.py::test_setitem_errs", "dask/array/tests/test_array_core.py::test_zero_slice_dtypes", "dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk", 
"dask/array/tests/test_array_core.py::test_atop_zero_shape", "dask/array/tests/test_array_core.py::test_atop_zero_shape_new_axes", "dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape", "dask/array/tests/test_array_core.py::test_from_array_name", "dask/array/tests/test_array_core.py::test_concatenate_errs", "dask/array/tests/test_array_core.py::test_stack_errs", "dask/array/tests/test_array_core.py::test_atop_with_numpy_arrays", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]", "dask/array/tests/test_array_core.py::test_constructor_plugin", "dask/array/tests/test_array_core.py::test_no_warnings_on_metadata", "dask/array/tests/test_array_core.py::test_delayed_array_key_hygeine", "dask/array/tests/test_array_core.py::test_empty_chunks_in_array_len", "dask/array/tests/test_array_core.py::test_meta[None]", "dask/array/tests/test_array_core.py::test_meta[dtype1]" ]
[]
BSD 3-Clause "New" or "Revised" License
2308
[ "dask/array/core.py" ]
[ "dask/array/core.py" ]