Dataset columns (name: type, observed range):

instance_id: string, length 10-57
base_commit: string, length 40
created_at: date string, 2014-04-30 14:58:36 to 2025-04-30 20:14:11
environment_setup_commit: string, length 40
hints_text: string, length 0-273k
patch: string, length 251-7.06M
problem_statement: string, length 11-52.5k
repo: string, length 7-53
test_patch: string, length 231-997k
meta: dict
version: string, 851 distinct values
install_config: dict
requirements: string, length 93-34.2k, nullable
environment: string, length 760-20.5k, nullable
FAIL_TO_PASS: list, length 1-9.39k
FAIL_TO_FAIL: list, length 0-2.69k
PASS_TO_PASS: list, length 0-7.87k
PASS_TO_FAIL: list, length 0-192
license_name: string, 55 distinct values
__index_level_0__: int64, 0-21.4k
before_filepaths: list, length 1-105
after_filepaths: list, length 1-105
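Each row that follows is one record, with its fields separated by "|" in the column order above. For orientation, here is a minimal sketch of loading and inspecting such a record with the `datasets` library; the dataset identifier is a placeholder, since none is given on this page.

```python
# Sketch only: "org/dataset-name" is a placeholder; substitute the real
# dataset identifier or a local path to the exported data.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")

# Column names and types as summarized above (strings, dicts, lists).
print(ds.features)

row = ds[0]
print(row["instance_id"])    # e.g. "ifosch__accloudtant-52"
print(row["repo"])           # e.g. "ifosch/accloudtant"
print(len(row["patch"]))     # gold patch stored as a unified diff string
print(row["FAIL_TO_PASS"])   # tests expected to flip from failing to passing
```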
ifosch__accloudtant-52 | 71d752b9244ff84978a83413be84035a92d5c077 | 2016-01-30 11:16:12 | 71d752b9244ff84978a83413be84035a92d5c077 | diff --git a/accloudtant/aws/reports.py b/accloudtant/aws/reports.py
index 26af2d1..905018e 100644
--- a/accloudtant/aws/reports.py
+++ b/accloudtant/aws/reports.py
@@ -16,10 +16,13 @@ class Reports(object):
def find_reserved_instance(self):
for instance in self.instances:
- instance.current = float(self.prices.prices[instance.key][instance.region][instance.size]['od'])
+ instance_region = self.prices.prices[instance.key][instance.region]
+ instance_size = instance_region[instance.size]
+ instance.current = float(instance_size['od'])
if instance.state == 'stopped':
instance.current = 0.0
- instance.best = float(self.prices.prices[instance.key][instance.region][instance.size]['ri']['yrTerm3']['allUpfront']['effectiveHourly'])
+ instance_allUpfront = instance_size['ri']['yrTerm3']['allUpfront']
+ instance.best = float(instance_allUpfront['effectiveHourly'])
for reserved in self.reserved_instances['ReservedInstances']:
if 'InstancesLeft' not in reserved.keys():
reserved['InstancesLeft'] = reserved['InstanceCount']
@@ -39,6 +42,8 @@ class Reports(object):
'State',
'Launch time',
'Reserved',
+ 'Current hourly price',
+ 'Renewed hourly price',
]
table = []
for instance in self.instances:
@@ -51,6 +56,8 @@ class Reports(object):
instance.state,
instance.launch_time.strftime('%Y-%m-%d %H:%M:%S'),
instance.reserved,
+ instance.current,
+ instance.best,
]
table.append(row)
return tabulate(table, headers)
| Add price information to EC2 instances information
<!---
@huboard:{"order":6.0732421875,"milestone_order":0.09375,"custom_state":""}
-->
| ifosch/accloudtant | diff --git a/tests/aws/report_expected.txt b/tests/aws/report_expected.txt
index 928efa2..f2bbb8b 100644
--- a/tests/aws/report_expected.txt
+++ b/tests/aws/report_expected.txt
@@ -1,9 +1,9 @@
-Id Name Type AZ OS State Launch time Reserved
----------- --------- ---------- ---------- ------------------------ ------- ------------------- ----------
-i-912a4392 web1 c3.8xlarge us-east-1c Windows running 2015-10-22 14:15:10 Yes
-i-1840273e app1 r2.8xlarge us-east-1b Red Hat Enterprise Linux running 2015-10-22 14:15:10 Yes
-i-9840273d app2 r2.8xlarge us-east-1c SUSE Linux running 2015-10-22 14:15:10 Yes
-i-1840273d database1 r2.8xlarge us-east-1c Linux/UNIX stopped 2015-10-22 14:15:10 No
-i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes
-i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes
-i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No
+Id Name Type AZ OS State Launch time Reserved Current hourly price Renewed hourly price
+---------- --------- ---------- ---------- ------------------------ ------- ------------------- ---------- ---------------------- ----------------------
+i-912a4392 web1 c3.8xlarge us-east-1c Windows running 2015-10-22 14:15:10 Yes 0.5121 0.3894
+i-1840273e app1 r2.8xlarge us-east-1b Red Hat Enterprise Linux running 2015-10-22 14:15:10 Yes 0.3894 0.3794
+i-9840273d app2 r2.8xlarge us-east-1c SUSE Linux running 2015-10-22 14:15:10 Yes 0.5225 0.389
+i-1840273d database1 r2.8xlarge us-east-1c Linux/UNIX stopped 2015-10-22 14:15:10 No 0 0.379
+i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
+i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
+i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No 0.767 0.3892
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ifosch/accloudtant.git@71d752b9244ff84978a83413be84035a92d5c077#egg=accloudtant
boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
tabulate==0.9.0
tomli==2.2.1
urllib3==1.26.20
| name: accloudtant
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- tabulate==0.9.0
- tomli==2.2.1
- urllib3==1.26.20
prefix: /opt/conda/envs/accloudtant
| [
"tests/aws/test_reports.py::test_reports"
]
| []
| [
"tests/aws/test_instance.py::test_instance",
"tests/aws/test_instance.py::test_guess_os",
"tests/aws/test_instance.py::test_match_reserved_instance",
"tests/aws/test_prices.py::test_process_ec2",
"tests/aws/test_prices.py::test_process_model",
"tests/aws/test_prices.py::test_process_generic",
"tests/aws/test_prices.py::test_process_on_demand",
"tests/aws/test_prices.py::test_process_reserved",
"tests/aws/test_prices.py::test_process_data_transfer",
"tests/aws/test_prices.py::test_process_ebs",
"tests/aws/test_prices.py::test_process_eip",
"tests/aws/test_prices.py::test_process_cw",
"tests/aws/test_prices.py::test_process_elb",
"tests/aws/test_prices.py::test_print_prices",
"tests/aws/test_prices.py::test_prices",
"tests/aws/test_prices.py::test_prices_with_warning",
"tests/test_utils.py::test_fix_lazy_json"
]
| []
| null | 405 | [
"accloudtant/aws/reports.py"
]
| [
"accloudtant/aws/reports.py"
]
|
|
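The install_config field in each record, like the one in the record above (pre_install, packages, pip_packages, install, reqs_path, test_cmd), spells out how the test environment is built. Below is a minimal sketch of turning those fields into shell commands, assuming the record is already loaded as a Python dict and the repository is checked out at environment_setup_commit; the helper name is hypothetical, not part of the dataset.

```python
# Hypothetical helper: flattens a record's install_config (like the one shown
# above) into an ordered list of shell commands. Assumes the repository is
# already cloned and checked out at environment_setup_commit.
def setup_commands(install_config):
    cmds = []
    cmds += install_config.get("pre_install") or []    # e.g. apt-get update
    for reqs in (install_config.get("reqs_path") or []):
        cmds.append("pip install -r " + reqs)           # e.g. requirements.txt
    for pkg in (install_config.get("pip_packages") or []):
        cmds.append("pip install " + pkg)               # e.g. pytest
    if install_config.get("install"):
        cmds.append(install_config["install"])          # e.g. "pip install -e ."
    return cmds

# Field values echo the record above.
config = {
    "pre_install": ["apt-get update", "apt-get install -y gcc"],
    "reqs_path": ["requirements.txt"],
    "pip_packages": ["pytest"],
    "install": "pip install -e .",
}
for cmd in setup_commands(config):
    print(cmd)
```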
guykisel__inline-plz-28 | 59cb7f9721ca3390fa31c48583e786e3728e8f1a | 2016-01-31 22:54:38 | 59cb7f9721ca3390fa31c48583e786e3728e8f1a | diff --git a/inlineplz/linters/__init__.py b/inlineplz/linters/__init__.py
index f4b0a73..420508e 100644
--- a/inlineplz/linters/__init__.py
+++ b/inlineplz/linters/__init__.py
@@ -13,7 +13,7 @@ from inlineplz import parsers
LINTERS = {
'prospector': {
'install': ['pip', 'install', 'prospector'],
- 'run': ['prospector', '--zero-exit'],
+ 'run': ['prospector', '--zero-exit', '-o', 'json'],
'dotfiles': ['.prospector.yaml'],
'parser': parsers.ProspectorParser
},
diff --git a/inlineplz/parsers/prospector.py b/inlineplz/parsers/prospector.py
index 68acb07..8146c2a 100644
--- a/inlineplz/parsers/prospector.py
+++ b/inlineplz/parsers/prospector.py
@@ -1,43 +1,31 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
+from collections import OrderedDict
+import json
+
from inlineplz.parsers.base import ParserBase
from inlineplz.message import Message
class ProspectorParser(ParserBase):
- """Parse default prospector output."""
+ """Parse json prospector output."""
def parse(self, lint_data):
messages = []
- current_message = None
- current_filename = ''
- current_line = ''
-
- messages_found = False
-
- for line in lint_data.split('\n'):
- # check for message block
- if not line.strip():
- continue
- if not messages_found:
- if line.strip() == 'Messages':
- messages_found = True
- continue
- # check for end of message block
- elif line.strip() == 'Check Information':
- break
- # new filename
- if not line.startswith(' '):
- current_filename = line.strip()
- continue
- # new line number
- elif not line.startswith(' '):
- current_line = int(line.replace(' Line: ', '').strip())
- current_message = Message(current_filename, current_line)
- messages.append(current_message)
- continue
- # new content
- current_message.comments.append(line.lstrip())
-
+ for msgdata in json.loads(
+ lint_data,
+ object_pairs_hook=OrderedDict
+ ).get('messages'):
+ msg = Message(
+ msgdata['location']['path'],
+ msgdata['location']['line']
+ )
+ msgbody = '{0}: {1} ({2})'.format(
+ msgdata['source'],
+ msgdata['message'],
+ msgdata['code']
+ )
+ msg.comments.append(msgbody)
+ messages.append(msg)
return messages
| refactor prospector parser to use json formatter | guykisel/inline-plz | diff --git a/tests/parsers/test_prospector.py b/tests/parsers/test_prospector.py
index f673b81..fbba037 100644
--- a/tests/parsers/test_prospector.py
+++ b/tests/parsers/test_prospector.py
@@ -17,14 +17,12 @@ prospector_path = os.path.join(
def test_prospector():
with open(prospector_path) as inputfile:
messages = prospector.ProspectorParser().parse(inputfile.read())
- assert messages[0].content == '`pylint: syntax-error / invalid syntax`'
- assert messages[0].line_number == 34
- assert messages[0].path == 'docs/conf.py'
- assert messages[1].content == '`pylint: unused-import / Unused Message imported from message`'
- assert messages[1].line_number == 4
- assert messages[1].path == 'inline-plz/parsers/base.py'
- assert messages[9].content == ('```\npylint: misplaced-comparison-constant / Comparison '
- 'should be __name__ == \'__main__\' (col 3)\npylint: '
- 'pretend this is a real message\n```')
- assert messages[9].line_number == 113
- assert len(messages) == 11
+ assert messages[0].content == '`pep257: Missing docstring in public package (D104)`'
+ assert messages[0].line_number == 1
+ assert messages[0].path == 'inlineplz/util/__init__.py'
+ assert messages[1].content == '`pep257: Missing docstring in public package (D104)`'
+ assert messages[1].line_number == 1
+ assert messages[1].path == 'inlineplz/parsers/__init__.py'
+ assert messages[9].content == ('`pep257: One-line docstring should fit on one line with quotes (found 2) (D200)`')
+ assert messages[9].line_number == 1
+ assert len(messages) == 32
diff --git a/tests/testdata/parsers/prospector.txt b/tests/testdata/parsers/prospector.txt
index 7c9ed99..c6ec9f7 100644
--- a/tests/testdata/parsers/prospector.txt
+++ b/tests/testdata/parsers/prospector.txt
@@ -1,48 +1,407 @@
-Messages
-========
-
-docs\conf.py
- Line: 34
- pylint: syntax-error / invalid syntax
-
-inline-plz\parsers\base.py
- Line: 4
- pylint: unused-import / Unused Message imported from message
- Line: 9
- pylint: redefined-builtin / Redefining built-in 'input' (col 20)
-
-inline-plz\parsers\prospector.py
- Line: 5
- pylint: unused-import / Unused Message imported from message
- Line: 8
- pylint: redefined-builtin / Redefining built-in 'input' (col 20)
- Line: 17
- pylint: undefined-variable / Undefined variable 'message' (col 32)
- Line: 24
- pylint: undefined-variable / Undefined variable 'message' (col 32)
- Line: 25
- pylint: redefined-variable-type / Redefinition of current_line type from str to int (col 16)
-
-travis_pypi_setup.py
- Line: 20
- pylint: bare-except / No exception type(s) specified
- Line: 113
- pylint: misplaced-comparison-constant / Comparison should be __name__ == '__main__' (col 3)
- pylint: pretend this is a real message
- Line: 114
- pylint: wrong-import-position / Import "import argparse" should be placed at the top of the module (col 4)
-
-
-
-Check Information
-=================
- Started: 2016-01-09 12:50:17.649090
- Finished: 2016-01-09 12:50:19.027461
- Time Taken: 1.38 seconds
- Formatter: grouped
- Profiles: default, no_doc_warnings, no_test_warnings, strictness_medium, strictness_high, strictness_veryhigh, no_member_warnings
- Strictness: None
- Libraries Used:
- Tools Run: dodgy, mccabe, pep8, profile-validator, pyflakes, pylint
- Messages Found: 11
-
+{
+ "messages": [
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 1,
+ "path": "inlineplz\\util\\__init__.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D104",
+ "message": "Missing docstring in public package"
+ },
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 1,
+ "path": "inlineplz\\parsers\\__init__.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D104",
+ "message": "Missing docstring in public package"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 0,
+ "line": 9,
+ "path": "inlineplz\\main.py",
+ "module": "inlineplz.main",
+ "function": null
+ },
+ "code": "unused-import",
+ "message": "Unused parsers imported from inlineplz"
+ },
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 11,
+ "path": "inlineplz\\message.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D105",
+ "message": "Missing docstring in magic method"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 8,
+ "path": "setup.py",
+ "module": "setup",
+ "function": null
+ },
+ "code": "import-error",
+ "message": "Unable to import 'distutils.core'"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 0,
+ "line": 17,
+ "path": "setup.py",
+ "module": "setup",
+ "function": null
+ },
+ "code": "invalid-name",
+ "message": "Invalid constant name \"requirements\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 0,
+ "line": 23,
+ "path": "setup.py",
+ "module": "setup",
+ "function": null
+ },
+ "code": "invalid-name",
+ "message": "Invalid constant name \"test_requirements\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 12,
+ "path": "setup.py",
+ "module": "setup",
+ "function": null
+ },
+ "code": "invalid-name",
+ "message": "Invalid constant name \"readme\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 15,
+ "path": "setup.py",
+ "module": "setup",
+ "function": null
+ },
+ "code": "invalid-name",
+ "message": "Invalid constant name \"history\""
+ },
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 1,
+ "path": "travis_pypi_setup.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D200",
+ "message": "One-line docstring should fit on one line with quotes (found 2)"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 115,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "invalid-name",
+ "message": "Invalid constant name \"parser\""
+ },
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 69,
+ "path": "travis_pypi_setup.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D200",
+ "message": "One-line docstring should fit on one line with quotes (found 2)"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 27,
+ "line": 72,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": "prepend_line"
+ },
+ "code": "invalid-name",
+ "message": "Invalid variable name \"f\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 9,
+ "line": 106,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": "main"
+ },
+ "code": "redefined-outer-name",
+ "message": "Redefining name 'args' from outer scope (line 121)"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 32,
+ "line": 77,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": "prepend_line"
+ },
+ "code": "invalid-name",
+ "message": "Invalid variable name \"f\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 3,
+ "line": 113,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "misplaced-comparison-constant",
+ "message": "Comparison should be __name__ == '__main__'"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 27,
+ "line": 82,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": "load_yaml_config"
+ },
+ "code": "invalid-name",
+ "message": "Invalid variable name \"f\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 19,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "wrong-import-order",
+ "message": "standard import \"from urllib import urlopen\" comes before \"import yaml\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 0,
+ "line": 20,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "bare-except",
+ "message": "No exception type(s) specified"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 21,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "wrong-import-order",
+ "message": "standard import \"from urllib.request import urlopen\" comes before \"import yaml\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 32,
+ "line": 87,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": "save_yaml_config"
+ },
+ "code": "invalid-name",
+ "message": "Invalid variable name \"f\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 121,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "invalid-name",
+ "message": "Invalid constant name \"args\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 114,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "wrong-import-order",
+ "message": "standard import \"import argparse\" comes before \"import yaml\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 114,
+ "path": "travis_pypi_setup.py",
+ "module": "travis_pypi_setup",
+ "function": null
+ },
+ "code": "wrong-import-position",
+ "message": "Import \"import argparse\" should be placed at the top of the module"
+ },
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 1,
+ "path": "inlineplz\\interfaces\\__init__.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D104",
+ "message": "Missing docstring in public package"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 16,
+ "line": 36,
+ "path": "inlineplz\\parsers\\prospector.py",
+ "module": "inlineplz.parsers.prospector",
+ "function": "ProspectorParser.parse"
+ },
+ "code": "redefined-variable-type",
+ "message": "Redefinition of current_line type from str to int"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 12,
+ "line": 16,
+ "path": "inlineplz\\interfaces\\github.py",
+ "module": "inlineplz.interfaces.github",
+ "function": "GitHubInterface.__init__"
+ },
+ "code": "invalid-name",
+ "message": "Invalid attribute name \"gh\""
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 12,
+ "line": 18,
+ "path": "inlineplz\\interfaces\\github.py",
+ "module": "inlineplz.interfaces.github",
+ "function": "GitHubInterface.__init__"
+ },
+ "code": "redefined-variable-type",
+ "message": "Redefinition of self.gh type from github3.github.GitHub to github3.github.GitHubEnterprise"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 0,
+ "line": 4,
+ "path": "inlineplz\\interfaces\\github.py",
+ "module": "inlineplz.interfaces.github",
+ "function": null
+ },
+ "code": "unused-import",
+ "message": "Unused import os.path"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 20,
+ "line": 58,
+ "path": "inlineplz\\interfaces\\github.py",
+ "module": "inlineplz.interfaces.github",
+ "function": "GitHubInterface.position"
+ },
+ "code": "unused-variable",
+ "message": "Unused variable 'hunk_no'"
+ },
+ {
+ "source": "pylint",
+ "location": {
+ "character": 4,
+ "line": 14,
+ "path": "inlineplz\\interfaces\\github.py",
+ "module": "inlineplz.interfaces.github",
+ "function": "GitHubInterface.__init__"
+ },
+ "code": "too-many-arguments",
+ "message": "Too many arguments (6/5)"
+ },
+ {
+ "source": "pep257",
+ "location": {
+ "character": 0,
+ "line": 1,
+ "path": "inlineplz\\__init__.py",
+ "module": null,
+ "function": null
+ },
+ "code": "D104",
+ "message": "Missing docstring in public package"
+ }
+ ],
+ "summary": {
+ "strictness": "from profile",
+ "tools": [
+ "dodgy",
+ "mccabe",
+ "pep257",
+ "pep8",
+ "profile-validator",
+ "pyflakes",
+ "pylint"
+ ],
+ "formatter": "json",
+ "started": "2016-01-31 14:43:39.317922",
+ "profiles": ".prospector.yaml, full_pep8, doc_warnings, no_test_warnings, strictness_veryhigh, no_member_warnings",
+ "time_taken": "3.94",
+ "completed": "2016-01-31 14:43:43.256803",
+ "libraries": [],
+ "message_count": 32
+ }
+}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"numpy>=1.16.0",
"pandas>=1.0.0",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
cryptography==44.0.2
exceptiongroup==1.2.2
github3.py==4.0.1
idna==3.10
iniconfig==2.1.0
-e git+https://github.com/guykisel/inline-plz.git@59cb7f9721ca3390fa31c48583e786e3728e8f1a#egg=inlineplz
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
pycparser==2.22
PyJWT==2.10.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.32.3
six==1.17.0
tomli==2.2.1
tzdata==2025.2
unidiff==0.7.5
uritemplate==4.1.1
urllib3==2.3.0
xmltodict==0.14.2
| name: inline-plz
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- github3-py==4.0.1
- idna==3.10
- iniconfig==2.1.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pycparser==2.22
- pyjwt==2.10.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- tzdata==2025.2
- unidiff==0.7.5
- uritemplate==4.1.1
- urllib3==2.3.0
- xmltodict==0.14.2
prefix: /opt/conda/envs/inline-plz
| [
"tests/parsers/test_prospector.py::test_prospector"
]
| []
| []
| []
| ISC License | 407 | [
"inlineplz/parsers/prospector.py",
"inlineplz/linters/__init__.py"
]
| [
"inlineplz/parsers/prospector.py",
"inlineplz/linters/__init__.py"
]
|
|
guykisel__inline-plz-29 | 3cfa8f2c3f0fd814c105ca4d51f0727659a45fa8 | 2016-01-31 23:15:58 | 3cfa8f2c3f0fd814c105ca4d51f0727659a45fa8 | diff --git a/inlineplz/interfaces/github.py b/inlineplz/interfaces/github.py
index 005b8ff..4dd4397 100644
--- a/inlineplz/interfaces/github.py
+++ b/inlineplz/interfaces/github.py
@@ -26,14 +26,14 @@ class GitHubInterface(InterfaceBase):
def post_messages(self, messages):
messages_to_post = 0
for msg in messages:
- if not msg.content:
+ if not msg.comments:
continue
msg_position = self.position(msg)
if msg_position:
messages_to_post += 1
if not self.is_duplicate(msg, msg_position):
self.pull_request.create_review_comment(
- msg.content,
+ self.format_message(msg),
self.last_sha,
msg.path,
msg_position
@@ -44,10 +44,22 @@ class GitHubInterface(InterfaceBase):
for comment in self.pull_request.review_comments():
if (comment.position == position and
comment.path == message.path and
- comment.body.strip() == message.content.strip()):
+ comment.body.strip() == self.format_message(message).strip()):
return True
return False
+ @staticmethod
+ def format_message(message):
+ if not message.comments:
+ return ''
+ if len(message.comments) > 1:
+ return (
+ '```\n' +
+ '\n'.join(sorted(list(message.comments))) +
+ '\n```'
+ )
+ return '`{0}`'.format(list(message.comments)[0].strip())
+
def position(self, message):
"""Calculate position within the PR, which is not the line number"""
patch = unidiff.PatchSet(self.diff.split('\n'))
diff --git a/inlineplz/linters/__init__.py b/inlineplz/linters/__init__.py
index 420508e..458fd7c 100644
--- a/inlineplz/linters/__init__.py
+++ b/inlineplz/linters/__init__.py
@@ -9,6 +9,7 @@ import subprocess
import traceback
from inlineplz import parsers
+from inlineplz import message
LINTERS = {
'prospector': {
@@ -44,7 +45,7 @@ LINTERS = {
def lint(install=False):
- messages = []
+ messages = message.Messages()
for config in LINTERS.values():
if any(dotfile in os.listdir(os.getcwd())
for dotfile in config.get('dotfiles')):
@@ -52,7 +53,7 @@ def lint(install=False):
if install and config.get('install'):
subprocess.check_call(config.get('install'))
output = subprocess.check_output(config.get('run')).decode('utf-8')
- messages.extend(config.get('parser')().parse(output))
+ messages.add_messages(config.get('parser')().parse(output))
except subprocess.CalledProcessError:
traceback.print_exc()
- return messages
+ return messages.get_messages()
diff --git a/inlineplz/message.py b/inlineplz/message.py
index 61011a4..da2c722 100644
--- a/inlineplz/message.py
+++ b/inlineplz/message.py
@@ -1,12 +1,32 @@
# -*- coding: utf-8 -*-
+import os
+
+
+class Messages(object):
+
+ def __init__(self):
+ self.messages = {}
+
+ def add_message(self, path, line, message):
+ if (path, line) not in self.messages:
+ self.messages[(path, line)] = Message(path, line)
+ self.messages[(path, line)].append(message)
+
+ def add_messages(self, messages):
+ for message in messages:
+ self.add_message(*message)
+
+ def get_messages(self):
+ return self.messages.values()
+
class Message(object):
def __init__(self, path, line_number):
- self.path = path.replace('\\', '/')
+ self.path = os.path.relpath(path).replace('\\', '/')
self.line_number = line_number
- self.comments = []
+ self.comments = set()
def __str__(self):
return """
@@ -16,10 +36,5 @@ Message:
Content: {2}
""".format(self.path, self.line_number, self.content).strip()
- @property
- def content(self):
- if not self.comments:
- return ''
- if len(self.comments) > 1:
- return '```\n' + '\n'.join(self.comments) + '\n```'
- return '`{0}`'.format(self.comments[0].strip())
+ def append(self, message):
+ self.comments.add(message)
diff --git a/inlineplz/parsers/eslint.py b/inlineplz/parsers/eslint.py
index 6a0c211..f30d7b2 100644
--- a/inlineplz/parsers/eslint.py
+++ b/inlineplz/parsers/eslint.py
@@ -5,24 +5,21 @@ from collections import OrderedDict
import json
from inlineplz.parsers.base import ParserBase
-from inlineplz.message import Message
class ESLintParser(ParserBase):
"""Parse json eslint output."""
def parse(self, lint_data):
- messages = []
+ messages = set()
for filedata in json.loads(
lint_data,
object_pairs_hook=OrderedDict
):
if filedata.get('messages'):
for msgdata in filedata['messages']:
- msg = Message(
- filedata.get('filePath'),
- msgdata.get('line')
- )
- msg.comments.append(msgdata.get('message'))
- messages.append(msg)
+ path = filedata['filePath']
+ line = msgdata['line']
+ msgbody = msgdata['message']
+ messages.add((path, line, msgbody))
return messages
diff --git a/inlineplz/parsers/jscs.py b/inlineplz/parsers/jscs.py
index e3fd72c..b80d8db 100644
--- a/inlineplz/parsers/jscs.py
+++ b/inlineplz/parsers/jscs.py
@@ -5,24 +5,21 @@ from collections import OrderedDict
import json
from inlineplz.parsers.base import ParserBase
-from inlineplz.message import Message
class JSCSParser(ParserBase):
"""Parse json jscs output."""
def parse(self, lint_data):
- messages = []
+ messages = set()
for filename, msgs in json.loads(
lint_data,
object_pairs_hook=OrderedDict
).items():
if msgs:
for msgdata in msgs:
- msg = Message(
- filename,
- msgdata.get('line')
- )
- msg.comments.append(msgdata.get('message'))
- messages.append(msg)
+ path = filename
+ line = msgdata['line']
+ msgbody = msgdata['message']
+ messages.add((path, line, msgbody))
return messages
diff --git a/inlineplz/parsers/jshint.py b/inlineplz/parsers/jshint.py
index 9c8beee..7e8863f 100644
--- a/inlineplz/parsers/jshint.py
+++ b/inlineplz/parsers/jshint.py
@@ -4,24 +4,21 @@ from __future__ import absolute_import
import xmltodict
from inlineplz.parsers.base import ParserBase
-from inlineplz.message import Message
class JSHintParser(ParserBase):
"""Parse json jshint output."""
def parse(self, lint_data):
- messages = []
+ messages = set()
obj = xmltodict.parse(lint_data)
for filedata in obj['checkstyle']['file']:
for errordata in filedata['error']:
try:
- msg = Message(
- filedata.get('@name'),
- int(errordata.get('@line'))
- )
- msg.comments.append(errordata.get('@message'))
- messages.append(msg)
- except AttributeError:
+ path = filedata['@name']
+ line = int(errordata['@line'])
+ msgbody = errordata['@message']
+ messages.add((path, line, msgbody))
+ except (AttributeError, TypeError):
pass
return messages
diff --git a/inlineplz/parsers/prospector.py b/inlineplz/parsers/prospector.py
index 8146c2a..43b8eb6 100644
--- a/inlineplz/parsers/prospector.py
+++ b/inlineplz/parsers/prospector.py
@@ -5,27 +5,23 @@ from collections import OrderedDict
import json
from inlineplz.parsers.base import ParserBase
-from inlineplz.message import Message
class ProspectorParser(ParserBase):
"""Parse json prospector output."""
def parse(self, lint_data):
- messages = []
+ messages = set()
for msgdata in json.loads(
lint_data,
object_pairs_hook=OrderedDict
).get('messages'):
- msg = Message(
- msgdata['location']['path'],
- msgdata['location']['line']
- )
+ path = msgdata['location']['path']
+ line = msgdata['location']['line']
msgbody = '{0}: {1} ({2})'.format(
msgdata['source'],
msgdata['message'],
msgdata['code']
)
- msg.comments.append(msgbody)
- messages.append(msg)
+ messages.add((path, line, msgbody))
return messages
| move message formatting logic from message module into interface modules | guykisel/inline-plz | diff --git a/tests/parsers/test_eslint.py b/tests/parsers/test_eslint.py
index d8e765b..289099d 100644
--- a/tests/parsers/test_eslint.py
+++ b/tests/parsers/test_eslint.py
@@ -15,7 +15,7 @@ eslint_path = os.path.join(
def test_eslint():
with open(eslint_path) as inputfile:
- messages = eslint.ESLintParser().parse(inputfile.read())
- assert messages[0].content == '`Parsing error: Illegal return statement`'
- assert messages[0].line_number == 17
- assert messages[0].path == 'C:/Users/Guy/Documents/jshint/tests/unit/fixtures/asi.js'
+ messages = sorted(list(eslint.ESLintParser().parse(inputfile.read())))
+ assert messages[0][2] == 'Parsing error: Illegal return statement'
+ assert messages[0][1] == 17
+ assert messages[0][0] == 'C:\\Users\\Guy\\Documents\\jshint\\tests\\unit\\fixtures\\asi.js'
diff --git a/tests/parsers/test_jscs.py b/tests/parsers/test_jscs.py
index e0577f6..d71c08f 100644
--- a/tests/parsers/test_jscs.py
+++ b/tests/parsers/test_jscs.py
@@ -15,7 +15,7 @@ jscs_path = os.path.join(
def test_jscs():
with open(jscs_path) as inputfile:
- messages = jscs.JSCSParser().parse(inputfile.read())
- assert messages[0].content == '`maximumLineLength: Line must be at most 100 characters`'
- assert messages[0].line_number == 1
- assert messages[0].path == './data/non-ascii-identifier-part-only.js'
+ messages = sorted(list(jscs.JSCSParser().parse(inputfile.read())))
+ assert messages[0][2] == 'maximumLineLength: Line must be at most 100 characters'
+ assert messages[0][1] == 1
+ assert messages[0][0] == './data/non-ascii-identifier-part-only.js'
diff --git a/tests/parsers/test_jshint.py b/tests/parsers/test_jshint.py
index fb4c4c6..f1d8c79 100644
--- a/tests/parsers/test_jshint.py
+++ b/tests/parsers/test_jshint.py
@@ -15,7 +15,7 @@ jshint_path = os.path.join(
def test_jshint():
with open(jshint_path) as inputfile:
- messages = jshint.JSHintParser().parse(inputfile.read())
- assert messages[0].content == '`Use the function form of "use strict". (W097)`'
- assert messages[0].line_number == 8
- assert messages[0].path == 'conf/cli-options.js'
+ messages = sorted(list(jshint.JSHintParser().parse(inputfile.read())))
+ assert messages[0][2] == 'Use the function form of "use strict". (W097)'
+ assert messages[0][1] == 7
+ assert messages[0][0] == 'Makefile.js'
diff --git a/tests/parsers/test_prospector.py b/tests/parsers/test_prospector.py
index fbba037..4b3e569 100644
--- a/tests/parsers/test_prospector.py
+++ b/tests/parsers/test_prospector.py
@@ -16,13 +16,15 @@ prospector_path = os.path.join(
def test_prospector():
with open(prospector_path) as inputfile:
- messages = prospector.ProspectorParser().parse(inputfile.read())
- assert messages[0].content == '`pep257: Missing docstring in public package (D104)`'
- assert messages[0].line_number == 1
- assert messages[0].path == 'inlineplz/util/__init__.py'
- assert messages[1].content == '`pep257: Missing docstring in public package (D104)`'
- assert messages[1].line_number == 1
- assert messages[1].path == 'inlineplz/parsers/__init__.py'
- assert messages[9].content == ('`pep257: One-line docstring should fit on one line with quotes (found 2) (D200)`')
- assert messages[9].line_number == 1
+ messages = sorted(list(prospector.ProspectorParser().parse(inputfile.read())))
+ assert messages[0][2] == 'pep257: Missing docstring in public package (D104)'
+ assert messages[0][1] == 1
+ assert messages[0][0] == 'inlineplz\__init__.py'
+
+ assert messages[1][2] == 'pep257: Missing docstring in public package (D104)'
+ assert messages[1][1] == 1
+ assert messages[1][0] == 'inlineplz\interfaces\__init__.py'
+
+ assert messages[9][2] == 'pep257: Missing docstring in public package (D104)'
+ assert messages[9][1] == 1
assert len(messages) == 32
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 7
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
cryptography==40.0.2
github3.py==3.2.0
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/guykisel/inline-plz.git@3cfa8f2c3f0fd814c105ca4d51f0727659a45fa8#egg=inlineplz
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycparser==2.21
PyJWT==2.4.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
requests==2.27.1
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
unidiff==0.7.5
uritemplate==4.1.1
urllib3==1.26.20
xmltodict==0.14.2
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: inline-plz
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- cffi==1.15.1
- charset-normalizer==2.0.12
- cryptography==40.0.2
- github3-py==3.2.0
- idna==3.10
- pycparser==2.21
- pyjwt==2.4.0
- python-dateutil==2.9.0.post0
- requests==2.27.1
- six==1.17.0
- unidiff==0.7.5
- uritemplate==4.1.1
- urllib3==1.26.20
- xmltodict==0.14.2
prefix: /opt/conda/envs/inline-plz
| [
"tests/parsers/test_eslint.py::test_eslint",
"tests/parsers/test_jscs.py::test_jscs",
"tests/parsers/test_jshint.py::test_jshint",
"tests/parsers/test_prospector.py::test_prospector"
]
| []
| []
| []
| ISC License | 408 | [
"inlineplz/parsers/eslint.py",
"inlineplz/parsers/jscs.py",
"inlineplz/parsers/prospector.py",
"inlineplz/linters/__init__.py",
"inlineplz/interfaces/github.py",
"inlineplz/message.py",
"inlineplz/parsers/jshint.py"
]
| [
"inlineplz/parsers/eslint.py",
"inlineplz/parsers/jscs.py",
"inlineplz/parsers/prospector.py",
"inlineplz/linters/__init__.py",
"inlineplz/interfaces/github.py",
"inlineplz/message.py",
"inlineplz/parsers/jshint.py"
]
|
|
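Each record pairs base_commit with a gold patch, a test_patch, and a FAIL_TO_PASS list of tests expected to flip from failing to passing once the gold patch is applied. The following is a hedged sketch of that check, assuming a local clone of the repository and an already-installed environment; the helper names and file paths are illustrative, not part of the dataset.

```python
# Illustrative only: assumes `workdir` contains a clone of row["repo"] and that
# the environment described by row["install_config"] has already been set up.
import subprocess

def run(cmd, cwd):
    print("+", cmd)
    return subprocess.run(cmd, shell=True, cwd=cwd).returncode

def verify(row, workdir):
    run("git checkout {}".format(row["base_commit"]), workdir)

    # Apply the test patch first: the FAIL_TO_PASS tests should fail here.
    with open("{}/test.patch".format(workdir), "w") as fh:
        fh.write(row["test_patch"])
    run("git apply test.patch", workdir)
    tests = " ".join(row["FAIL_TO_PASS"])
    before = run("{} {}".format(row["install_config"]["test_cmd"], tests), workdir)

    # Apply the gold patch: the same tests should now pass.
    with open("{}/gold.patch".format(workdir), "w") as fh:
        fh.write(row["patch"])
    run("git apply gold.patch", workdir)
    after = run("{} {}".format(row["install_config"]["test_cmd"], tests), workdir)

    return before != 0 and after == 0
```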
JonathonReinhart__scuba-42 | 9aa705d7d0419f1930ae034e2210d69f66f5bf2a | 2016-02-01 13:01:13 | 9aa705d7d0419f1930ae034e2210d69f66f5bf2a | diff --git a/scuba/__main__.py b/scuba/__main__.py
index 57f8671..4a54fae 100755
--- a/scuba/__main__.py
+++ b/scuba/__main__.py
@@ -217,7 +217,7 @@ def main(argv=None):
if g_verbose or scuba_args.dry_run:
appmsg('Docker command line:')
- print(format_cmdline(run_args))
+ print('$ ' + format_cmdline(run_args))
if scuba_args.dry_run:
appmsg('Exiting for dry run. Temporary files not removed:')
diff --git a/scuba/utils.py b/scuba/utils.py
index f4d742b..0bd2e0e 100644
--- a/scuba/utils.py
+++ b/scuba/utils.py
@@ -3,13 +3,33 @@ try:
except ImportError:
from pipes import quote as shell_quote
+
def format_cmdline(args, maxwidth=80):
+ '''Format args into a shell-quoted command line.
+
+ The result will be wrapped to maxwidth characters where possible,
+ not breaking a single long argument.
+ '''
+
+ # Leave room for the space and backslash at the end of each line
+ maxwidth -= 2
+
def lines():
line = ''
for a in (shell_quote(a) for a in args):
- if len(line) + len(a) > maxwidth:
+ # If adding this argument will make the line too long,
+ # yield the current line, and start a new one.
+ if len(line) + len(a) + 1 > maxwidth:
yield line
line = ''
- line += ' ' + a
- return '$' + ' \\\n'.join(lines())
+ # Append this argument to the current line, separating
+ # it by a space from the existing arguments.
+ if line:
+ line += ' ' + a
+ else:
+ line = a
+
+ yield line
+
+ return ' \\\n'.join(lines())
| utils.format_cmdline misses last line
It appears that utils.format_cmdline fails to yield the last accumulated line.
This also means that a better test could be written, which splits the result back out, and compares it to the list of input arguments. | JonathonReinhart/scuba | diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 0000000..ff9ad97
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,49 @@
+from __future__ import print_function
+
+from nose.tools import *
+from unittest import TestCase
+
+import logging
+import shlex
+from itertools import chain
+
+from .utils import *
+
+import scuba.utils
+
+
+class TestUtils(TestCase):
+
+ def _parse_cmdline(self, cmdline):
+ # Strip the formatting and whitespace
+ lines = [l.rstrip('\\').strip() for l in cmdline.splitlines()]
+
+ # Split each line, and return a flattened list of arguments
+ return chain.from_iterable(map(shlex.split, lines))
+
+ def _test_format_cmdline(self, args):
+
+ # Call the unit-under-test to get the formatted command line
+ result = scuba.utils.format_cmdline(args)
+
+ # Parse the result back out to a list of arguments
+ out_args = self._parse_cmdline(result)
+
+ # Verify that they match
+ assert_seq_equal(out_args, args)
+
+
+ def test_basic(self):
+ '''format_cmdline works as expected'''
+
+ self._test_format_cmdline([
+ 'something',
+ '-a',
+ '-b',
+ '--long', 'option text',
+ '-s', 'hort',
+ 'a very long argument here that will end up on its own line because it is so wide and nothing else will fit at the default width',
+ 'and now',
+ 'some', 'more', 'stuff',
+ 'and even more stuff',
+ ])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==3.7.1
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
-e git+https://github.com/JonathonReinhart/scuba.git@9aa705d7d0419f1930ae034e2210d69f66f5bf2a#egg=SCUBA
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: scuba
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==3.7.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/scuba
| [
"tests/test_utils.py::TestUtils::test_basic"
]
| []
| []
| []
| MIT License | 409 | [
"scuba/__main__.py",
"scuba/utils.py"
]
| [
"scuba/__main__.py",
"scuba/utils.py"
]
|
|
geowurster__tinymr-17 | 5dbf46845a8caba995916f76d1e681860f1e198f | 2016-02-02 06:14:38 | 5dbf46845a8caba995916f76d1e681860f1e198f | diff --git a/.travis.yml b/.travis.yml
index 8813f7b..04f68af 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -15,6 +15,7 @@ python:
- pypy3
install:
+ - pip install pip setuptools --upgrade
- pip install -e .\[dev\]
script:
diff --git a/tinymr/_mrtools.py b/tinymr/_mrtools.py
index 1a200db..ce3f9ef 100644
--- a/tinymr/_mrtools.py
+++ b/tinymr/_mrtools.py
@@ -82,7 +82,8 @@ def sort_partitioned_values(kv_stream):
return ((k, sorter(v, key=lambda x: x[0])) for k, v in kv_stream)
-class ReduceJob(namedtuple('ReduceJob', ['reducer', 'sort', 'jobs', 'chunksize'])):
+class ReduceJobConf(
+ namedtuple('ReduceJob', ['reducer', 'sort', 'jobs', 'chunksize'])):
"""
Describes a reduce job. Makes keeping track of multiple reducers easier.
diff --git a/tinymr/base.py b/tinymr/base.py
index eba8905..c29bbb6 100644
--- a/tinymr/base.py
+++ b/tinymr/base.py
@@ -3,7 +3,9 @@ Base classes. Subclass away!
"""
+import inspect
from itertools import chain
+import logging
import six
@@ -201,7 +203,8 @@ class BaseMapReduce(object):
Sort the output from each `reducer()` before executing the next or
before being passed to `output()`.
- Define one property per reducer, so `reducer2()` would be `sort_reduce2`.
+ Define one property per reducer, so `reducer2()` would be
+ `sort_reduce2`.
Returns
-------
@@ -210,6 +213,16 @@ class BaseMapReduce(object):
return self.sort
+ @property
+ def logger(self):
+
+ """
+ Each MapReduce task gets its own logger with a name like
+ `tinymr-ClassName`.
+ """
+
+ return logging.getLogger('tinymr-{}'.format(self.__class__.__name__))
+
def close(self):
"""
@@ -318,18 +331,43 @@ class BaseMapReduce(object):
return ((key, tuple(values)) for key, values in pairs)
@property
- def _reduce_jobs(self):
-
- reducers = tools.sorter(filter(
- lambda x: not x.startswith('_') and 'reducer' in x,
- dir(self)))
-
- for r in reducers:
- yield _mrtools.ReduceJob(
- reducer=getattr(self, r),
- sort=getattr(self, 'sort_{}'.format(r.replace('reducer', 'reduce'))),
- jobs=getattr(self, '{}_jobs'.format(r.replace('reducer', 'reduce'))),
- chunksize=getattr(self, '{}_jobs'.format(r.replace('reducer', 'reduce'))))
+ def _reduce_job_confs(self):
+
+ """
+ The user can define multiple reduce operations, each with their own
+ independent job configuration, to be executed in a specified order.
+ This method produces one `_mrtools.ReduceJobConf()` per reduce
+ operation in execution order.
+
+ Returns
+ -------
+ tuple
+ """
+
+ # We encourage user's to add their own properties and methods, so
+ # we want to be really confident that we are _only_ grabbing the
+ # reducer methods, otherwise difficult to debug failures might pop up.
+ # Can't assume the reducers were defined in order.
+ reducers = {}
+ for method in (m for m in dir(self) if m != '_reduce_job_confs'):
+
+ if method.startswith('reducer') and \
+ inspect.ismethod(getattr(self, method)):
+
+ str_idx = method.lstrip('reducer') or '-1'
+
+ sort_method = 'sort_reduce{}'.format(str_idx)
+ jobs_method = 'reduce{}_jobs'.format(str_idx)
+ chunksize_method = 'reduce{}_chunksize'.format(str_idx)
+
+ reducers[int(str_idx)] = _mrtools.ReduceJobConf(
+ reducer=getattr(self, method),
+ sort=getattr(self, sort_method, self.sort_reduce),
+ jobs=getattr(self, jobs_method, self.reduce_jobs),
+ chunksize=getattr(
+ self, chunksize_method, self.reduce_chunksize))
+
+ return [reducers.pop(i) for i in sorted(reducers.keys())]
def _map_combine_partition(self, stream):
diff --git a/tinymr/memory.py b/tinymr/memory.py
index f48b4a9..97bf1d6 100644
--- a/tinymr/memory.py
+++ b/tinymr/memory.py
@@ -12,38 +12,54 @@ from tinymr import tools
from tinymr.tools import runner
-logger = logging.getLogger('tinymr')
-logger.setLevel(logging.DEBUG)
-
-
class MemMapReduce(base.BaseMapReduce):
- def __call__(self, stream):
+ def __call__(self, stream, log_level=logging.NOTSET):
+
+ original_log_level = self.logger.level
+ self.logger.setLevel(log_level)
sliced = tools.slicer(stream, self.map_chunksize)
# Map, partition, combine, partition
+ self.logger.info(
+ "Running map, combine, and partition phase with %s jobs, chunksize "
+ "%s, sort_map=%s, and sort_combine=%s",
+ self.map_jobs, self.map_chunksize, self.sort_map, self.sort_combine)
+
with runner(self._map_combine_partition, sliced, self.map_jobs) as mcp:
partitioned = tools.merge_partitions(*mcp, sort=self.sort_combine)
+ self.logger.info("Finished map with %s keys", len(partitioned))
+ self.logger.info("Initializing reduce phase")
self.init_reduce()
# Run all partition jobs
reducer_input = partitioned
- for rj in self._reduce_jobs:
+ for rj in self._reduce_job_confs:
+ self.logger.info("Running reduce job %s", rj)
+
+ # Pin the reduce job so we can treat it like a lambda
func = functools.partial(
self._reduce_partition, reducer=rj.reducer, sort=rj.sort)
reducer_input = _mrtools.strip_sort_key(reducer_input)
sliced = tools.slicer(reducer_input, rj.chunksize)
-
with runner(func, sliced, rj.jobs) as reduced:
partitioned = tools.merge_partitions(*reduced, sort=rj.sort)
+ self.logger.info(
+ "Finished reduce job %s with %s keys", rj, len(partitioned))
+
partitioned = _mrtools.strip_sort_key(partitioned)
+ self.logger.info("Sorting output data by key")
if self.sort_output:
partitioned = self._output_sorter(partitioned)
- return self.output(partitioned)
+ try:
+ self.logger.info("Producing output dataset")
+ return self.output(partitioned)
+ finally:
+ self.logger.setLevel(original_log_level)
| ReduceJob().chunksize is populated with reduce_jobs | geowurster/tinymr | diff --git a/tests/test_base.py b/tests/test_base.py
index 7468888..47309ac 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -5,6 +5,7 @@ Unittests for tinymr.base
import pytest
+from tinymr import _mrtools
from tinymr import base
from tinymr import errors
@@ -36,6 +37,48 @@ def test_default_methods():
assert list(mr.output(expected)) == expected
+def test_reduce_job_confs():
+ # Make sure attributes are coming from the correct location
+ class MR(base.BaseMapReduce):
+
+ jobs = 4
+ reduce2_chunksize = 10
+ reduce10_jobs = 2
+ sort = False
+ sort_reduce2 = True
+
+ # Define out of order to test sorting
+ def reducer10(self, key, values):
+ pass
+
+ def reducer(self, key, values):
+ pass
+
+ def reducer2(self, key, values):
+ pass
+
+ mr = MR()
+
+ rj = _mrtools.ReduceJobConf(
+ reducer=mr.reducer,
+ sort=False,
+ jobs=4,
+ chunksize=1)
+ rj2 = _mrtools.ReduceJobConf(
+ reducer=mr.reducer2,
+ sort=True,
+ jobs=4,
+ chunksize=10)
+ rj10 = _mrtools.ReduceJobConf(
+ reducer=mr.reducer10,
+ sort=False,
+ jobs=2,
+ chunksize=1)
+
+ assert mr._reduce_job_confs == [rj, rj2, rj10]
+
+
+
# def test_context_manager():
#
# class MapReduce(base.BaseMapReduce):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 4
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
coveralls==3.3.1
docopt==0.6.2
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-cov==4.0.0
requests==2.27.1
six==1.17.0
-e git+https://github.com/geowurster/tinymr.git@5dbf46845a8caba995916f76d1e681860f1e198f#egg=tinymr
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tinymr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==2.0.12
- coverage==6.2
- coveralls==3.3.1
- docopt==0.6.2
- idna==3.10
- pytest-cov==4.0.0
- requests==2.27.1
- six==1.17.0
- tomli==1.2.3
- urllib3==1.26.20
prefix: /opt/conda/envs/tinymr
| [
"tests/test_base.py::test_reduce_job_confs"
]
| []
| [
"tests/test_base.py::test_not_implemented_methods",
"tests/test_base.py::test_default_settings",
"tests/test_base.py::test_default_methods"
]
| []
| New BSD License | 410 | [
"tinymr/_mrtools.py",
".travis.yml",
"tinymr/base.py",
"tinymr/memory.py"
]
| [
"tinymr/_mrtools.py",
".travis.yml",
"tinymr/base.py",
"tinymr/memory.py"
]
|
|
mmerickel__pyramid_services-12 | 85f423102ec14195698b1e44b12fccf30650679d | 2016-02-03 20:48:38 | 85f423102ec14195698b1e44b12fccf30650679d | diff --git a/CHANGES.txt b/CHANGES.txt
index 8d05981..6240712 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,8 +1,21 @@
unreleased
==========
+Backward Incompatibilities
+--------------------------
+
- Drop Python 3.2 support.
+- Use the original service context interface as the cache key instead
+ of the current context. This means the service will be properly created
+ only once for any context satisfying the original interface.
+
+ Previously, if you requested the same service from 2 different contexts
+ in the same request you would receive 2 service objects, instead of
+ a cached version of the original service, assuming the service was
+ registered to satisfy both contexts.
+ See https://github.com/mmerickel/pyramid_services/pull/12
+
0.3 (2015-12-13)
================
diff --git a/pyramid_services/__init__.py b/pyramid_services/__init__.py
index 9b80908..197e94a 100644
--- a/pyramid_services/__init__.py
+++ b/pyramid_services/__init__.py
@@ -25,6 +25,11 @@ def includeme(config):
config.add_directive('register_service_factory', register_service_factory)
config.add_directive('find_service_factory', find_service_factory)
+class ServiceInfo(object):
+ def __init__(self, factory, context_iface):
+ self.factory = factory
+ self.context_iface = context_iface
+
class SingletonServiceWrapper(object):
def __init__(self, service):
self.service = service
@@ -64,13 +69,15 @@ def register_service_factory(
else:
context_iface = context
+ info = ServiceInfo(service_factory, context_iface)
+
def register():
adapters = config.registry.adapters
adapters.register(
(IServiceClassifier, context_iface),
iface,
name,
- service_factory,
+ info,
)
discriminator = ('service factories', (iface, context, name))
@@ -101,11 +108,16 @@ def find_service(request, iface=Interface, context=_marker, name=''):
svc = cache.lookup(svc_types, iface, name=name, default=None)
if svc is None:
adapters = request.registry.adapters
- svc_factory = adapters.lookup(svc_types, iface, name=name)
- if svc_factory is None:
+ info = adapters.lookup(svc_types, iface, name=name)
+ if info is None:
raise ValueError('could not find registered service')
- svc = svc_factory(context, request)
- cache.register(svc_types, iface, name, svc)
+ svc = info.factory(context, request)
+ cache.register(
+ (IServiceClassifier, info.context_iface),
+ iface,
+ name,
+ svc,
+ )
return svc
def find_service_factory(
@@ -118,7 +130,7 @@ def find_service_factory(
svc_types = (IServiceClassifier, context_iface)
adapters = config_or_request.registry.adapters
- svc_factory = adapters.lookup(svc_types, iface, name=name)
- if svc_factory is None:
+ info = adapters.lookup(svc_types, iface, name=name)
+ if info is None:
raise ValueError('could not find registered service')
- return svc_factory
+ return info.factory
| Singleton per request object
One issue I ran into with your **dbsession** service example (that uses a service factory) is the following:
When looking up the service, different sessions are returned depending on the context. This is by design. Citing the documentation:
> The factory will be used at most once per request/context/name combination.
Having different DB sessions in one request is quite ugly and I can think of no use case for it. How did I end up with different contexts? By using the *request* object outside a view function, obtained directly with *pyramid.threadlocal.get_current_request*. In that case the context is None.
This example by [Jon Rosebaugh - What the Zope Transaction Manager Means To Me (and you)](https://metaclassical.com/what-the-zope-transaction-manager-means-to-me-and-you/) would work but I wanted to use *pyramid_services* (for consistency, because I'm already using it in other parts of the application).
I'm aware that this might not be the place for this, since it is more of a question than an issue, but you could at least update the example to make it clear that
```python
request.find_service(name='db')
```
will return different sessions depending on the request context.
My workaround is to always pass context=None:
```python
request.find_service(name='db', context=None)
```
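For illustration, here is a minimal, self-contained sketch of that workaround wired into a Pyramid app. This is not code from this repository; `DummySession`, `dbsession_factory` and `get_dbsession` are invented names, and a real application would plug in its actual session factory.
```python
from pyramid.config import Configurator


class DummySession(object):
    """Stand-in for a real (e.g. SQLAlchemy) session; illustration only."""


def dbsession_factory(context, request):
    # pyramid_services calls the factory at most once per
    # request/context/name combination, so each distinct context would
    # otherwise get its own session.
    return DummySession()


def get_dbsession(request):
    # The workaround: always resolve with context=None so every lookup in a
    # request hits the same cache entry, regardless of traversal context.
    return request.find_service(name='db', context=None)


def main():
    config = Configurator()
    config.include('pyramid_services')
    config.register_service_factory(dbsession_factory, name='db')
    config.add_request_method(get_dbsession, 'dbsession')
    return config.make_wsgi_app()
```
(The patch above takes a different route: it caches the service on the interface the factory was registered against, so the same request yields the same instance no matter which context is used for the lookup.)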
Is there a simpler solution (e.g. a singleton per request)? *register_service()* would register a singleton object for the whole application, but I do want a different DB session for every request. | mmerickel/pyramid_services | diff --git a/pyramid_services/tests/test_it.py b/pyramid_services/tests/test_it.py
index 1e482a3..5abeec3 100644
--- a/pyramid_services/tests/test_it.py
+++ b/pyramid_services/tests/test_it.py
@@ -256,6 +256,25 @@ class TestIntegration_register_service_factory(unittest.TestCase):
self.assertEqual(resp.body, b'foo')
self.assertEqual(called, [True])
+ def test_context_does_not_affect_cache(self):
+ config = self.config
+
+ config.register_service_factory(
+ lambda ctx, req: DummyService('foo'), name='foo')
+
+ def dummy_view(context, request):
+ s1 = request.find_service(name='foo', context=Root())
+ s2 = request.find_service(name='foo', context=Leaf())
+ self.assertTrue(s1 is s2)
+ return s1.result
+
+ config.add_view(dummy_view, renderer='string')
+
+ app = self._makeApp()
+ resp = app.get('/')
+ self.assertEqual(resp.body, b'foo')
+
+
class TestIntegration_find_service_factory(unittest.TestCase):
def setUp(self):
self.config = pyramid.testing.setUp()
@@ -305,12 +324,12 @@ class DummyService(object):
class DummyServiceFactory(object):
def __init__(self, result):
- self.result = DummyService(result)
+ self.result = result
def __call__(self, context, request):
self.context = context
self.request = request
- return self.result
+ return DummyService(self.result)
class DummyView(object):
def __init__(self, *a, **kw):
@@ -318,5 +337,5 @@ class DummyView(object):
self.kw = kw
def __call__(self, request):
- svc = request.find_service(*self.a, **self.kw)
- return svc()
+ self.svc = request.find_service(*self.a, **self.kw)
+ return self.svc()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 2
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[testing]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
beautifulsoup4==4.12.3
certifi==2021.5.30
coverage==6.2
hupper==1.10.3
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
PasteDeploy==2.1.1
plaster==1.0
plaster-pastedeploy==0.7
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pyramid==2.0.2
-e git+https://github.com/mmerickel/pyramid_services.git@85f423102ec14195698b1e44b12fccf30650679d#egg=pyramid_services
pytest==6.2.4
soupsieve==2.3.2.post1
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
translationstring==1.4
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
venusian==3.0.0
waitress==2.0.0
WebOb==1.8.9
WebTest==3.0.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
zope.deprecation==4.4.0
zope.interface==5.5.2
| name: pyramid_services
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.12.3
- coverage==6.2
- hupper==1.10.3
- nose==1.3.7
- pastedeploy==2.1.1
- plaster==1.0
- plaster-pastedeploy==0.7
- pyramid==2.0.2
- soupsieve==2.3.2.post1
- translationstring==1.4
- venusian==3.0.0
- waitress==2.0.0
- webob==1.8.9
- webtest==3.0.0
- zope-deprecation==4.4.0
- zope-interface==5.5.2
prefix: /opt/conda/envs/pyramid_services
| [
"pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_context_does_not_affect_cache"
]
| []
| [
"pyramid_services/tests/test_it.py::TestIntegration_register_service::test_context_sensitive",
"pyramid_services/tests/test_it.py::TestIntegration_register_service::test_iface",
"pyramid_services/tests/test_it.py::TestIntegration_register_service::test_introspectable",
"pyramid_services/tests/test_it.py::TestIntegration_register_service::test_name",
"pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_context_sensitive",
"pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_iface",
"pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_introspectable",
"pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_name",
"pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_with_no_context",
"pyramid_services/tests/test_it.py::TestIntegration_find_service_factory::test_find_service_factory",
"pyramid_services/tests/test_it.py::TestIntegration_find_service_factory::test_find_service_factory_fail",
"pyramid_services/tests/test_it.py::TestIntegration_find_service_factory::test_find_service_factory_service"
]
| []
| MIT License | 411 | [
"CHANGES.txt",
"pyramid_services/__init__.py"
]
| [
"CHANGES.txt",
"pyramid_services/__init__.py"
]
|
|
networkx__networkx-1976 | 293632863d5e895691b56dff4b12c937b2ea77dc | 2016-02-05 18:54:35 | 3f4fd85765bf2d88188cfd4c84d0707152e6cd1e | diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py
index 2ad8bdfb4..c3457139a 100644
--- a/networkx/algorithms/connectivity/kcutsets.py
+++ b/networkx/algorithms/connectivity/kcutsets.py
@@ -3,6 +3,7 @@
Kanevsky all minimum node k cutsets algorithm.
"""
from operator import itemgetter
+from itertools import combinations
import networkx as nx
from .utils import build_auxiliary_node_connectivity
@@ -86,6 +87,22 @@ def all_node_cuts(G, k=None, flow_func=None):
if not nx.is_connected(G):
raise nx.NetworkXError('Input graph is disconnected.')
+    # Address some corner cases first.
+ # For cycle graphs
+ if G.order() == G.size():
+ if all(2 == d for n, d in G.degree()):
+ seen = set()
+ for u in G:
+ for v in nx.non_neighbors(G, u):
+ if (u, v) not in seen and (v, u) not in seen:
+ yield {v, u}
+ seen.add((v, u))
+ return
+ # For complete Graphs
+ if nx.density(G) == 1:
+ for cut_set in combinations(G, len(G)-1):
+ yield set(cut_set)
+ return
# Initialize data structures.
# Keep track of the cuts already computed so we do not repeat them.
seen = []
| all_node_cuts returns too few and incorrect cuts.
This could be a documentation issue, a bug or a user understanding issue. Are these cases pathological for the algorithm?
Given a square graph:
a -- b
| |
c -- d
Based on a cursory reading of the documentation, I would have expected all_node_cuts() to return:
[{'a','d'}, {'c','b'}]
I get `[{'a','c'}]`, but if this is a valid node cut, then surely {a,b}, {b,d}, and {c,d} are equally valid, and a function called "all node cuts" should return them.
G = nx.Graph([('a','b'), ('a','c'), ('c','d'), ('b','d')])
print( G.nodes() )
print( G.edges() )
print( list(nx.all_node_cuts(G)) )
>>> ['a', 'c', 'b', 'd']
>>> [('a', 'c'), ('a', 'b'), ('c', 'd'), ('b', 'd')]
>>> [{'a', 'c'}]
Expanding to a hexagon, we see a similar pattern of node cuts. Many isometric node cuts are omitted from the results list, and two of the proposed cuts fail to create "two or more connected components" as the documentation suggests.
G = nx.Graph([('a','b'), ('b','c'), ('c','d'),('d','e'), ('e','f'),('f','a')])
list(nx.all_node_cuts(G))
>>> [{'a', 'c'}, {'a', 'b'}, {'b', 'c'}]
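For reference, here is a brute-force cross-check of what the minimum-size node cuts of the square graph should be. This is only an illustrative sketch (it enumerates candidate node sets of size equal to the node connectivity); it is not the algorithm that `all_node_cuts` uses.
```python
from itertools import combinations

import networkx as nx


def brute_force_min_node_cuts(G):
    """Every minimum-size set of nodes whose removal disconnects G."""
    k = nx.node_connectivity(G)
    cuts = []
    for nodes in combinations(G.nodes(), k):
        H = G.copy()
        H.remove_nodes_from(nodes)
        if H.order() > 0 and not nx.is_connected(H):
            cuts.append(set(nodes))
    return cuts


G = nx.Graph([('a', 'b'), ('a', 'c'), ('c', 'd'), ('b', 'd')])
print(brute_force_min_node_cuts(G))
# prints [{'a', 'd'}, {'b', 'c'}] (in some order), the expected answer above
```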
| networkx/networkx | diff --git a/networkx/algorithms/connectivity/tests/test_kcutsets.py b/networkx/algorithms/connectivity/tests/test_kcutsets.py
index e11547faf..9ca49698a 100644
--- a/networkx/algorithms/connectivity/tests/test_kcutsets.py
+++ b/networkx/algorithms/connectivity/tests/test_kcutsets.py
@@ -241,3 +241,27 @@ def test_non_repeated_cuts():
assert_true(len(solution) == len(cuts))
for cut in cuts:
assert_true(cut in solution)
+
+
+def test_cycle_graph():
+ G = nx.cycle_graph(5)
+ solution = [{0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 4}]
+ cuts = list(nx.all_node_cuts(G))
+ assert_true(len(solution) == len(cuts))
+ for cut in cuts:
+ assert_true(cut in solution)
+
+
+def test_complete_graph():
+ G = nx.complete_graph(5)
+ solution = [
+ {0, 1, 2, 3},
+ {0, 1, 2, 4},
+ {0, 1, 3, 4},
+ {0, 2, 3, 4},
+ {1, 2, 3, 4},
+ ]
+ cuts = list(nx.all_node_cuts(G))
+ assert_true(len(solution) == len(cuts))
+ for cut in cuts:
+ assert_true(cut in solution)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | help | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgdal-dev graphviz"
],
"python": "3.6",
"reqs_path": [
"requirements/default.txt",
"requirements/test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
decorator==5.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/networkx/networkx.git@293632863d5e895691b56dff4b12c937b2ea77dc#egg=networkx
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: networkx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- decorator==5.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/networkx
| [
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_cycle_graph",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_complete_graph"
]
| []
| [
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_torrents_and_ferraro_graph",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_example_1",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_random_gnp",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_shell",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_configuration",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_karate",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_articulation_points",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_grid_2d_graph",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_disconnected_graph",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_alternative_flow_functions",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_is_separating_set_complete_graph",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_is_separating_set",
"networkx/algorithms/connectivity/tests/test_kcutsets.py::test_non_repeated_cuts"
]
| []
| BSD 3-Clause | 412 | [
"networkx/algorithms/connectivity/kcutsets.py"
]
| [
"networkx/algorithms/connectivity/kcutsets.py"
]
|
|
unified-font-object__ufoNormalizer-23 | f14d55967d4621114ff598a4407ee839f5387ff4 | 2016-02-06 21:19:33 | f14d55967d4621114ff598a4407ee839f5387ff4 | anthrotype: thank you Denis! This looks much better than my hotfix :+1:
can you remind me why we can't read/write plist files as unicode strings and we need to use bytes? | diff --git a/normalization/ufonormalizer.py b/normalization/ufonormalizer.py
index 7d0fa56..c4f15fb 100644
--- a/normalization/ufonormalizer.py
+++ b/normalization/ufonormalizer.py
@@ -1,6 +1,6 @@
#! /usr/bin/env python
# -*- coding: utf-8 -*-
-from __future__ import print_function
+from __future__ import print_function, unicode_literals
import time
import os
@@ -11,6 +11,7 @@ import textwrap
import datetime
import glob
from collections import OrderedDict
+from io import open
"""
- filter out unknown attributes and subelements
@@ -89,22 +90,22 @@ except NameError:
# plistlib.readPlistFromString instead.
if hasattr(plistlib, "loads"):
- def _readPlistFromBytes(data):
+ def _loads(data):
return plistlib.loads(data)
- def _writePlistToBytes(plist):
+ def _dumps(plist):
return plistlib.dumps(plist)
elif hasattr(plistlib, "readPlistFromBytes"):
- def _readPlistFromBytes(data):
+ def _loads(data):
return plistlib.readPlistFromBytes(tobytes(data))
- def _writePlistToBytes(plist):
+ def _dumps(plist):
return plistlib.writePlistToBytes(plist)
else:
- def _readPlistFromBytes(data):
+ def _loads(data):
return plistlib.readPlistFromString(data)
- def _writePlistToBytes(plist):
+ def _dumps(plist):
return plistlib.writePlistToString(plist)
@@ -1334,9 +1335,8 @@ def subpathReadFile(ufoPath, *subpath):
Read the contents of a file.
"""
path = subpathJoin(ufoPath, *subpath)
- f = open(path, "rb")
- text = f.read()
- f.close()
+ with open(path, "r", encoding="utf-8") as f:
+ text = f.read()
return text
def subpathReadPlist(ufoPath, *subpath):
@@ -1344,12 +1344,14 @@ def subpathReadPlist(ufoPath, *subpath):
Read the contents of a property list
and convert it into a Python object.
"""
- text = subpathReadFile(ufoPath, *subpath)
- return _readPlistFromBytes(text)
+ path = subpathJoin(ufoPath, *subpath)
+ with open(path, "rb") as f:
+ data = f.read()
+ return _loads(data)
# write
-def subpathWriteFile(data, ufoPath, *subpath):
+def subpathWriteFile(text, ufoPath, *subpath):
"""
Write data to a file.
@@ -1360,19 +1362,12 @@ def subpathWriteFile(data, ufoPath, *subpath):
path = subpathJoin(ufoPath, *subpath)
if subpathExists(ufoPath, *subpath):
existing = subpathReadFile(ufoPath, *subpath)
-
- if type(data) != type(existing):
- if not isinstance(data, unicode):
- data = unicode(data, "utf-8")
- if not isinstance(existing, unicode):
- existing = unicode(existing, "utf-8")
else:
existing = None
- if data != existing:
- f = open(path, "wb")
- f.write(tobytes(data))
- f.close()
+ if text != existing:
+ with open(path, "w", encoding="utf-8") as f:
+ f.write(text)
def subpathWritePlist(data, ufoPath, *subpath):
"""
@@ -1383,8 +1378,16 @@ def subpathWritePlist(data, ufoPath, *subpath):
file contains data that is different
from the new data.
"""
- data = _writePlistToBytes(data)
- subpathWriteFile(data, ufoPath, *subpath)
+ data = _dumps(data)
+ path = subpathJoin(ufoPath, *subpath)
+ if subpathExists(ufoPath, *subpath):
+ existing = subpathReadPlist(ufoPath, *subpath)
+ else:
+ existing = None
+
+ if data != existing:
+ with open(path, "wb") as f:
+ f.write(data)
# rename
| String encoding issue
When using the latest version, I am getting this error. It seems to relate to the © symbol in the font info area.
```
File "/Users/…/bin/FDK/Tools/osx/Python/Current/lib/python2.7/site-packages/ufonormalizer-0.1-py2.7.egg/ufonormalizer.py", line 109, in tobytes
return s.encode(encoding, errors)
UnicodeEncodeError: 'ascii' codec can't encode character u'\xa9' in position 310: ordinal not in range(128)
``` | unified-font-object/ufoNormalizer | diff --git a/normalization/test_ufonormalizer.py b/normalization/test_ufonormalizer.py
index 42957dc..6631c7d 100644
--- a/normalization/test_ufonormalizer.py
+++ b/normalization/test_ufonormalizer.py
@@ -11,10 +11,13 @@ from io import open
from xml.etree import cElementTree as ET
from ufonormalizer import (
normalizeGLIF, normalizeGlyphsDirectoryNames, normalizeGlyphNames,
- subpathJoin, subpathReadPlist, subpathWriteFile, subpathWritePlist,
- UFONormalizerError, XMLWriter, tobytes, userNameToFileName, handleClash1,
- handleClash2, xmlEscapeText, xmlEscapeAttribute, xmlConvertValue,
- xmlConvertFloat, xmlConvertInt,
+ subpathJoin, subpathSplit, subpathExists, subpathReadFile,
+ subpathReadPlist, subpathWriteFile, subpathWritePlist, subpathRenameFile,
+ subpathRenameDirectory, subpathRenameDirectory, subpathRemoveFile,
+ subpathGetModTime, subpathNeedsRefresh, modTimeLibKey, storeModTimes,
+ readModTimes, UFONormalizerError, XMLWriter,
+ tobytes, userNameToFileName, handleClash1, handleClash2, xmlEscapeText,
+ xmlEscapeAttribute, xmlConvertValue, xmlConvertFloat, xmlConvertInt,
_normalizeGlifAnchor, _normalizeGlifGuideline, _normalizeGlifLib,
_normalizeGlifNote, _normalizeFontInfoGuidelines, _normalizeGlifUnicode,
_normalizeGlifAdvance, _normalizeGlifImage, _normalizeDictGuideline,
@@ -25,16 +28,19 @@ from ufonormalizer import (
_normalizeGlifPointAttributesFormat2,
_normalizeGlifComponentAttributesFormat2, _normalizeGlifTransformation,
_normalizeColorString, _convertPlistElementToObject)
+from ufonormalizer import __version__ as ufonormalizerVersion
# Python 3.4 deprecated readPlistFromBytes and writePlistToBytes
# Python 2 has readPlistFromString and writePlistToString
try:
- from plistlib import loads
+ from plistlib import loads, dumps
except ImportError:
try:
from plistlib import readPlistFromBytes as loads
+ from plistlib import writePlistToBytes as dumps
except ImportError:
from plistlib import readPlistFromString as loads
+ from plistlib import writePlistToString as dumps
GLIFFORMAT1 = '''\
<?xml version="1.0" encoding="UTF-8"?>
@@ -1565,6 +1571,142 @@ class XMLWriterTest(unittest.TestCase):
self.assertEqual(xmlConvertInt(0o0000030), '24')
self.assertEqual(xmlConvertInt(65536), '65536')
+ def test_duplicateUFO(self):
+ pass
+
+
+class SubpathTest(unittest.TestCase):
+ def __init__(self, methodName):
+ unittest.TestCase.__init__(self, methodName)
+ self.filename = 'tmp'
+ self.plistname = 'tmp.plist'
+
+ def setUp(self):
+ self.directory = tempfile.mkdtemp()
+ self.filepath = os.path.join(self.directory, self.filename)
+ self.plistpath = os.path.join(self.directory, self.plistname)
+
+ def tearDown(self):
+ shutil.rmtree(self.directory)
+
+ def createTestFile(self, text, num=None):
+ if num is None:
+ with open(self.filepath, 'w', encoding='utf-8') as f:
+ f.write(text)
+ else:
+ for i in range(num):
+ filepath = self.filepath + str(i)
+ with open(filepath, 'w', encoding='utf-8') as f:
+ f.write(text)
+
+ def test_subpathJoin(self):
+ self.assertEqual(subpathJoin('a', 'b', 'c'),
+ os.path.join('a', 'b', 'c'))
+ self.assertEqual(subpathJoin('a', os.path.join('b', 'c')),
+ os.path.join('a', 'b', 'c'))
+
+ def test_subpathSplit(self):
+ self.assertEqual(subpathSplit(os.path.join('a', 'b')),
+ os.path.split(os.path.join('a', 'b')))
+ self.assertEqual(subpathSplit(os.path.join('a', 'b', 'c')),
+ os.path.split(os.path.join('a', 'b', 'c')))
+
+ def test_subpathExists(self):
+ self.createTestFile('')
+ self.assertTrue(subpathExists(self.directory, self.filepath))
+ self.assertFalse(subpathExists(self.directory, 'nofile.txt'))
+
+ def test_subpathReadFile(self):
+ text = 'foo bar™⁜'
+ self.createTestFile(text)
+ self.assertEqual(text, subpathReadFile(self.directory, self.filename))
+
+ def test_subpathReadPlist(self):
+ data = dict([('a', 'foo'), ('b', 'bar'), ('c', '™')])
+ with open(self.plistpath, 'wb') as f:
+ f.write(dumps(data))
+ self.assertEqual(subpathReadPlist(self.directory, self.plistname),
+ data)
+
+ def test_subpathWriteFile(self):
+ expected_text = 'foo bar™⁜'
+ subpathWriteFile(expected_text, self.directory, self.filename)
+ with open(self.filepath, 'r', encoding='utf-8') as f:
+ text = f.read()
+ self.assertEqual(text, expected_text)
+
+ def test_subpathWritePlist(self):
+ expected_data = dict([('a', 'foo'), ('b', 'bar'), ('c', '™')])
+ subpathWritePlist(expected_data, self.directory, self.plistname)
+ with open(self.plistpath, 'rb') as f:
+ data = loads(f.read())
+ self.assertEqual(data, expected_data)
+
+ def test_subpathRenameFile(self):
+ self.createTestFile('')
+ subpathRenameFile(self.directory, self.filename, self.filename + "_")
+ self.assertTrue(os.path.exists(self.filepath + "_"))
+
+ def test_subpathRenameDirectory(self):
+ dirname = 'tmpdir'
+ dirpath = os.path.join(self.directory, dirname)
+ os.mkdir(dirpath)
+ subpathRenameFile(self.directory, dirname, dirname + "_")
+ self.assertTrue(os.path.exists(dirpath + "_"))
+
+ def test_subpathRemoveFile(self):
+ self.createTestFile('')
+ subpathRemoveFile(self.directory, self.filename)
+ self.assertFalse(os.path.exists(self.filepath))
+
+ def test_subpathGetModTime(self):
+ self.createTestFile('')
+ mtime = subpathGetModTime(self.directory, self.filename)
+ self.assertEqual(os.path.getmtime(self.filepath), mtime)
+
+ def test_subpathNeedsRefresh(self):
+ import time
+ self.createTestFile('')
+ modTime = os.path.getmtime(self.filepath)
+ modTimes = {}
+ modTimes[self.filename] = float(modTime)
+ self.assertFalse(subpathNeedsRefresh(modTimes, self.directory,
+ self.filename))
+ time.sleep(1) # to get a different modtime
+ with open(self.filepath, 'w', encoding='utf-8') as f:
+ f.write('foo')
+ self.assertTrue(subpathNeedsRefresh(modTimes, self.directory,
+ self.filename))
+
+ def test_storeModTimes(self):
+ num = 5
+ lib = {}
+ modTimes = {}
+ self.createTestFile('', num)
+ filenames = [self.filename + str(i) for i in range(num)]
+ for filename in filenames:
+ filepath = os.path.join(self.directory, filename)
+ modTime = os.path.getmtime(filepath)
+ modTimes[filename] = float('%.1f' % (modTime))
+ lines = ['version: %s' % (ufonormalizerVersion)]
+ lines += ['%.1f %s' % (modTimes[filename], filename)
+ for filename in filenames]
+ storeModTimes(lib, modTimes)
+ self.assertEqual('\n'.join(lines), lib[modTimeLibKey])
+
+ def test_readModTimes(self):
+ num = 5
+ lib = {}
+ modTimes = {}
+ lines = ['version: %s' % (ufonormalizerVersion)]
+ filenames = [self.filename + str(i) for i in range(num)]
+ modTime = float(os.path.getmtime(self.directory))
+ for i, filename in enumerate(filenames):
+ modTimes[filename] = float('%.1f' % (modTime + i))
+ lines.append('%.1f %s' % (modTime + i, filename))
+ lib[modTimeLibKey] = '\n'.join(lines)
+ self.assertEqual(readModTimes(lib), modTimes)
+
class NameTranslationTest(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": null,
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/unified-font-object/ufoNormalizer.git@f14d55967d4621114ff598a4407ee839f5387ff4#egg=ufonormalizer
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: ufoNormalizer
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
prefix: /opt/conda/envs/ufoNormalizer
| [
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathReadFile",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathWriteFile"
]
| []
| [
"normalization/test_ufonormalizer.py::UFONormalizerErrorTest::test_str",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_convert_plist_Element_to_object",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_invalid_angle",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_invalid_x",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_invalid_y",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_angle",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_color",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_identifier",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_x",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_y",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_no_guidelines",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_defaults",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_height",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_invalid_values",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_undefined",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_width",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_color",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_identifier",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_x",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_y",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_formats_1_and_2",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_guideline_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_guideline_invalid",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_empty",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_no_color",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_no_file_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_no_transformation",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_lib_defined",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_lib_undefined",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_no_formats",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_note_defined",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_note_undefined",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_outline_format1_element_order",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_outline_format1_empty",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_unicode_with_hex",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_unicode_without_hex",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_defaults",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_no_base",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_no_transformation",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format2_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_format1_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_format1_no_base",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_format1_subelement",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_empty",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_implied_anchor",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_implied_anchor_with_empty_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_implied_anchor_without_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_normal",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_point_without_attributes",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_unkown_child_element",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_unkown_point_type",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_empty",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_normal",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_point_without_attributes",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_unknown_child_element",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_outline_format2_element_order",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_outline_format2_empty",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_empty_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_invalid_x",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_invalid_y",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_no_name",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_no_x",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_no_y",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_subelement",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_type_and_smooth",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format2_everything",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_default",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_empty",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_invalid_value",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_non_default",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_unknown_attribute",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphNames_non_standard",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphNames_old_same_as_new",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphsDirectoryNames_non_standard",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphsDirectoryNames_old_same_as_new",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeLayerInfoPlist_color",
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalize_color_string",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_attributesToString",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_duplicateUFO",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_array",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_boolean",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_data",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_date",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_dict",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_float",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_integer",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_none",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_string",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_unknown_data_type",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlConvertFloat",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlConvertInt",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlConvertValue",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlEscapeAttribute",
"normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlEscapeText",
"normalization/test_ufonormalizer.py::SubpathTest::test_readModTimes",
"normalization/test_ufonormalizer.py::SubpathTest::test_storeModTimes",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathExists",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathGetModTime",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathJoin",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathNeedsRefresh",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathReadPlist",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathRemoveFile",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathRenameDirectory",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathRenameFile",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathSplit",
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathWritePlist",
"normalization/test_ufonormalizer.py::NameTranslationTest::test_handleClash1",
"normalization/test_ufonormalizer.py::NameTranslationTest::test_handleClash1_max_file_length",
"normalization/test_ufonormalizer.py::NameTranslationTest::test_handleClash2",
"normalization/test_ufonormalizer.py::NameTranslationTest::test_userNameToFileName"
]
| []
| BSD-3-Clause | 413 | [
"normalization/ufonormalizer.py"
]
| [
"normalization/ufonormalizer.py"
]
|
scrapy__scrapy-1771 | e328a9b9dfa4fbc79c59ed4f45f757e998301c31 | 2016-02-07 06:15:48 | a975a50558cd78a1573bee2e957afcb419fd1bd6 | diff --git a/scrapy/http/cookies.py b/scrapy/http/cookies.py
index e92c3fe73..a1e95102e 100644
--- a/scrapy/http/cookies.py
+++ b/scrapy/http/cookies.py
@@ -137,13 +137,29 @@ class WrappedRequest(object):
"""
return self.request.meta.get('is_unverifiable', False)
- # python3 uses request.unverifiable
+ def get_origin_req_host(self):
+ return urlparse_cached(self.request).hostname
+
+ # python3 uses attributes instead of methods
+ @property
+ def full_url(self):
+ return self.get_full_url()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+ @property
+ def type(self):
+ return self.get_type()
+
@property
def unverifiable(self):
return self.is_unverifiable()
- def get_origin_req_host(self):
- return urlparse_cached(self.request).hostname
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
def has_header(self, name):
return name in self.request.headers
| PY3: Fail to download the second or later requests to hosts using secure cookies
## Environment
* Mac OS X 10.10.5
* Python 3.4.2
* Scrapy 1.1.0rc1
* Twisted 15.5.0
## Steps to Reproduce
1. Save the following spider as `secure_cookie_spider.py`.
```py
import scrapy
class SecureCookieSpider(scrapy.Spider):
name = 'secure_cookie_spider'
start_urls = [
'https://github.com/',
]
def parse(self, response):
# Request the same url again
yield scrapy.Request(url=response.url, callback=self.parse_second_request)
def parse_second_request(self, response):
pass
```
2. Run the following command.
```
$ scrapy runspider secure_cookie_spider.py
```
## Expected Results
No error is reported.
## Actual Results
The second request fails to download with `AttributeError: 'WrappedRequest' object has no attribute 'type'`.
```
$ scrapy runspider secure_cookie_spider.py
2016-02-07 11:57:11 [scrapy] INFO: Scrapy 1.1.0rc1 started (bot: scrapybot)
2016-02-07 11:57:11 [scrapy] INFO: Overridden settings: {}
2016-02-07 11:57:11 [scrapy] INFO: Enabled extensions:
['scrapy.extensions.corestats.CoreStats',
'scrapy.extensions.logstats.LogStats']
2016-02-07 11:57:11 [scrapy] INFO: Enabled downloader middlewares:
['scrapy.downloadermiddlewares.httpauth.HttpAuthMiddleware',
'scrapy.downloadermiddlewares.downloadtimeout.DownloadTimeoutMiddleware',
'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware',
'scrapy.downloadermiddlewares.retry.RetryMiddleware',
'scrapy.downloadermiddlewares.defaultheaders.DefaultHeadersMiddleware',
'scrapy.downloadermiddlewares.redirect.MetaRefreshMiddleware',
'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware',
'scrapy.downloadermiddlewares.redirect.RedirectMiddleware',
'scrapy.downloadermiddlewares.cookies.CookiesMiddleware',
'scrapy.downloadermiddlewares.chunked.ChunkedTransferMiddleware',
'scrapy.downloadermiddlewares.stats.DownloaderStats']
2016-02-07 11:57:11 [scrapy] INFO: Enabled spider middlewares:
['scrapy.spidermiddlewares.httperror.HttpErrorMiddleware',
'scrapy.spidermiddlewares.offsite.OffsiteMiddleware',
'scrapy.spidermiddlewares.referer.RefererMiddleware',
'scrapy.spidermiddlewares.urllength.UrlLengthMiddleware',
'scrapy.spidermiddlewares.depth.DepthMiddleware']
2016-02-07 11:57:11 [scrapy] INFO: Enabled item pipelines:
[]
2016-02-07 11:57:11 [scrapy] INFO: Spider opened
2016-02-07 11:57:11 [scrapy] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min)
2016-02-07 11:57:12 [scrapy] DEBUG: Crawled (200) <GET https://github.com/> (referer: None)
2016-02-07 11:57:12 [scrapy] ERROR: Error downloading <GET https://github.com/>
Traceback (most recent call last):
File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/twisted/internet/defer.py", line 1128, in _inlineCallbacks
result = g.send(result)
File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/scrapy/core/downloader/middleware.py", line 37, in process_request
response = yield method(request=request, spider=spider)
File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/scrapy/downloadermiddlewares/cookies.py", line 39, in process_request
jar.add_cookie_header(request)
File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/scrapy/http/cookies.py", line 42, in add_cookie_header
cookies += self.jar._cookies_for_domain(host, wreq)
File "/usr/local/Cellar/python3/3.4.2_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/http/cookiejar.py", line 1242, in _cookies_for_domain
if not self._policy.return_ok(cookie, request):
File "/usr/local/Cellar/python3/3.4.2_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/http/cookiejar.py", line 1077, in return_ok
if not fn(cookie, request):
File "/usr/local/Cellar/python3/3.4.2_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/http/cookiejar.py", line 1103, in return_ok_secure
if cookie.secure and request.type != "https":
AttributeError: 'WrappedRequest' object has no attribute 'type'
2016-02-07 11:57:12 [scrapy] INFO: Closing spider (finished)
2016-02-07 11:57:12 [scrapy] INFO: Dumping Scrapy stats:
{'downloader/exception_count': 1,
'downloader/exception_type_count/builtins.AttributeError': 1,
'downloader/request_bytes': 211,
'downloader/request_count': 1,
'downloader/request_method_count/GET': 1,
'downloader/response_bytes': 9735,
'downloader/response_count': 1,
'downloader/response_status_count/200': 1,
'finish_reason': 'finished',
'finish_time': datetime.datetime(2016, 2, 7, 2, 57, 12, 757829),
'log_count/DEBUG': 1,
'log_count/ERROR': 1,
'log_count/INFO': 7,
'request_depth_max': 1,
'response_received_count': 1,
'scheduler/dequeued': 2,
'scheduler/dequeued/memory': 2,
'scheduler/enqueued': 2,
'scheduler/enqueued/memory': 2,
'start_time': datetime.datetime(2016, 2, 7, 2, 57, 11, 384330)}
2016-02-07 11:57:12 [scrapy] INFO: Spider closed (finished)
```
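To make the root cause concrete, here is a small self-contained sketch (not Scrapy code; both class names are invented) of the API difference behind the traceback: Python 3's `http.cookiejar` reads `request.type` and similar values as plain attributes, while Python 2's `cookielib` calls accessor methods such as `get_type()`, so a wrapper that only defines the methods also needs matching read-only properties on Python 3.
```py
class MethodOnlyRequest(object):
    """Wrapper in the Python 2 style: accessor methods only."""

    def get_type(self):
        return 'https'


class PropertyShimRequest(MethodOnlyRequest):
    """Adds the attribute-style access Python 3's cookiejar expects."""

    @property
    def type(self):
        return self.get_type()


print(hasattr(MethodOnlyRequest(), 'type'))  # False -> AttributeError in cookiejar
print(PropertyShimRequest().type)            # 'https'
```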
Note that no error is reported in Python 2. | scrapy/scrapy | diff --git a/tests/test_http_cookies.py b/tests/test_http_cookies.py
index d529f609b..549f779d8 100644
--- a/tests/test_http_cookies.py
+++ b/tests/test_http_cookies.py
@@ -14,12 +14,15 @@ class WrappedRequestTest(TestCase):
def test_get_full_url(self):
self.assertEqual(self.wrapped.get_full_url(), self.request.url)
+ self.assertEqual(self.wrapped.full_url, self.request.url)
def test_get_host(self):
self.assertEqual(self.wrapped.get_host(), urlparse(self.request.url).netloc)
+ self.assertEqual(self.wrapped.host, urlparse(self.request.url).netloc)
def test_get_type(self):
self.assertEqual(self.wrapped.get_type(), urlparse(self.request.url).scheme)
+ self.assertEqual(self.wrapped.type, urlparse(self.request.url).scheme)
def test_is_unverifiable(self):
self.assertFalse(self.wrapped.is_unverifiable())
@@ -32,6 +35,7 @@ class WrappedRequestTest(TestCase):
def test_get_origin_req_host(self):
self.assertEqual(self.wrapped.get_origin_req_host(), 'www.example.com')
+ self.assertEqual(self.wrapped.origin_req_host, 'www.example.com')
def test_has_header(self):
self.assertTrue(self.wrapped.has_header('content-type'))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libxml2-dev libxslt1-dev zlib1g-dev libffi-dev libssl-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@e328a9b9dfa4fbc79c59ed4f45f757e998301c31#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_http_cookies.py::WrappedRequestTest::test_get_full_url",
"tests/test_http_cookies.py::WrappedRequestTest::test_get_host",
"tests/test_http_cookies.py::WrappedRequestTest::test_get_origin_req_host",
"tests/test_http_cookies.py::WrappedRequestTest::test_get_type"
]
| []
| [
"tests/test_http_cookies.py::WrappedRequestTest::test_add_unredirected_header",
"tests/test_http_cookies.py::WrappedRequestTest::test_get_header",
"tests/test_http_cookies.py::WrappedRequestTest::test_has_header",
"tests/test_http_cookies.py::WrappedRequestTest::test_header_items",
"tests/test_http_cookies.py::WrappedRequestTest::test_is_unverifiable",
"tests/test_http_cookies.py::WrappedRequestTest::test_is_unverifiable2",
"tests/test_http_cookies.py::WrappedResponseTest::test_get_all",
"tests/test_http_cookies.py::WrappedResponseTest::test_getheaders",
"tests/test_http_cookies.py::WrappedResponseTest::test_info"
]
| []
| BSD 3-Clause "New" or "Revised" License | 415 | [
"scrapy/http/cookies.py"
]
| [
"scrapy/http/cookies.py"
]
|
|
pika__pika-701 | c467ad22fb8f2fd2bc925fa59d3b082fd035302b | 2016-02-07 08:43:01 | f73f9bbaddd90b03583a6693f6158e56fbede948 | vitaly-krugl: @gmr, please take a look when you have a moment | diff --git a/docs/examples/asynchronous_publisher_example.rst b/docs/examples/asynchronous_publisher_example.rst
index e1267ad..6cba07c 100644
--- a/docs/examples/asynchronous_publisher_example.rst
+++ b/docs/examples/asynchronous_publisher_example.rst
@@ -162,7 +162,7 @@ publisher.py::
different parameters. In this case, we'll close the connection
to shutdown the object.
- :param pika.channel.Channel channel: The closed channel
+ :param pika.channel.Channel: The closed channel
:param int reply_code: The numeric reason the channel was closed
:param str reply_text: The text reason the channel was closed
diff --git a/docs/examples/blocking_consume.rst b/docs/examples/blocking_consume.rst
index 8603c15..85852e4 100644
--- a/docs/examples/blocking_consume.rst
+++ b/docs/examples/blocking_consume.rst
@@ -9,21 +9,21 @@ When pika calls your method, it will pass in the channel, a :py:class:`pika.spec
Example of consuming messages and acknowledging them::
- import pika
+ import pika
- def on_message(channel, method_frame, header_frame, body):
- print(method_frame.delivery_tag)
- print(body)
- print()
- channel.basic_ack(delivery_tag=method_frame.delivery_tag)
+ def on_message(channel, method_frame, header_frame, body):
+ print(method_frame.delivery_tag)
+ print(body)
+ print()
+ channel.basic_ack(delivery_tag=method_frame.delivery_tag)
- connection = pika.BlockingConnection()
- channel = connection.channel()
- channel.basic_consume(on_message, 'test')
- try:
- channel.start_consuming()
- except KeyboardInterrupt:
- channel.stop_consuming()
- connection.close()
+ connection = pika.BlockingConnection()
+ channel = connection.channel()
+ channel.basic_consume(on_message, 'test')
+ try:
+ channel.start_consuming()
+ except KeyboardInterrupt:
+ channel.stop_consuming()
+ connection.close()
\ No newline at end of file
diff --git a/docs/examples/heartbeat_and_blocked_timeouts.rst b/docs/examples/heartbeat_and_blocked_timeouts.rst
new file mode 100644
index 0000000..ba2c4b5
--- /dev/null
+++ b/docs/examples/heartbeat_and_blocked_timeouts.rst
@@ -0,0 +1,37 @@
+Ensuring well-behaved connection with heartbeat and blocked-connection timeouts
+===============================================================================
+
+
+This example demonstrates explicit setting of heartbeat and blocked connection timeouts.
+
+Starting with RabbitMQ 3.5.5, the broker's default heartbeat timeout decreased from 580 seconds to 60 seconds. As a result, applications that perform lengthy processing in the same thread that also runs their Pika connection may experience unexpected dropped connections due to heartbeat timeout. Here, we specify an explicit lower bound for heartbeat timeout.
+
+When the RabbitMQ broker is running out of certain resources, such as memory and disk space, it may block connections that are performing resource-consuming operations, such as publishing messages. Once a connection is blocked, RabbitMQ stops reading from that connection's socket, so no commands from the client will get through to the broker on that connection until the broker unblocks it. A blocked connection may last for an indefinite period of time, stalling the connection and possibly resulting in a hang (e.g., in BlockingConnection) until the connection is unblocked. Blocked Connection Timeout is intended to interrupt (i.e., drop) a connection that has been blocked longer than the given timeout value.
+
+Example of configuring heartbeat and blocked-connection timeouts::
+
+ import pika
+
+
+ def main():
+
+ # NOTE: These paramerers work with all Pika connection types
+ params = pika.ConnectionParameters(heartbeat_interval=600,
+ blocked_connection_timeout=300)
+
+ conn = pika.BlockingConnection(params)
+
+ chan = conn.channel()
+
+ chan.basic_publish('', 'my-alphabet-queue', "abc")
+
+ # If publish causes the connection to become blocked, then this conn.close()
+ # would hang until the connection is unblocked, if ever. However, the
+ # blocked_connection_timeout connection parameter would interrupt the wait,
+ # resulting in ConnectionClosed exception from BlockingConnection (or the
+ # on_connection_closed callback call in an asynchronous adapter)
+ conn.close()
+
+
+ if __name__ == '__main__':
+ main()
diff --git a/docs/version_history.rst b/docs/version_history.rst
index c3eb70d..9d7ffc3 100644
--- a/docs/version_history.rst
+++ b/docs/version_history.rst
@@ -18,6 +18,10 @@ Next Release
never be serviced in the asynchronous scenario.
- `Channel.basic_reject` fixed to allow `delivery_tag` to be of type `long`
as well as `int`. (by quantum5)
+ - Implemented support for blocked connection timeouts in
+ `pika.connection.Connection`. This feature is available to all pika adapters.
+ See `pika.connection.ConnectionParameters` docstring to learn more about
+ `blocked_connection_timeout` configuration.
0.10.0 2015-09-02
-----------------
diff --git a/examples/heatbeat_and_blocked_timeouts.py b/examples/heatbeat_and_blocked_timeouts.py
new file mode 100644
index 0000000..3fe9a99
--- /dev/null
+++ b/examples/heatbeat_and_blocked_timeouts.py
@@ -0,0 +1,48 @@
+"""
+This example demonstrates explicit setting of heartbeat and blocked connection
+timeouts.
+
+Starting with RabbitMQ 3.5.5, the broker's default heartbeat timeout decreased
+from 580 seconds to 60 seconds. As a result, applications that perform lengthy
+processing in the same thread that also runs their Pika connection may
+experience unexpected dropped connections due to heartbeat timeout. Here, we
+specify an explicit lower bound for heartbeat timeout.
+
+When the RabbitMQ broker is running out of certain resources, such as memory
+and disk space, it may block connections that are performing resource-consuming
+operations, such as publishing messages. Once a connection is blocked, RabbitMQ
+stops reading from that connection's socket, so no commands from the client
+will get through to the broker on that connection until the broker unblocks it.
+A blocked connection may last for an indefinite period of time, stalling the
+connection and possibly resulting in a hang (e.g., in BlockingConnection) until
+the connection is unblocked. Blocked Connection Timeout is intended to
+interrupt (i.e., drop) a connection that has been blocked longer than the given
+timeout value.
+"""
+
+
+import pika
+
+
+def main():
+
+    # NOTE: These parameters work with all Pika connection types
+ params = pika.ConnectionParameters(heartbeat_interval=600,
+ blocked_connection_timeout=300)
+
+ conn = pika.BlockingConnection(params)
+
+ chan = conn.channel()
+
+ chan.basic_publish('', 'my-alphabet-queue', "abc")
+
+ # If publish causes the connection to become blocked, then this conn.close()
+ # would hang until the connection is unblocked, if ever. However, the
+ # blocked_connection_timeout connection parameter would interrupt the wait,
+ # resulting in ConnectionClosed exception from BlockingConnection (or the
+ # on_connection_closed callback call in an asynchronous adapter)
+ conn.close()
+
+
+if __name__ == '__main__':
+ main()
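The blocking adapter surfaces an expired blocked-connection timeout as a `ConnectionClosed` exception whose first argument is `InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT` (see the `blocking_connection.py` docstring and the acceptance test further below), so a caller can tell this teardown apart from other closures. A hedged sketch building on the example above; the queue name and the 300-second timeout are assumptions:

    import pika
    from pika import connection, exceptions

    params = pika.ConnectionParameters(blocked_connection_timeout=300)
    conn = pika.BlockingConnection(params)
    chan = conn.channel()

    try:
        chan.basic_publish('', 'my-alphabet-queue', 'abc')
        conn.close()
    except exceptions.ConnectionClosed as exc:
        reason = exc.args[0] if exc.args else None
        if reason == connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT:
            # Connection stayed blocked longer than blocked_connection_timeout;
            # the adapter dropped it instead of hanging indefinitely.
            print('connection dropped after blocked-connection timeout')
        else:
            raise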
diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py
index d355c60..3bfc71a 100644
--- a/pika/adapters/base_connection.py
+++ b/pika/adapters/base_connection.py
@@ -78,6 +78,29 @@ class BaseConnection(connection.Connection):
on_open_error_callback,
on_close_callback)
+ def __repr__(self):
+ def get_socket_repr(sock):
+ if sock is None:
+ return None
+
+ sockname = sock.getsockname()
+
+ peername = None
+ try:
+ peername = sock.getpeername()
+ except socket.error:
+ # not connected?
+ pass
+
+ return '%s->%s' % (sockname, peername)
+
+ return (
+ '<%s state=%s socket=%s params=%s>' %
+ (self.__class__.__name__,
+ self.connection_state,
+ get_socket_repr(self.socket),
+ self.params))
+
def add_timeout(self, deadline, callback_method):
"""Add the callback_method to the IOLoop timer to fire after deadline
seconds. Returns a handle to the timeout
@@ -315,7 +338,8 @@ class BaseConnection(connection.Connection):
LOGGER.error("Socket Error: %s", error_code)
# Disconnect from our IOLoop and let Connection know what's up
- self._on_terminate(-1, repr(error_value))
+ self._on_terminate(connection.InternalCloseReasons.SOCKET_ERROR,
+ repr(error_value))
def _handle_timeout(self):
"""Handle a socket timeout in read or write.
@@ -349,7 +373,8 @@ class BaseConnection(connection.Connection):
error_msg = ('BAD libc: Write-Only but Read+Error. '
'Assume socket disconnected.')
LOGGER.error(error_msg)
- self._on_terminate(-1, error_msg)
+ self._on_terminate(connection.InternalCloseReasons.SOCKET_ERROR,
+ error_msg)
if self.socket and (events & self.ERROR):
LOGGER.error('Error event %r, %r', events, error)
@@ -391,7 +416,9 @@ class BaseConnection(connection.Connection):
# Empty data, should disconnect
if not data or data == 0:
LOGGER.error('Read empty data, calling disconnect')
- return self._on_terminate(-1, "EOF")
+ return self._on_terminate(
+ connection.InternalCloseReasons.SOCKET_ERROR,
+ "EOF")
# Pass the data into our top level frame dispatching method
self._on_data_available(data)
@@ -400,13 +427,13 @@ class BaseConnection(connection.Connection):
def _handle_write(self):
"""Try and write as much as we can, if we get blocked requeue
what's left"""
- bytes_written = 0
+ total_bytes_sent = 0
try:
while self.outbound_buffer:
frame = self.outbound_buffer.popleft()
while True:
try:
- bw = self.socket.send(frame)
+ num_bytes_sent = self.socket.send(frame)
break
except _SOCKET_ERROR as error:
if error.errno == errno.EINTR:
@@ -414,10 +441,10 @@ class BaseConnection(connection.Connection):
else:
raise
- bytes_written += bw
- if bw < len(frame):
+ total_bytes_sent += num_bytes_sent
+ if num_bytes_sent < len(frame):
LOGGER.debug("Partial write, requeing remaining data")
- self.outbound_buffer.appendleft(frame[bw:])
+ self.outbound_buffer.appendleft(frame[num_bytes_sent:])
break
except socket.timeout:
@@ -433,7 +460,7 @@ class BaseConnection(connection.Connection):
else:
return self._handle_error(error)
- return bytes_written
+ return total_bytes_sent
def _init_connection_state(self):
diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py
index d93413b..9c74de5 100644
--- a/pika/adapters/blocking_connection.py
+++ b/pika/adapters/blocking_connection.py
@@ -212,7 +212,7 @@ class _TimerEvt(object): # pylint: disable=R0903
self.timer_id = None
def __repr__(self):
- return '%s(timer_id=%s, callback=%s)' % (self.__class__.__name__,
+ return '<%s timer_id=%s callback=%s>' % (self.__class__.__name__,
self.timer_id, self._callback)
def dispatch(self):
@@ -236,9 +236,9 @@ class _ConnectionBlockedUnblockedEvtBase(object): # pylint: disable=R0903
self._method_frame = method_frame
def __repr__(self):
- return '%s(callback=%s, frame=%s)' % (self.__class__.__name__,
- self._callback,
- self._method_frame)
+ return '<%s callback=%s, frame=%s>' % (self.__class__.__name__,
+ self._callback,
+ self._method_frame)
def dispatch(self):
"""Dispatch the user's callback method"""
@@ -266,7 +266,7 @@ class BlockingConnection(object): # pylint: disable=R0902
receive messages from RabbitMQ using
:meth:`basic_consume <BlockingChannel.basic_consume>` or if you want to be
notified of a delivery failure when using
- :meth:`basic_publish <BlockingChannel.basic_publish>` .
+ :meth:`basic_publish <BlockingChannel.basic_publish>`.
For more information about communicating with the blocking_connection
adapter, be sure to check out the
@@ -274,6 +274,40 @@ class BlockingConnection(object): # pylint: disable=R0902
:class:`Channel <pika.channel.Channel>` based communication for the
blocking_connection adapter.
+ To prevent recursion/reentrancy, the blocking connection and channel
+ implementations queue asynchronously-delivered events received
+ in nested context (e.g., while waiting for `BlockingConnection.channel` or
+ `BlockingChannel.queue_declare` to complete), dispatching them synchronously
+ once nesting returns to the desired context. This concerns all callbacks,
+ such as those registered via `BlockingConnection.add_timeout`,
+ `BlockingConnection.add_on_connection_blocked_callback`,
+ `BlockingConnection.add_on_connection_unblocked_callback`,
+ `BlockingChannel.basic_consume`, etc.
+
+ Blocked Connection deadlock avoidance: when RabbitMQ becomes low on
+ resources, it emits Connection.Blocked (AMQP extension) to the client
+ connection when the client makes a resource-consuming request on that connection
+ or its channel (e.g., `Basic.Publish`); subsequently, RabbitMQ suspends
+ processing requests from that connection until the affected resources are
+ restored. See http://www.rabbitmq.com/connection-blocked.html. This
+ may impact `BlockingConnection` and `BlockingChannel` operations in a
+ way that users might not be expecting. For example, if the user dispatches
+ `BlockingChannel.basic_publish` in non-publisher-confirmation mode while
+ RabbitMQ is in this low-resource state followed by a synchronous request
+ (e.g., `BlockingConnection.channel`, `BlockingChannel.consume`,
+ `BlockingChannel.basic_consume`, etc.), the synchronous request will block
+ indefinitely (until Connection.Unblocked) waiting for RabbitMQ to reply. If
+ the blocked state persists for a long time, the blocking operation will
+ appear to hang. In this state, `BlockingConnection` instance and its
+ channels will not dispatch user callbacks. SOLUTION: To break this potential
+ deadlock, applications may configure the `blocked_connection_timeout`
+ connection parameter when instantiating `BlockingConnection`. Upon blocked
+ connection timeout, this adapter will raise ConnectionClosed exception with
+ first exception arg of
+ `pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT`. See
+ `pika.connection.ConnectionParameters` documentation to learn more about
+ `blocked_connection_timeout` configuration.
+
"""
# Connection-opened callback args
_OnOpenedArgs = namedtuple('BlockingConnection__OnOpenedArgs',
@@ -341,6 +375,9 @@ class BlockingConnection(object): # pylint: disable=R0902
self._process_io_for_connection_setup()
+ def __repr__(self):
+ return '<%s impl=%r>' % (self.__class__.__name__, self._impl)
+
def _cleanup(self):
"""Clean up members that might inhibit garbage collection"""
self._impl.ioloop.deactivate_poller()
@@ -541,8 +578,10 @@ class BlockingConnection(object): # pylint: disable=R0902
instead of relying on back pressure throttling. The callback
will be passed the `Connection.Blocked` method frame.
+ See also `ConnectionParameters.blocked_connection_timeout`.
+
:param method callback_method: Callback to call on `Connection.Blocked`,
- having the signature callback_method(pika.frame.Method), where the
+ having the signature `callback_method(pika.frame.Method)`, where the
method frame's `method` member is of type
`pika.spec.Connection.Blocked`
@@ -559,7 +598,7 @@ class BlockingConnection(object): # pylint: disable=R0902
:param method callback_method: Callback to call on
`Connection.Unblocked`, having the signature
- callback_method(pika.frame.Method), where the method frame's
+ `callback_method(pika.frame.Method)`, where the method frame's
`method` member is of type `pika.spec.Connection.Unblocked`
"""
@@ -640,7 +679,12 @@ class BlockingConnection(object): # pylint: disable=R0902
for impl_channel in pika.compat.dictvalues(self._impl._channels):
channel = impl_channel._get_cookie()
if channel.is_open:
- channel.close(reply_code, reply_text)
+ try:
+ channel.close(reply_code, reply_text)
+ except exceptions.ChannelClosed as exc:
+ # Log and suppress broker-closed channel
+ LOGGER.warning('Got ChannelClosed while closing channel '
+ 'from connection.close: %r', exc)
# Close the connection
self._impl.close(reply_code, reply_text)
@@ -844,8 +888,8 @@ class _ConsumerCancellationEvt(_ChannelPendingEvt): # pylint: disable=R0903
self.method_frame = method_frame
def __repr__(self):
- return '%s(method_frame=%r)' % (self.__class__.__name__,
- self.method_frame)
+ return '<%s method_frame=%r>' % (self.__class__.__name__,
+ self.method_frame)
@property
def method(self):
@@ -879,10 +923,10 @@ class _ReturnedMessageEvt(_ChannelPendingEvt): # pylint: disable=R0903
self.body = body
def __repr__(self):
- return ('%s(callback=%r, channel=%r, method=%r, properties=%r, '
- 'body=%.300r') % (self.__class__.__name__, self.callback,
- self.channel, self.method, self.properties,
- self.body)
+ return ('<%s callback=%r channel=%r method=%r properties=%r '
+ 'body=%.300r>') % (self.__class__.__name__, self.callback,
+ self.channel, self.method, self.properties,
+ self.body)
def dispatch(self):
"""Dispatch user's callback"""
@@ -989,7 +1033,7 @@ class _QueueConsumerGeneratorInfo(object): # pylint: disable=R0903
self.pending_events = deque()
def __repr__(self):
- return '%s(params=%r, consumer_tag=%r)' % (
+ return '<%s params=%r consumer_tag=%r>' % (
self.__class__.__name__, self.params, self.consumer_tag)
@@ -1118,20 +1162,26 @@ class BlockingChannel(object): # pylint: disable=R0904,R0902
LOGGER.info("Created channel=%s", self.channel_number)
- def _cleanup(self):
- """Clean up members that might inhibit garbage collection"""
- self._message_confirmation_result.reset()
- self._pending_events = deque()
- self._consumer_infos = dict()
-
def __int__(self):
"""Return the channel object as its channel number
+ NOTE: inherited from legacy BlockingConnection; might be error-prone;
+ use `channel_number` property instead.
+
:rtype: int
"""
return self.channel_number
+ def __repr__(self):
+ return '<%s impl=%r>' % (self.__class__.__name__, self._impl)
+
+ def _cleanup(self):
+ """Clean up members that might inhibit garbage collection"""
+ self._message_confirmation_result.reset()
+ self._pending_events = deque()
+ self._consumer_infos = dict()
+
@property
def channel_number(self):
"""Channel number"""
diff --git a/pika/adapters/select_connection.py b/pika/adapters/select_connection.py
index 875c48a..ca7b53d 100644
--- a/pika/adapters/select_connection.py
+++ b/pika/adapters/select_connection.py
@@ -167,7 +167,7 @@ class IOLoop(object):
return self._poller.add_timeout(deadline, callback_method)
def remove_timeout(self, timeout_id):
- """[API] Remove a timeout if it's still in the timeout stack
+ """[API] Remove a timeout
:param str timeout_id: The timeout id to remove
diff --git a/pika/adapters/twisted_connection.py b/pika/adapters/twisted_connection.py
index 62e595c..e2c7625 100644
--- a/pika/adapters/twisted_connection.py
+++ b/pika/adapters/twisted_connection.py
@@ -16,6 +16,7 @@ import functools
from twisted.internet import defer, error, reactor
from twisted.python import log
+from pika import connection
from pika import exceptions
from pika.adapters import base_connection
@@ -338,7 +339,8 @@ class TwistedConnection(base_connection.BaseConnection):
if not reason.check(error.ConnectionDone):
log.err(reason)
- self._on_terminate(-1, str(reason))
+ self._on_terminate(connection.InternalCloseReasons.SOCKET_ERROR,
+ str(reason))
def doRead(self):
self._handle_read()
diff --git a/pika/channel.py b/pika/channel.py
index b7afe54..2165479 100644
--- a/pika/channel.py
+++ b/pika/channel.py
@@ -75,6 +75,11 @@ class Channel(object):
"""
return self.channel_number
+ def __repr__(self):
+ return '<%s number=%s conn=%r>' % (self.__class__.__name__,
+ self.channel_number,
+ self.connection)
+
def add_callback(self, callback, replies, one_shot=True):
"""Pass in a callback handler and a list replies from the
RabbitMQ broker which you'd like the callback notified of. Callbacks
@@ -943,9 +948,10 @@ class Channel(object):
"""
LOGGER.info('%s', method_frame)
- LOGGER.warning('Received remote Channel.Close (%s): %s',
+ LOGGER.warning('Received remote Channel.Close (%s): %r on channel %s',
method_frame.method.reply_code,
- method_frame.method.reply_text)
+ method_frame.method.reply_text,
+ self)
if self.connection.is_open:
self._send_method(spec.Channel.CloseOk())
self._set_state(self.CLOSED)
diff --git a/pika/connection.py b/pika/connection.py
index 51b0b44..1bbf8e7 100644
--- a/pika/connection.py
+++ b/pika/connection.py
@@ -35,6 +35,16 @@ PRODUCT = "Pika Python Client Library"
LOGGER = logging.getLogger(__name__)
+class InternalCloseReasons(object):
+ """Internal reason codes passed to the user's on_close_callback.
+
+ The AMQP 0.9.1 specification cites IETF RFC 821 for reply codes. To avoid
+ conflict, the `InternalCloseReasons` namespace uses negative integers.
+ """
+ SOCKET_ERROR = -1
+ BLOCKED_CONNECTION_TIMEOUT = -2
+
+
class Parameters(object):
"""Base connection parameters class definition
@@ -54,6 +64,7 @@ class Parameters(object):
:param dict DEFAULT_SSL_OPTIONS: {}
:param int DEFAULT_SSL_PORT: 5671
:param bool DEFAULT_BACKPRESSURE_DETECTION: False
+ :param number DEFAULT_BLOCKED_CONNECTION_TIMEOUT: None
"""
DEFAULT_BACKPRESSURE_DETECTION = False
@@ -72,6 +83,7 @@ class Parameters(object):
DEFAULT_SSL_PORT = 5671
DEFAULT_USERNAME = 'guest'
DEFAULT_VIRTUAL_HOST = '/'
+ DEFAULT_BLOCKED_CONNECTION_TIMEOUT = None
def __init__(self):
self.virtual_host = self.DEFAULT_VIRTUAL_HOST
@@ -90,6 +102,13 @@ class Parameters(object):
self.ssl_options = self.DEFAULT_SSL_OPTIONS
self.socket_timeout = self.DEFAULT_SOCKET_TIMEOUT
+ # If not None, blocked_connection_timeout is the timeout, in seconds,
+ # for the connection to remain blocked; if the timeout expires, the
+ # connection will be torn down, triggering the connection's
+ # on_close_callback
+ self.blocked_connection_timeout = (
+ self.DEFAULT_BLOCKED_CONNECTION_TIMEOUT)
+
def __repr__(self):
"""Represent the info about the instance.
@@ -276,6 +295,23 @@ class Parameters(object):
raise ValueError('socket_timeout must be > 0')
return True
+ @staticmethod
+ def _validate_blocked_connection_timeout(blocked_connection_timeout):
+ """Validate that the blocked_connection_timeout value is None or a
+ number
+
+ :param real blocked_connection_timeout: The value to validate
+ :rtype: bool
+ :raises: TypeError
+
+ """
+ if blocked_connection_timeout is not None:
+ if not isinstance(blocked_connection_timeout, (int, float)):
+ raise TypeError('blocked_connection_timeout must be a Real number')
+ if blocked_connection_timeout < 0:
+ raise ValueError('blocked_connection_timeout must be >= 0')
+ return True
+
@staticmethod
def _validate_ssl(ssl):
"""Validate the SSL toggle is a bool
@@ -320,21 +356,6 @@ class ConnectionParameters(Parameters):
"""Connection parameters object that is passed into the connection adapter
upon construction.
- :param str host: Hostname or IP Address to connect to
- :param int port: TCP port to connect to
- :param str virtual_host: RabbitMQ virtual host to use
- :param pika.credentials.Credentials credentials: auth credentials
- :param int channel_max: Maximum number of channels to allow
- :param int frame_max: The maximum byte size for an AMQP frame
- :param int heartbeat_interval: How often to send heartbeats
- :param bool ssl: Enable SSL
- :param dict ssl_options: Arguments passed to ssl.wrap_socket as
- :param int connection_attempts: Maximum number of retry attempts
- :param int|float retry_delay: Time to wait in seconds, before the next
- :param int|float socket_timeout: Use for high latency networks
- :param str locale: Set the locale value
- :param bool backpressure_detection: Toggle backpressure detection
-
"""
def __init__(self,
@@ -351,7 +372,8 @@ class ConnectionParameters(Parameters):
retry_delay=None,
socket_timeout=None,
locale=None,
- backpressure_detection=None):
+ backpressure_detection=None,
+ blocked_connection_timeout=None):
"""Create a new ConnectionParameters instance.
:param str host: Hostname or IP Address to connect to
@@ -371,6 +393,15 @@ class ConnectionParameters(Parameters):
:param int|float socket_timeout: Use for high latency networks
:param str locale: Set the locale value
:param bool backpressure_detection: Toggle backpressure detection
+ :param blocked_connection_timeout: If not None,
+ the value is a non-negative timeout, in seconds, for the
+ connection to remain blocked (triggered by Connection.Blocked from
+ broker); if the timeout expires before connection becomes unblocked,
+ the connection will be torn down, triggering the adapter-specific
+ mechanism for informing client app about the closed connection (
+ e.g., on_close_callback or ConnectionClosed exception) with
+ `reason_code` of `InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT`.
+ :type blocked_connection_timeout: None, int, float
"""
super(ConnectionParameters, self).__init__()
@@ -413,6 +444,9 @@ class ConnectionParameters(Parameters):
if (backpressure_detection is not None and
self._validate_backpressure(backpressure_detection)):
self.backpressure_detection = backpressure_detection
+ if self._validate_blocked_connection_timeout(
+ blocked_connection_timeout):
+ self.blocked_connection_timeout = blocked_connection_timeout
class URLParameters(Parameters):
@@ -447,6 +481,11 @@ class URLParameters(Parameters):
connection failure.
- socket_timeout:
Override low level socket timeout value
+ - blocked_connection_timeout:
+ Set the timeout, in seconds, that the connection may remain blocked
+ (triggered by Connection.Blocked from broker); if the timeout
+ expires before connection becomes unblocked, the connection will be
+ torn down, triggering the connection's on_close_callback
:param str url: The AMQP URL to connect to
@@ -540,8 +579,8 @@ class URLParameters(Parameters):
self.heartbeat = values['heartbeat']
elif ('heartbeat_interval' in values and
- self._validate_heartbeat_interval(
- values['heartbeat_interval'])):
+ self._validate_heartbeat_interval(
+ values['heartbeat_interval'])):
warnings.warn('heartbeat_interval is deprecated, use heartbeat',
DeprecationWarning, stacklevel=2)
self.heartbeat = values['heartbeat_interval']
@@ -557,6 +596,12 @@ class URLParameters(Parameters):
self._validate_socket_timeout(values['socket_timeout'])):
self.socket_timeout = values['socket_timeout']
+ if ('blocked_connection_timeout' in values and
+ self._validate_blocked_connection_timeout(
+ values['blocked_connection_timeout'])):
+ self.blocked_connection_timeout = values[
+ 'blocked_connection_timeout']
+
if 'ssl_options' in values:
options = ast.literal_eval(values['ssl_options'])
if self._validate_ssl_options(options):
@@ -606,21 +651,35 @@ class Connection(object):
:param method on_open_error_callback: Called if the connection can't
be established: on_open_error_callback(connection, str|exception)
:param method on_close_callback: Called when the connection is closed:
- on_close_callback(connection, reason_code, reason_text)
+ `on_close_callback(connection, reason_code, reason_text)`, where
+ `reason_code` is either an IETF RFC 821 reply code for AMQP-level
+ closures or a value from `pika.connection.InternalCloseReasons` for
+ internal causes, such as socket errors.
"""
self._write_lock = threading.Lock()
+ self.connection_state = self.CONNECTION_CLOSED
+
+ # Used to hold timer if configured for Connection.Blocked timeout
+ self._blocked_conn_timer = None
+
+ self.heartbeat = None
+
+ # Set our configuration options
+ self.params = parameters or ConnectionParameters()
+
# Define our callback dictionary
self.callbacks = callback.CallbackManager()
+ # Initialize the connection state and connect
+ self._init_connection_state()
+
# Add the on connection error callback
self.callbacks.add(0, self.ON_CONNECTION_ERROR,
on_open_error_callback or self._on_connection_error,
False)
- self.heartbeat = None
-
# On connection callback
if on_open_callback:
self.add_on_open_callback(on_open_callback)
@@ -628,12 +687,6 @@ class Connection(object):
# On connection callback
if on_close_callback:
self.add_on_close_callback(on_close_callback)
-
- # Set our configuration options
- self.params = parameters or ConnectionParameters()
-
- # Initialize the connection state and connect
- self._init_connection_state()
self.connect()
def add_backpressure_callback(self, callback_method):
@@ -663,7 +716,12 @@ class Connection(object):
instead of relying on back pressure throttling. The callback
will be passed the ``Connection.Blocked`` method frame.
- :param method callback_method: Callback to call on `Connection.Blocked`
+ See also `ConnectionParameters.blocked_connection_timeout`.
+
+ :param method callback_method: Callback to call on `Connection.Blocked`,
+ having the signature `callback_method(pika.frame.Method)`, where the
+ method frame's `method` member is of type
+ `pika.spec.Connection.Blocked`
"""
self.callbacks.add(0, spec.Connection.Blocked, callback_method, False)
@@ -675,7 +733,9 @@ class Connection(object):
``Connection.Unblocked`` method frame.
:param method callback_method: Callback to call on
- `Connection.Unblocked`
+ `Connection.Unblocked`, having the signature
+ `callback_method(pika.frame.Method)`, where the method frame's
+ `method` member is of type `pika.spec.Connection.Unblocked`
"""
self.callbacks.add(0, spec.Connection.Unblocked, callback_method, False)
@@ -782,12 +842,10 @@ class Connection(object):
self.remaining_connection_attempts = self.params.connection_attempts
self._set_connection_state(self.CONNECTION_CLOSED)
- def remove_timeout(self, callback_method):
- """Adapters should override to call the callback after the
- specified number of seconds have elapsed, using a timer, or a
- thread, or similar.
+ def remove_timeout(self, timeout_id):
+ """Adapters should override: Remove a timeout
- :param method callback_method: The callback to remove a timeout for
+ :param str timeout_id: The timeout id to remove
"""
raise NotImplementedError
@@ -941,7 +999,7 @@ class Connection(object):
"""
if (value.method.version_major,
- value.method.version_minor) != spec.PROTOCOL_VERSION[0:2]:
+ value.method.version_minor) != spec.PROTOCOL_VERSION[0:2]:
raise exceptions.ProtocolVersionMismatch(frame.ProtocolHeader(),
value)
@@ -1171,6 +1229,18 @@ class Connection(object):
# simply closed the TCP/IP stream.
self.callbacks.add(0, spec.Connection.Close, self._on_connection_close)
+ if self.params.blocked_connection_timeout is not None:
+ if self._blocked_conn_timer is not None:
+ # Blocked connection timer was active when teardown was
+ # initiated
+ self.remove_timeout(self._blocked_conn_timer)
+ self._blocked_conn_timer = None
+
+ self.add_on_connection_blocked_callback(
+ self._on_connection_blocked)
+ self.add_on_connection_unblocked_callback(
+ self._on_connection_unblocked)
+
def _is_basic_deliver_frame(self, frame_value):
"""Returns true if the frame is a Basic.Deliver
@@ -1250,6 +1320,51 @@ class Connection(object):
# Start the communication with the RabbitMQ Broker
self._send_frame(frame.ProtocolHeader())
+ def _on_blocked_connection_timeout(self):
+ """ Called when the "connection blocked timeout" expires. When this
+ happens, we tear down the connection
+
+ """
+ self._blocked_conn_timer = None
+ self._on_terminate(InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT,
+ 'Blocked connection timeout expired')
+
+ def _on_connection_blocked(self, method_frame):
+ """Handle Connection.Blocked notification from RabbitMQ broker
+
+ :param pika.frame.Method method_frame: method frame having `method`
+ member of type `pika.spec.Connection.Blocked`
+ """
+ LOGGER.warning('Received %s from broker', method_frame)
+
+ if self._blocked_conn_timer is not None:
+ # RabbitMQ is not supposed to repeat Connection.Blocked, but it
+ # doesn't hurt to be careful
+ LOGGER.warning('_blocked_conn_timer %s already set when '
+ '_on_connection_blocked is called',
+ self._blocked_conn_timer)
+ else:
+ self._blocked_conn_timer = self.add_timeout(
+ self.params.blocked_connection_timeout,
+ self._on_blocked_connection_timeout)
+
+ def _on_connection_unblocked(self, method_frame):
+ """Handle Connection.Unblocked notification from RabbitMQ broker
+
+ :param pika.frame.Method method_frame: method frame having `method`
+ member of type `pika.spec.Connection.Unblocked`
+ """
+ LOGGER.info('Received %s from broker', method_frame)
+
+ if self._blocked_conn_timer is None:
+ # RabbitMQ is supposed to pair Connection.Blocked/Unblocked, but it
+ # doesn't hurt to be careful
+ LOGGER.warning('_blocked_conn_timer was not active when '
+ '_on_connection_unblocked called')
+ else:
+ self.remove_timeout(self._blocked_conn_timer)
+ self._blocked_conn_timer = None
+
def _on_connection_close(self, method_frame):
"""Called when the connection is closed remotely via Connection.Close
frame from broker.
@@ -1412,8 +1527,9 @@ class Connection(object):
"""Terminate the connection and notify registered ON_CONNECTION_ERROR
and/or ON_CONNECTION_CLOSED callbacks
- :param integer reason_code: HTTP error code for AMQP-reported closures
- or -1 for other errors (such as socket errors)
+ :param integer reason_code: either an IETF RFC 821 reply code for
+ AMQP-level closures or a value from `InternalCloseReasons` for
+ internal causes, such as socket errors
:param str reason_text: human-readable text message describing the error
"""
LOGGER.warning(
@@ -1436,6 +1552,10 @@ class Connection(object):
self._remove_callbacks(0, [spec.Connection.Close, spec.Connection.Start,
spec.Connection.Open])
+ if self.params.blocked_connection_timeout is not None:
+ self._remove_callbacks(0, [spec.Connection.Blocked,
+ spec.Connection.Unblocked])
+
# Close the socket
self._adapter_disconnect()
@@ -1501,7 +1621,7 @@ class Connection(object):
"""
if (self._is_method_frame(frame_value) and
- self._has_pending_callbacks(frame_value)):
+ self._has_pending_callbacks(frame_value)):
self.callbacks.process(frame_value.channel_number, # Prefix
frame_value.method, # Key
self, # Caller
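To illustrate the `_validate_blocked_connection_timeout` checks added above, a small sketch of the expected behaviour (this follows directly from the validation code; the printed messages are illustrative):

    import pika

    pika.ConnectionParameters(blocked_connection_timeout=10.5)  # int/float accepted
    pika.ConnectionParameters(blocked_connection_timeout=None)  # None disables the feature

    try:
        pika.ConnectionParameters(blocked_connection_timeout='300')
    except TypeError:
        print('non-numeric timeout rejected')

    try:
        pika.ConnectionParameters(blocked_connection_timeout=-1)
    except ValueError:
        print('negative timeout rejected')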
| Connection blocked events do not get processed when calling `consume()`
If RabbitMQ is below a resource limit when I connect, the BlockingConnection can end up in a hanging state if I first publish a message and then directly go into `consume()`, even when using the `inactivity_timeout` parameter.
As far as I can see, the problem seems to be that when `consume()` sends the `basic_consume` message, it waits for the `OK` from Rabbit and does not call the defined `on_blocked_connection_callback` before entering the actual consumption loop. Obviously I could wait for a second after publishing my message and then call `process_data_events()` before entering `consume()`, but that's not a solution.
How to reproduce:
- Make sure you have a RabbitMQ broker that is already below its resource limits, for example by setting `disk_free_limit` to some really high value.
- This callback gets called as long as the `time.sleep(1)` is there and the sleep is longer than it takes for the `blocked_connection` event to arrive. If you remove the sleep, it just hangs and the callback does not get called, because pika is waiting for Rabbit's `OK` message before entering the consumption loop:
import time
import pika
connection_parameters = pika.ConnectionParameters(
host='192.168.1.101',
port=5672,
credentials=pika.PlainCredentials('guest', 'guest'),
)
conn = pika.BlockingConnection(connection_parameters)
def callback(method):
print('connection blocked!')
conn.add_on_connection_blocked_callback(callback)
chan = conn.channel()
chan.basic_publish(
exchange='test_exchange',
routing_key='key',
body='test'
)
time.sleep(1)
conn.process_data_events()
for msg in chan.consume(queue='somequeue', inactivity_timeout=1):
print(msg)
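As a hedged mitigation sketch (not part of the original report): the `blocked_connection_timeout` parameter introduced in the patch above does not make the blocked callback fire any earlier, but it does bound how long such a blocked connection can stall before the adapter tears it down and raises `ConnectionClosed`; the 30-second value below is an arbitrary illustration:

    connection_parameters = pika.ConnectionParameters(
        host='192.168.1.101',
        port=5672,
        credentials=pika.PlainCredentials('guest', 'guest'),
        blocked_connection_timeout=30,
    )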
A possible solution might be to also make this `_flush_output` handle the `connection_blocked` event: https://github.com/pika/pika/blob/master/pika/adapters/blocking_connection.py#L1522 | pika/pika | diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py
index 9a44208..39446b8 100644
--- a/tests/acceptance/async_adapter_tests.py
+++ b/tests/acceptance/async_adapter_tests.py
@@ -15,6 +15,9 @@ import uuid
from pika import spec
from pika.compat import as_bytes
+import pika.connection
+import pika.frame
+import pika.spec
from async_test_base import (AsyncTestCase, BoundQueueTestCase, AsyncAdapters)
@@ -390,3 +393,68 @@ class TestZ_AccessDenied(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103
def on_open(self, connection):
super(TestZ_AccessDenied, self).on_open(connection)
self.stop()
+
+
+class TestBlockedConnectionTimesOut(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103
+ DESCRIPTION = "Verify that blocked connection terminates on timeout"
+
+ def start(self, *args, **kwargs):
+ self.parameters.blocked_connection_timeout = 0.001
+ self.on_closed_pair = None
+ super(TestBlockedConnectionTimesOut, self).start(*args, **kwargs)
+ self.assertEqual(
+ self.on_closed_pair,
+ (pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT,
+ 'Blocked connection timeout expired'))
+
+ def begin(self, channel):
+
+ # Simulate Connection.Blocked
+ channel.connection._on_connection_blocked(pika.frame.Method(
+ 0,
+ pika.spec.Connection.Blocked('Testing blocked connection timeout')))
+
+ def on_closed(self, connection, reply_code, reply_text):
+ """called when the connection has finished closing"""
+ self.on_closed_pair = (reply_code, reply_text)
+ super(TestBlockedConnectionTimesOut, self).on_closed(connection,
+ reply_code,
+ reply_text)
+
+
+class TestBlockedConnectionUnblocks(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103
+ DESCRIPTION = "Verify that blocked-unblocked connection closes normally"
+
+ def start(self, *args, **kwargs):
+ self.parameters.blocked_connection_timeout = 0.001
+ self.on_closed_pair = None
+ super(TestBlockedConnectionUnblocks, self).start(*args, **kwargs)
+ self.assertEqual(
+ self.on_closed_pair,
+ (200, 'Normal shutdown'))
+
+ def begin(self, channel):
+
+ # Simulate Connection.Blocked
+ channel.connection._on_connection_blocked(pika.frame.Method(
+ 0,
+ pika.spec.Connection.Blocked(
+ 'Testing blocked connection unblocks')))
+
+ # Simulate Connection.Unblocked
+ channel.connection._on_connection_unblocked(pika.frame.Method(
+ 0,
+ pika.spec.Connection.Unblocked()))
+
+ # Schedule shutdown after blocked connection timeout would expire
+ channel.connection.add_timeout(0.005, self.on_cleanup_timer)
+
+ def on_cleanup_timer(self):
+ self.stop()
+
+ def on_closed(self, connection, reply_code, reply_text):
+ """called when the connection has finished closing"""
+ self.on_closed_pair = (reply_code, reply_text)
+ super(TestBlockedConnectionUnblocks, self).on_closed(connection,
+ reply_code,
+ reply_text)
diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py
index a6e781a..4fac0f9 100644
--- a/tests/acceptance/blocking_adapter_test.py
+++ b/tests/acceptance/blocking_adapter_test.py
@@ -376,7 +376,7 @@ class TestProcessDataEvents(BlockingTestCaseBase):
self.assertLess(elapsed, 0.25)
-class TestConnectionBlockAndUnblock(BlockingTestCaseBase):
+class TestConnectionRegisterForBlockAndUnblock(BlockingTestCaseBase):
def test(self):
"""BlockingConnection register for Connection.Blocked/Unblocked"""
@@ -407,6 +407,35 @@ class TestConnectionBlockAndUnblock(BlockingTestCaseBase):
self.assertEqual(unblocked_buffer, ["unblocked"])
+class TestBlockedConnectionTimeout(BlockingTestCaseBase):
+
+ def test(self):
+ """BlockingConnection Connection.Blocked timeout """
+ url = DEFAULT_URL + '&blocked_connection_timeout=0.001'
+ conn = self._connect(url=url)
+
+ # NOTE: I haven't figured out yet how to coerce RabbitMQ to emit
+ # Connection.Block and Connection.Unblock from the test, so we'll
+ # simulate it for now
+
+ # Simulate Connection.Blocked
+ conn._impl._on_connection_blocked(pika.frame.Method(
+ 0,
+ pika.spec.Connection.Blocked('TestBlockedConnectionTimeout')))
+
+ # Wait for connection teardown
+ with self.assertRaises(pika.exceptions.ConnectionClosed) as excCtx:
+ while True:
+ conn.process_data_events(time_limit=1)
+
+ self.assertEqual(
+ excCtx.exception.args,
+ (pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT,
+ 'Blocked connection timeout expired'))
+
+
+
+
class TestAddTimeoutRemoveTimeout(BlockingTestCaseBase):
def test(self):
diff --git a/tests/unit/blocking_connection_tests.py b/tests/unit/blocking_connection_tests.py
index 8814bc9..775e490 100644
--- a/tests/unit/blocking_connection_tests.py
+++ b/tests/unit/blocking_connection_tests.py
@@ -18,7 +18,6 @@ Tests for pika.adapters.blocking_connection.BlockingConnection
import socket
-from pika.exceptions import AMQPConnectionError
try:
from unittest import mock # pylint: disable=E0611
@@ -33,6 +32,8 @@ except ImportError:
import pika
from pika.adapters import blocking_connection
+import pika.channel
+from pika.exceptions import AMQPConnectionError, ChannelClosed
class BlockingConnectionMockTemplate(blocking_connection.BlockingConnection):
@@ -198,7 +199,8 @@ class BlockingConnectionTests(unittest.TestCase):
'_process_io_for_connection_setup'):
connection = blocking_connection.BlockingConnection('params')
- connection._impl._channels = {1: mock.Mock()}
+ impl_channel_mock = mock.Mock()
+ connection._impl._channels = {1: impl_channel_mock}
with mock.patch.object(
blocking_connection.BlockingConnection,
@@ -207,6 +209,53 @@ class BlockingConnectionTests(unittest.TestCase):
connection._closed_result.signal_once()
connection.close(200, 'text')
+ impl_channel_mock._get_cookie.return_value.close.assert_called_once_with(
+ 200, 'text')
+ select_connection_class_mock.return_value.close.assert_called_once_with(
+ 200, 'text')
+
+ @patch.object(blocking_connection, 'SelectConnection',
+ spec_set=SelectConnectionTemplate)
+ def test_close_with_channel_closed_exception(self,
+ select_connection_class_mock):
+ select_connection_class_mock.return_value.is_closed = False
+
+ with mock.patch.object(blocking_connection.BlockingConnection,
+ '_process_io_for_connection_setup'):
+ connection = blocking_connection.BlockingConnection('params')
+
+ channel1_mock = mock.Mock(
+ is_open=True,
+ close=mock.Mock(side_effect=ChannelClosed,
+ spec_set=pika.channel.Channel.close),
+ spec_set=blocking_connection.BlockingChannel)
+
+ channel2_mock = mock.Mock(
+ is_open=True,
+ spec_set=blocking_connection.BlockingChannel)
+
+ connection._impl._channels = {
+ 1: mock.Mock(
+ _get_cookie=mock.Mock(
+ return_value=channel1_mock,
+ spec_set=pika.channel.Channel._get_cookie),
+ spec_set=pika.channel.Channel),
+ 2: mock.Mock(
+ _get_cookie=mock.Mock(
+ return_value=channel2_mock,
+ spec_set=pika.channel.Channel._get_cookie),
+ spec_set=pika.channel.Channel)
+ }
+
+ with mock.patch.object(
+ blocking_connection.BlockingConnection,
+ '_flush_output',
+ spec_set=blocking_connection.BlockingConnection._flush_output):
+ connection._closed_result.signal_once()
+ connection.close(200, 'text')
+
+ channel1_mock.close.assert_called_once_with(200, 'text')
+ channel2_mock.close.assert_called_once_with(200, 'text')
select_connection_class_mock.return_value.close.assert_called_once_with(
200, 'text')
diff --git a/tests/unit/channel_tests.py b/tests/unit/channel_tests.py
index 93294a3..4a31e89 100644
--- a/tests/unit/channel_tests.py
+++ b/tests/unit/channel_tests.py
@@ -1287,6 +1287,8 @@ class ChannelTests(unittest.TestCase):
method_frame = frame.Method(self.obj.channel_number,
spec.Channel.Close(999, 'Test_Value'))
self.obj._on_close(method_frame)
- warning.assert_called_with('Received remote Channel.Close (%s): %s',
- method_frame.method.reply_code,
- method_frame.method.reply_text)
+ warning.assert_called_with(
+ 'Received remote Channel.Close (%s): %r on channel %s',
+ method_frame.method.reply_code,
+ method_frame.method.reply_text,
+ self.obj)
diff --git a/tests/unit/connection_tests.py b/tests/unit/connection_tests.py
index f7e2a53..821dfa3 100644
--- a/tests/unit/connection_tests.py
+++ b/tests/unit/connection_tests.py
@@ -42,14 +42,15 @@ def callback_method():
class ConnectionTests(unittest.TestCase):
- @mock.patch('pika.connection.Connection.connect')
- def setUp(self, connect):
- self.connection = connection.Connection()
+ def setUp(self):
+ with mock.patch('pika.connection.Connection.connect'):
+ self.connection = connection.Connection()
+ self.connection._set_connection_state(
+ connection.Connection.CONNECTION_OPEN)
+
self.channel = mock.Mock(spec=channel.Channel)
self.channel.is_open = True
self.connection._channels[1] = self.channel
- self.connection._set_connection_state(
- connection.Connection.CONNECTION_OPEN)
def tearDown(self):
del self.connection
@@ -335,7 +336,8 @@ class ConnectionTests(unittest.TestCase):
'ssl': True,
'connection_attempts': 2,
'locale': 'en',
- 'ssl_options': {'ssl': 'options'}
+ 'ssl_options': {'ssl': 'options'},
+ 'blocked_connection_timeout': 10.5
}
conn = connection.ConnectionParameters(**kwargs)
#check values
@@ -356,9 +358,10 @@ class ConnectionTests(unittest.TestCase):
'frame_max': 40000,
'heartbeat_interval': 7,
'backpressure_detection': False,
- 'ssl': True
+ 'ssl': True,
+ 'blocked_connection_timeout': 10.5
}
- #Test Type Errors
+ # Test Type Errors
for bad_field, bad_value in (
('host', 15672), ('port', '5672'), ('virtual_host', True),
('channel_max', '4'), ('frame_max', '5'),
@@ -366,9 +369,13 @@ class ConnectionTests(unittest.TestCase):
('heartbeat_interval', '6'), ('socket_timeout', '42'),
('retry_delay', 'two'), ('backpressure_detection', 'true'),
('ssl', {'ssl': 'dict'}), ('ssl_options', True),
- ('connection_attempts', 'hello')):
+ ('connection_attempts', 'hello'),
+ ('blocked_connection_timeout', set())):
+
bkwargs = copy.deepcopy(kwargs)
+
bkwargs[bad_field] = bad_value
+
self.assertRaises(TypeError, connection.ConnectionParameters,
**bkwargs)
@@ -590,3 +597,205 @@ class ConnectionTests(unittest.TestCase):
self.assertEqual(1, self.connection.frames_received)
if frame_type == frame.Heartbeat:
self.assertTrue(self.connection.heartbeat.received.called)
+
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ @mock.patch.object(connection.Connection,
+ 'add_on_connection_blocked_callback')
+ @mock.patch.object(connection.Connection,
+ 'add_on_connection_unblocked_callback')
+ def test_create_with_blocked_connection_timeout_config(
+ self,
+ add_on_unblocked_callback_mock,
+ add_on_blocked_callback_mock,
+ connect_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ # Check
+ conn.add_on_connection_blocked_callback.assert_called_once_with(
+ conn._on_connection_blocked)
+
+ conn.add_on_connection_unblocked_callback.assert_called_once_with(
+ conn._on_connection_unblocked)
+
+ @mock.patch.object(connection.Connection, 'add_timeout')
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ def test_connection_blocked_sets_timer(
+ self,
+ connect_mock,
+ add_timeout_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ # Check
+ conn.add_timeout.assert_called_once_with(
+ 60,
+ conn._on_blocked_connection_timeout)
+
+ self.assertIsNotNone(conn._blocked_conn_timer)
+
+ @mock.patch.object(connection.Connection, 'add_timeout')
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ def test_multi_connection_blocked_in_a_row_sets_timer_once(
+ self,
+ connect_mock,
+ add_timeout_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ # Simulate Connection.Blocked trigger
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ # Check
+ conn.add_timeout.assert_called_once_with(
+ 60,
+ conn._on_blocked_connection_timeout)
+
+ self.assertIsNotNone(conn._blocked_conn_timer)
+
+ timer = conn._blocked_conn_timer
+
+ # Simulate Connection.Blocked trigger again
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ self.assertEqual(conn.add_timeout.call_count, 1)
+ self.assertIs(conn._blocked_conn_timer, timer)
+
+ @mock.patch.object(connection.Connection, '_on_terminate')
+ @mock.patch.object(connection.Connection, 'add_timeout',
+ spec_set=connection.Connection.add_timeout)
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ def test_blocked_connection_timeout_teminates_connection(
+ self,
+ connect_mock,
+ add_timeout_mock,
+ on_terminate_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ conn._on_blocked_connection_timeout()
+
+ # Check
+ conn._on_terminate.assert_called_once_with(
+ connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT,
+ 'Blocked connection timeout expired')
+
+ self.assertIsNone(conn._blocked_conn_timer)
+
+ @mock.patch.object(connection.Connection, 'remove_timeout')
+ @mock.patch.object(connection.Connection, 'add_timeout',
+ spec_set=connection.Connection.add_timeout)
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ def test_connection_unblocked_removes_timer(
+ self,
+ connect_mock,
+ add_timeout_mock,
+ remove_timeout_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ self.assertIsNotNone(conn._blocked_conn_timer)
+
+ timer = conn._blocked_conn_timer
+
+ conn._on_connection_unblocked(
+ mock.Mock(name='frame.Method(Connection.Unblocked)'))
+
+ # Check
+ conn.remove_timeout.assert_called_once_with(timer)
+ self.assertIsNone(conn._blocked_conn_timer)
+
+ @mock.patch.object(connection.Connection, 'remove_timeout')
+ @mock.patch.object(connection.Connection, 'add_timeout',
+ spec_set=connection.Connection.add_timeout)
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ def test_multi_connection_unblocked_in_a_row_removes_timer_once(
+ self,
+ connect_mock,
+ add_timeout_mock,
+ remove_timeout_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ # Simulate Connection.Blocked
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ self.assertIsNotNone(conn._blocked_conn_timer)
+
+ timer = conn._blocked_conn_timer
+
+ # Simulate Connection.Unblocked
+ conn._on_connection_unblocked(
+ mock.Mock(name='frame.Method(Connection.Unblocked)'))
+
+ # Check
+ conn.remove_timeout.assert_called_once_with(timer)
+ self.assertIsNone(conn._blocked_conn_timer)
+
+ # Simulate Connection.Unblocked again
+ conn._on_connection_unblocked(
+ mock.Mock(name='frame.Method(Connection.Unblocked)'))
+
+ self.assertEqual(conn.remove_timeout.call_count, 1)
+ self.assertIsNone(conn._blocked_conn_timer)
+
+ @mock.patch.object(connection.Connection, 'remove_timeout')
+ @mock.patch.object(connection.Connection, 'add_timeout',
+ spec_set=connection.Connection.add_timeout)
+ @mock.patch.object(connection.Connection, 'connect',
+ spec_set=connection.Connection.connect)
+ @mock.patch.object(connection.Connection, '_adapter_disconnect',
+ spec_set=connection.Connection._adapter_disconnect)
+ def test_on_terminate_removes_timer(
+ self,
+ adapter_disconnect_mock,
+ connect_mock,
+ add_timeout_mock,
+ remove_timeout_mock):
+
+ conn = connection.Connection(
+ parameters=connection.ConnectionParameters(
+ blocked_connection_timeout=60))
+
+ conn._on_connection_blocked(
+ mock.Mock(name='frame.Method(Connection.Blocked)'))
+
+ self.assertIsNotNone(conn._blocked_conn_timer)
+
+ timer = conn._blocked_conn_timer
+
+ conn._on_terminate(0, 'test_on_terminate_removes_timer')
+
+ # Check
+ conn.remove_timeout.assert_called_once_with(timer)
+ self.assertIsNone(conn._blocked_conn_timer)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 9
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"yapf",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libev-dev"
],
"python": "3.5",
"reqs_path": [
"test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==5.2.0
nose==1.3.7
packaging==21.3
-e git+https://github.com/pika/pika.git@c467ad22fb8f2fd2bc925fa59d3b082fd035302b#egg=pika
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.27.1
tomli==1.2.3
tornado==6.1
Twisted==15.3.0
typing_extensions==4.1.1
urllib3==1.26.20
yapf==0.32.0
zipp==3.6.0
zope.interface==5.5.2
| name: pika
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==5.2.0
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.27.1
- tomli==1.2.3
- tornado==6.1
- twisted==15.3.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- yapf==0.32.0
- zipp==3.6.0
- zope-interface==5.5.2
prefix: /opt/conda/envs/pika
| [
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close_with_channel_closed_exception",
"tests/unit/channel_tests.py::ChannelTests::test_on_close_warning",
"tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_timeout_teminates_connection",
"tests/unit/connection_tests.py::ConnectionTests::test_connection_blocked_sets_timer",
"tests/unit/connection_tests.py::ConnectionTests::test_connection_unblocked_removes_timer",
"tests/unit/connection_tests.py::ConnectionTests::test_create_with_blocked_connection_timeout_config",
"tests/unit/connection_tests.py::ConnectionTests::test_good_connection_parameters",
"tests/unit/connection_tests.py::ConnectionTests::test_multi_connection_blocked_in_a_row_sets_timer_once",
"tests/unit/connection_tests.py::ConnectionTests::test_multi_connection_unblocked_in_a_row_removes_timer_once",
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_removes_timer"
]
| [
"tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test",
"tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test",
"tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test",
"tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test",
"tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test",
"tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test",
"tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test",
"tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test",
"tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test",
"tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test",
"tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test",
"tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test",
"tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test",
"tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test",
"tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test",
"tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test",
"tests/acceptance/blocking_adapter_test.py::TestSleep::test",
"tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test",
"tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test",
"tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test",
"tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test",
"tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test",
"tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test",
"tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicGet::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicReject::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicNack::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test",
"tests/acceptance/blocking_adapter_test.py::TestTxCommit::test",
"tests/acceptance/blocking_adapter_test.py::TestTxRollback::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test",
"tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test",
"tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test",
"tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test",
"tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test",
"tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test",
"tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test",
"tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test",
"tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test",
"tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test",
"tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test",
"tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test",
"tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test",
"tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test",
"tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test",
"tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test",
"tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test"
]
| [
"tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test",
"tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_channel",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_attempts_with_timeout",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_constructor",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_error_close",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_no_error_close",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_user_initiated_close",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup_fails_with_open_error",
"tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_sleep",
"tests/unit/channel_tests.py::ChannelTests::test_add_callback",
"tests/unit/channel_tests.py::ChannelTests::test_add_callback_multiple_replies",
"tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_cancel_empty_added",
"tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_get_empty_added",
"tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_close_added",
"tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_flow_added",
"tests/unit/channel_tests.py::ChannelTests::test_add_on_cancel_callback",
"tests/unit/channel_tests.py::ChannelTests::test_add_on_close_callback",
"tests/unit/channel_tests.py::ChannelTests::test_add_on_flow_callback",
"tests/unit/channel_tests.py::ChannelTests::test_add_on_return_callback",
"tests/unit/channel_tests.py::ChannelTests::test_basic_ack_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_callback_appended",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_calls_validate",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_channel_cancelled_appended",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_no_consumer_tag",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_on_cancel_appended",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_raises_value_error",
"tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_then_close",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_calls_validate",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_cancelled_full",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_in_consumers",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_callback_value",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_pending_list_is_empty",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_called",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_duplicate_consumer_tag_raises",
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_has_pending_list",
"tests/unit/channel_tests.py::ChannelTests::test_basic_get_callback",
"tests/unit/channel_tests.py::ChannelTests::test_basic_get_calls_validate",
"tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called",
"tests/unit/channel_tests.py::ChannelTests::test_basic_nack_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_nack_send_method_request",
"tests/unit/channel_tests.py::ChannelTests::test_basic_publish_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_publish_send_method_request",
"tests/unit/channel_tests.py::ChannelTests::test_basic_qos_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_qos_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_basic_recover_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_recover_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_basic_reject_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_int_tag",
"tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_long_tag",
"tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_int_tag",
"tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_long_tag",
"tests/unit/channel_tests.py::ChannelTests::test_basic_send_method_calls_rpc",
"tests/unit/channel_tests.py::ChannelTests::test_channel_open_add_callbacks_called",
"tests/unit/channel_tests.py::ChannelTests::test_cleanup",
"tests/unit/channel_tests.py::ChannelTests::test_close_basic_cancel_called",
"tests/unit/channel_tests.py::ChannelTests::test_close_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_close_state",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_ack",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_nack",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_with_nowait_raises_value_error",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_without_nowait_selectok",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_callback_call_count",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_no_basic_ack_callback",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_no_basic_nack_callback",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_confirms",
"tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_nack",
"tests/unit/channel_tests.py::ChannelTests::test_consumer_tags",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_with_type_arg_assigns_to_exchange_type",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_with_type_arg_raises_deprecation_warning",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_flow_off_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_flow_on_flowok_callback",
"tests/unit/channel_tests.py::ChannelTests::test_flow_on_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_flow_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_flow_raises_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_get_pending_message",
"tests/unit/channel_tests.py::ChannelTests::test_get_pending_message_item_popped",
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_deliver_called",
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_get_called",
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_return_called",
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_method_returns_none",
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_header_frame",
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_method_frame",
"tests/unit/channel_tests.py::ChannelTests::test_has_content_false",
"tests/unit/channel_tests.py::ChannelTests::test_has_content_true",
"tests/unit/channel_tests.py::ChannelTests::test_immediate_called_logger_warning",
"tests/unit/channel_tests.py::ChannelTests::test_init_blocked",
"tests/unit/channel_tests.py::ChannelTests::test_init_blocking",
"tests/unit/channel_tests.py::ChannelTests::test_init_callbacks",
"tests/unit/channel_tests.py::ChannelTests::test_init_cancelled",
"tests/unit/channel_tests.py::ChannelTests::test_init_channel_number",
"tests/unit/channel_tests.py::ChannelTests::test_init_connection",
"tests/unit/channel_tests.py::ChannelTests::test_init_consumers",
"tests/unit/channel_tests.py::ChannelTests::test_init_frame_dispatcher",
"tests/unit/channel_tests.py::ChannelTests::test_init_has_on_flow_callback",
"tests/unit/channel_tests.py::ChannelTests::test_init_invalid_channel_number",
"tests/unit/channel_tests.py::ChannelTests::test_init_on_flowok_callback",
"tests/unit/channel_tests.py::ChannelTests::test_init_on_getok_callback",
"tests/unit/channel_tests.py::ChannelTests::test_init_on_openok_callback",
"tests/unit/channel_tests.py::ChannelTests::test_init_pending",
"tests/unit/channel_tests.py::ChannelTests::test_init_state",
"tests/unit/channel_tests.py::ChannelTests::test_is_closed_false",
"tests/unit/channel_tests.py::ChannelTests::test_is_closed_true",
"tests/unit/channel_tests.py::ChannelTests::test_is_closing_false",
"tests/unit/channel_tests.py::ChannelTests::test_is_closing_true",
"tests/unit/channel_tests.py::ChannelTests::test_on_cancel_not_appended_cancelled",
"tests/unit/channel_tests.py::ChannelTests::test_on_cancel_removed_consumer",
"tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_consumer",
"tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_pending",
"tests/unit/channel_tests.py::ChannelTests::test_on_confirm_selectok",
"tests/unit/channel_tests.py::ChannelTests::test_on_deliver_callback_called",
"tests/unit/channel_tests.py::ChannelTests::test_on_deliver_pending_callbacks_called",
"tests/unit/channel_tests.py::ChannelTests::test_on_deliver_pending_called",
"tests/unit/channel_tests.py::ChannelTests::test_on_eventok",
"tests/unit/channel_tests.py::ChannelTests::test_on_flow",
"tests/unit/channel_tests.py::ChannelTests::test_on_flow_with_callback",
"tests/unit/channel_tests.py::ChannelTests::test_on_flowok",
"tests/unit/channel_tests.py::ChannelTests::test_on_flowok_callback_reset",
"tests/unit/channel_tests.py::ChannelTests::test_on_flowok_calls_callback",
"tests/unit/channel_tests.py::ChannelTests::test_on_getempty",
"tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_called",
"tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_reset",
"tests/unit/channel_tests.py::ChannelTests::test_on_getok_no_callback",
"tests/unit/channel_tests.py::ChannelTests::test_on_openok_callback_called",
"tests/unit/channel_tests.py::ChannelTests::test_on_openok_no_callback",
"tests/unit/channel_tests.py::ChannelTests::test_on_synchronous_complete",
"tests/unit/channel_tests.py::ChannelTests::test_onreturn",
"tests/unit/channel_tests.py::ChannelTests::test_onreturn_warning",
"tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request_nowait",
"tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_value_error_on_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_adds_callback",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_enters_blocking_and_adds_on_synchronous_complete",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_not_blocking_and_no_on_synchronous_complete_when_no_replies",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_type_error_with_invalid_callback",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_value_error_with_unacceptable_replies",
"tests/unit/channel_tests.py::ChannelTests::test_rpc_while_blocking_appends_blocked_collection",
"tests/unit/channel_tests.py::ChannelTests::test_send_method",
"tests/unit/channel_tests.py::ChannelTests::test_set_state",
"tests/unit/channel_tests.py::ChannelTests::test_tx_commit_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_tx_commit_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_tx_rollback_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_tx_select_rpc_request",
"tests/unit/channel_tests.py::ChannelTests::test_validate_channel_and_callback_raises_channel_closed",
"tests/unit/channel_tests.py::ChannelTests::test_validate_channel_and_callback_raises_value_error_not_callable",
"tests/unit/connection_tests.py::ConnectionTests::test_add_callbacks",
"tests/unit/connection_tests.py::ConnectionTests::test_add_on_close_callback",
"tests/unit/connection_tests.py::ConnectionTests::test_add_on_open_error_callback",
"tests/unit/connection_tests.py::ConnectionTests::test_bad_type_connection_parameters",
"tests/unit/connection_tests.py::ConnectionTests::test_channel",
"tests/unit/connection_tests.py::ConnectionTests::test_client_properties",
"tests/unit/connection_tests.py::ConnectionTests::test_close_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_close_closes_open_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_close_ignores_closed_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_connect",
"tests/unit/connection_tests.py::ConnectionTests::test_connect_reconnect",
"tests/unit/connection_tests.py::ConnectionTests::test_new_conn_should_use_first_channel",
"tests/unit/connection_tests.py::ConnectionTests::test_next_channel_number_returns_lowest_unused",
"tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_no_open_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_non_closing_state",
"tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_open_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_on_close_ready_no_open_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_on_close_ready_open_channels",
"tests/unit/connection_tests.py::ConnectionTests::test_on_connection_close_ok",
"tests/unit/connection_tests.py::ConnectionTests::test_on_connection_closed",
"tests/unit/connection_tests.py::ConnectionTests::test_on_connection_start",
"tests/unit/connection_tests.py::ConnectionTests::test_on_connection_tune",
"tests/unit/connection_tests.py::ConnectionTests::test_on_data_available",
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_cleans_up",
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_access_denied_on_connection_error_and_closed",
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_auth_on_connection_error_and_closed",
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_connection_closed_callback",
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_protocol_on_connection_error_and_closed",
"tests/unit/connection_tests.py::ConnectionTests::test_process_url",
"tests/unit/connection_tests.py::ConnectionTests::test_set_backpressure_multiplier"
]
| []
| BSD 3-Clause "New" or "Revised" License | 416 | [
"docs/examples/heartbeat_and_blocked_timeouts.rst",
"pika/adapters/blocking_connection.py",
"pika/adapters/select_connection.py",
"pika/adapters/twisted_connection.py",
"pika/channel.py",
"pika/connection.py",
"examples/heatbeat_and_blocked_timeouts.py",
"docs/examples/asynchronous_publisher_example.rst",
"pika/adapters/base_connection.py",
"docs/version_history.rst",
"docs/examples/blocking_consume.rst"
]
| [
"docs/examples/heartbeat_and_blocked_timeouts.rst",
"pika/adapters/blocking_connection.py",
"pika/adapters/select_connection.py",
"pika/adapters/twisted_connection.py",
"pika/channel.py",
"pika/connection.py",
"examples/heatbeat_and_blocked_timeouts.py",
"docs/examples/asynchronous_publisher_example.rst",
"pika/adapters/base_connection.py",
"docs/version_history.rst",
"docs/examples/blocking_consume.rst"
]
|
picovico__python-sdk-13 | 5bbb1e00f64eeeb87bc682afe2b535394b1dd682 | 2016-02-08 11:51:34 | 5bbb1e00f64eeeb87bc682afe2b535394b1dd682 | diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index ca7fe0a..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-*.pyc
-__pycache__
-.cache
-.tox
-.eggs
-*.egg-info
diff --git a/picovico/__init__.py b/picovico/__init__.py
index 81dadc4..7e14d3d 100644
--- a/picovico/__init__.py
+++ b/picovico/__init__.py
@@ -1,5 +1,5 @@
from .session import PicovicoSessionMixin
-from .base import PicovicoRequest
+from .baserequest import PicovicoRequest
from .components import PicovicoComponentMixin
from .decorators import pv_auth_required
from . import urls as pv_urls
@@ -12,7 +12,7 @@ class PicovicoAPI(PicovicoSessionMixin, PicovicoComponentMixin):
if self.is_authorized():
self._ready_component_property()
-
+
def login(self, username, password):
""" Picovico: login with username and password """
assert username, 'username is required for login'
| Categorize URL config based on API endpoints.
The URL endpoints should clearly define the workflow. They should be categorized. | picovico/python-sdk | diff --git a/tests/pv_api_test.py b/tests/pv_api_test.py
index 377a51a..e368fe8 100644
--- a/tests/pv_api_test.py
+++ b/tests/pv_api_test.py
@@ -13,7 +13,7 @@ class TestPicovicoAPI:
post_call = method_calls.get('post').copy()
with pytest.raises(pv_exceptions.PicovicoAPINotAllowed):
api.me()
- with mock.patch('picovico.base.requests.request') as mr:
+ with mock.patch('picovico.baserequest.requests.request') as mr:
mr.return_value = auth_response
api.login(*calls[2:])
post_call.update(data=dict(zip(calls, calls)), url=parse.urljoin(pv_urls.PICOVICO_BASE, pv_urls.PICOVICO_LOGIN))
@@ -42,13 +42,13 @@ class TestPicovicoAPI:
assert not api.is_authorized()
def test_login_authenticate(self, auth_response):
- with mock.patch('picovico.base.requests.request') as mr:
+ with mock.patch('picovico.baserequest.requests.request') as mr:
mr.return_value = auth_response
api = PicovicoAPI('app_id', 'device_id')
assert not api.is_authorized()
api.login('username', 'password')
assert api.is_authorized()
- with mock.patch('picovico.base.requests.request') as mr:
+ with mock.patch('picovico.baserequest.requests.request') as mr:
mr.return_value = auth_response
api = PicovicoAPI('app_id', 'device_id')
assert not api.is_authorized()
diff --git a/tests/pv_base_test.py b/tests/pv_base_test.py
index 34c7962..9939c52 100644
--- a/tests/pv_base_test.py
+++ b/tests/pv_base_test.py
@@ -3,7 +3,7 @@ import mock
import six
from six.moves.urllib import parse
-from picovico import base as api
+from picovico import baserequest as api
from picovico import urls
from picovico import exceptions
@@ -28,7 +28,7 @@ class TestPicovicoRequest:
assert args['url'] == parse.urljoin(urls.PICOVICO_BASE, urls.ME)
def test_api_methods(self, success_response):
- with mock.patch('picovico.base.requests.request') as mr:
+ with mock.patch('picovico.baserequest.requests.request') as mr:
mr.return_value = success_response
pv_api = api.PicovicoRequest()
assert pv_api.get(urls.ME) == success_response.json()
@@ -37,7 +37,7 @@ class TestPicovicoRequest:
with pytest.raises(AssertionError) as excinfo:
pv_api.post(urls.ME, data="hello")
assert success_response.json() == pv_api.put(urls.ME)
- with mock.patch('picovico.base.open', mock.mock_open(read_data='bibble')):
+ with mock.patch('picovico.baserequest.open', mock.mock_open(read_data='bibble')):
pv_api.put(urls.ME, filename="fo", data_headers={'MUSIC_NAME': "Hello"}, )
assert 'MUSIC_NAME' in pv_api.headers
assert pv_api.request_args['method'] == 'put'
diff --git a/tests/pv_component_test.py b/tests/pv_component_test.py
index def6fb6..2ea7dc2 100644
--- a/tests/pv_component_test.py
+++ b/tests/pv_component_test.py
@@ -53,7 +53,7 @@ class TestComponent:
def test_library_and_free_component(self, success_response, method_calls, response_messages):
req = pv_base.PicovicoRequest(response_messages.get('valid_auth_header'))
style_component = PicovicoStyle(req)
- with mock.patch('picovico.base.requests.request') as mr:
+ with mock.patch('picovico.baserequest.requests.request') as mr:
mr.return_value = success_response
get_call = method_calls.get('get').copy()
get_call.update(url=parse.urljoin(pv_urls.PICOVICO_BASE, pv_urls.PICOVICO_STYLES))
diff --git a/tests/pv_specific_component_test.py b/tests/pv_specific_component_test.py
index 964fd42..eb60208 100644
--- a/tests/pv_specific_component_test.py
+++ b/tests/pv_specific_component_test.py
@@ -12,7 +12,7 @@ class TestPhotoComponent:
req = PicovicoRequest(auth_header)
ph_comp = PicovicoPhoto(req)
assert ph_comp.component == 'photo'
- with mock.patch('picovico.base.requests.request') as mr:
+ with mock.patch('picovico.baserequest.requests.request') as mr:
mr.return_value = success_response
args = ("something", "something_thumb")
ph_comp.upload_photo_url(*args)
@@ -20,4 +20,4 @@ class TestPhotoComponent:
post_request.update(data=dict(zip(('url', 'thumbnail_url'), args)))
post_request.update(headers=auth_header)
mr.assert_called_with(**post_request)
-
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_removed_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
-e git+https://github.com/picovico/python-sdk.git@5bbb1e00f64eeeb87bc682afe2b535394b1dd682#egg=picovico
pluggy==1.5.0
pytest==8.3.5
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: python-sdk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/python-sdk
| [
"tests/pv_api_test.py::TestPicovicoAPI::test_auth_decoration",
"tests/pv_api_test.py::TestPicovicoAPI::test_api_proxy",
"tests/pv_api_test.py::TestPicovicoAPI::test_login_authenticate",
"tests/pv_base_test.py::TestPicovicoRequest::test_properties",
"tests/pv_base_test.py::TestPicovicoRequest::test_request_args",
"tests/pv_base_test.py::TestPicovicoRequest::test_api_methods",
"tests/pv_base_test.py::TestPicovicoRequest::test_authentication_header",
"tests/pv_component_test.py::TestComponentMixin::test_component_property",
"tests/pv_component_test.py::TestComponent::test_component_methods",
"tests/pv_component_test.py::TestComponent::test_photo_component",
"tests/pv_component_test.py::TestComponent::test_style_component",
"tests/pv_component_test.py::TestComponent::test_library_and_free_component",
"tests/pv_specific_component_test.py::TestPhotoComponent::test_upload_url"
]
| []
| []
| []
| null | 417 | [
".gitignore",
"picovico/__init__.py"
]
| [
".gitignore",
"picovico/__init__.py"
]
|
|
SiftScience__sift-python-34 | a9993ecae67cc4804fd572cd3ef95ec9535758a2 | 2016-02-08 22:37:17 | a9993ecae67cc4804fd572cd3ef95ec9535758a2 | diff --git a/README.rst b/README.rst
index 30404c0..4d3f726 100644
--- a/README.rst
+++ b/README.rst
@@ -68,9 +68,9 @@ Here's an example:
"$payment_gateway" : "$braintree",
"$card_bin" : "542486",
"$card_last4" : "4444"
- },
- "$currency_code" : "USD",
- "$amount" : 15230000,
+ },
+ "$currency_code" : "USD",
+ "$amount" : 15230000,
}
response = client.track(event, properties)
diff --git a/setup.py b/setup.py
index dd04072..e60cbaf 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ except:
setup(
name='Sift',
description='Python bindings for Sift Science\'s API',
- version='1.1.2.2', # NB: must be kept in sync with sift/version.py
+ version='1.1.2.3', # NB: must be kept in sync with sift/version.py
url='https://siftscience.com',
author='Sift Science',
diff --git a/sift/client.py b/sift/client.py
index 68e9311..72fbe38 100644
--- a/sift/client.py
+++ b/sift/client.py
@@ -105,8 +105,7 @@ class Client(object):
return Response(response)
except requests.exceptions.RequestException as e:
warnings.warn('Failed to track event: %s' % properties)
- warnings.warn(traceback.format_exception_only(type(e), e))
-
+ warnings.warn(traceback.format_exc())
return e
def score(self, user_id, timeout = None):
@@ -137,8 +136,7 @@ class Client(object):
return Response(response)
except requests.exceptions.RequestException as e:
warnings.warn('Failed to get score for user %s' % user_id)
- warnings.warn(traceback.format_exception_only(type(e), e))
-
+ warnings.warn(traceback.format_exc())
return e
def label(self, user_id, properties, timeout = None):
@@ -192,8 +190,7 @@ class Client(object):
except requests.exceptions.RequestException as e:
warnings.warn('Failed to unlabel user %s' % user_id)
- warnings.warn(traceback.format_exception_only(type(e), e))
-
+ warnings.warn(traceback.format_exc())
return e
diff --git a/sift/version.py b/sift/version.py
index 3b2ddd3..5a6aa7f 100644
--- a/sift/version.py
+++ b/sift/version.py
@@ -1,3 +1,3 @@
# NB: Be sure to keep in sync w/ setup.py
-VERSION = '1.1.2.2'
+VERSION = '1.1.2.3'
API_VERSION = '203'
| Warnings cause TypeError
I'm seeing some TypeError exceptions, it seems that the new warnings / exception handling code has some issues?
```
...
File "tasks.py", line 296, in track_sift_event
response = sift_client.track(event_type, attrs)
File "/home/ubuntu/closeio/venv/local/lib/python2.7/site-packages/sift/client.py", line 108, in track
warnings.warn(traceback.format_exception_only(type(e), e))
TypeError: expected string or buffer
```
Not sure exactly how/when/why this happens but it seems that the Sift responses aren't always handled properly.
/cc @JohnMcSpedon @fredsadaghiani | SiftScience/sift-python | diff --git a/tests/client_test.py b/tests/client_test.py
index e2d57c0..1c0b43e 100644
--- a/tests/client_test.py
+++ b/tests/client_test.py
@@ -1,4 +1,5 @@
import datetime
+import warnings
import json
import mock
import sift
@@ -303,6 +304,41 @@ class TestSiftPythonClient(unittest.TestCase):
assert(response.is_ok())
assert(response.api_error_message == "OK")
assert(response.body['score'] == 0.55)
+
+ def test_exception_during_track_call(self):
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("always")
+ with mock.patch('requests.post') as mock_post:
+ mock_post.side_effect = mock.Mock(side_effect = requests.exceptions.RequestException("Failed"))
+ response = self.sift_client.track('$transaction', valid_transaction_properties())
+ assert(len(w) == 2)
+ assert('Failed to track event:' in str(w[0].message))
+ assert('RequestException: Failed' in str(w[1].message))
+ assert('Traceback' in str(w[1].message))
+
+ def test_exception_during_score_call(self):
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("always")
+ with mock.patch('requests.get') as mock_get:
+ mock_get.side_effect = mock.Mock(side_effect = requests.exceptions.RequestException("Failed"))
+ response = self.sift_client.score('Fred')
+ assert(len(w) == 2)
+ assert('Failed to get score for user Fred' in str(w[0].message))
+ assert('RequestException: Failed' in str(w[1].message))
+ assert('Traceback' in str(w[1].message))
+
+ def test_exception_during_unlabel_call(self):
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("always")
+ with mock.patch('requests.delete') as mock_delete:
+ mock_delete.side_effect = mock.Mock(side_effect = requests.exceptions.RequestException("Failed"))
+ response = self.sift_client.unlabel('Fred')
+
+ assert(len(w) == 2)
+ assert('Failed to unlabel user Fred' in str(w[0].message))
+ assert('RequestException: Failed' in str(w[1].message))
+ assert('Traceback' in str(w[1].message))
+
def main():
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 4
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
requests==2.32.3
-e git+https://github.com/SiftScience/sift-python.git@a9993ecae67cc4804fd572cd3ef95ec9535758a2#egg=Sift
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
urllib3==2.3.0
| name: sift-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- idna==3.10
- mock==5.2.0
- requests==2.32.3
- urllib3==2.3.0
prefix: /opt/conda/envs/sift-python
| [
"tests/client_test.py::TestSiftPythonClient::test_exception_during_score_call",
"tests/client_test.py::TestSiftPythonClient::test_exception_during_track_call",
"tests/client_test.py::TestSiftPythonClient::test_exception_during_unlabel_call"
]
| []
| [
"tests/client_test.py::TestSiftPythonClient::test_constructor_api_key",
"tests/client_test.py::TestSiftPythonClient::test_constructor_invalid_api_url",
"tests/client_test.py::TestSiftPythonClient::test_constructor_requires_valid_api_key",
"tests/client_test.py::TestSiftPythonClient::test_event_ok",
"tests/client_test.py::TestSiftPythonClient::test_event_with_timeout_param_ok",
"tests/client_test.py::TestSiftPythonClient::test_global_api_key",
"tests/client_test.py::TestSiftPythonClient::test_label_user__with_special_chars_ok",
"tests/client_test.py::TestSiftPythonClient::test_label_user_ok",
"tests/client_test.py::TestSiftPythonClient::test_label_user_with_timeout_param_ok",
"tests/client_test.py::TestSiftPythonClient::test_score__with_special_user_id_chars_ok",
"tests/client_test.py::TestSiftPythonClient::test_score_ok",
"tests/client_test.py::TestSiftPythonClient::test_score_requires_user_id",
"tests/client_test.py::TestSiftPythonClient::test_score_with_timeout_param_ok",
"tests/client_test.py::TestSiftPythonClient::test_sync_score_ok",
"tests/client_test.py::TestSiftPythonClient::test_track_requires_properties",
"tests/client_test.py::TestSiftPythonClient::test_track_requires_valid_event",
"tests/client_test.py::TestSiftPythonClient::test_unicode_string_parameter_support",
"tests/client_test.py::TestSiftPythonClient::test_unlabel_user_ok",
"tests/client_test.py::TestSiftPythonClient::test_unlabel_user_with_special_chars_ok"
]
| []
| MIT License | 418 | [
"README.rst",
"sift/client.py",
"sift/version.py",
"setup.py"
]
| [
"README.rst",
"sift/client.py",
"sift/version.py",
"setup.py"
]
|
|
docker__docker-py-928 | 575305fdba6c57f06d605920e01b5e1d6b952d3e | 2016-02-09 00:47:19 | 4c34be5d4ab8a5a017950712e9c96b56d78d1c58 | diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index 4404c217..61e5a8dc 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -128,7 +128,13 @@ def exclude_paths(root, patterns, dockerfile=None):
paths = get_paths(root, exclude_patterns, include_patterns,
has_exceptions=len(exceptions) > 0)
- return set(paths)
+ return set(paths).union(
+ # If the Dockerfile is in a subdirectory that is excluded, get_paths
+ # will not descend into it and the file will be skipped. This ensures
+ # it doesn't happen.
+ set([dockerfile])
+ if os.path.exists(os.path.join(root, dockerfile)) else set()
+ )
def should_include(path, exclude_patterns, include_patterns):
| [1.7] regression in .dockerignore handling
If the `Dockerfile` is being ignored by a path in the `.dockerignore` file, it is incorrectly being removed from the context. There is a special case handling when the file is being excluded directly, but it should also apply when there is a path which includes the `Dockerfile`.
Possibly caused by #863
```
docker-py version: 1.7.0
$ tree -a
.
├── Docker
│ ├── dc.yml
│ └── Dockerfile.debug
└── .dockerignore
1 directory, 3 files
$ cat Docker/Dockerfile.debug
FROM alpine:3.3
RUN echo building
CMD echo run
$ cat Docker/dc.yml
app:
build: ..
dockerfile: Docker/Dockerfile.debug
$ cat .dockerignore
Docker/
docker-compose -f Docker/dc.yml build
ERROR: Cannot locate specified Dockerfile: Docker/Dockerfile.debug
``` | docker/docker-py | diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index df29b9d3..a0a96bbe 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -736,6 +736,7 @@ class ExcludePathsTest(base.BaseTestCase):
'foo/b.py',
'foo/bar/a.py',
'bar/a.py',
+ 'foo/Dockerfile3',
]
all_paths = set(dirs + files)
@@ -775,6 +776,14 @@ class ExcludePathsTest(base.BaseTestCase):
assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \
set(['Dockerfile.alt', '.dockerignore'])
+ assert self.exclude(['*'], dockerfile='foo/Dockerfile3') == \
+ set(['foo/Dockerfile3', '.dockerignore'])
+
+ def test_exclude_dockerfile_child(self):
+ includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
+ assert 'foo/Dockerfile3' in includes
+ assert 'foo/a.py' not in includes
+
def test_single_filename(self):
assert self.exclude(['a.py']) == self.all_paths - set(['a.py'])
@@ -825,28 +834,31 @@ class ExcludePathsTest(base.BaseTestCase):
def test_directory(self):
assert self.exclude(['foo']) == self.all_paths - set([
'foo', 'foo/a.py', 'foo/b.py',
- 'foo/bar', 'foo/bar/a.py',
+ 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
])
def test_directory_with_trailing_slash(self):
assert self.exclude(['foo']) == self.all_paths - set([
'foo', 'foo/a.py', 'foo/b.py',
- 'foo/bar', 'foo/bar/a.py',
+ 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
])
def test_directory_with_single_exception(self):
assert self.exclude(['foo', '!foo/bar/a.py']) == self.all_paths - set([
- 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar'
+ 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar',
+ 'foo/Dockerfile3'
])
def test_directory_with_subdir_exception(self):
assert self.exclude(['foo', '!foo/bar']) == self.all_paths - set([
- 'foo/a.py', 'foo/b.py', 'foo'
+ 'foo/a.py', 'foo/b.py', 'foo',
+ 'foo/Dockerfile3'
])
def test_directory_with_wildcard_exception(self):
assert self.exclude(['foo', '!foo/*.py']) == self.all_paths - set([
- 'foo/bar', 'foo/bar/a.py', 'foo'
+ 'foo/bar', 'foo/bar/a.py', 'foo',
+ 'foo/Dockerfile3'
])
def test_subdirectory(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@575305fdba6c57f06d605920e01b5e1d6b952d3e#egg=docker_py
execnet==1.9.0
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child"
]
| [
"tests/unit/utils_test.py::SSLAdapterTest::test_only_uses_tls"
]
| [
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 419 | [
"docker/utils/utils.py"
]
| [
"docker/utils/utils.py"
]
|
|
docker__docker-py-929 | 575305fdba6c57f06d605920e01b5e1d6b952d3e | 2016-02-09 02:18:57 | 4c34be5d4ab8a5a017950712e9c96b56d78d1c58 | dnephin: LGTM | diff --git a/docker/auth/auth.py b/docker/auth/auth.py
index 399dae2b..eedb7944 100644
--- a/docker/auth/auth.py
+++ b/docker/auth/auth.py
@@ -46,7 +46,7 @@ def resolve_repository_name(repo_name):
def resolve_index_name(index_name):
index_name = convert_to_hostname(index_name)
- if index_name == 'index.'+INDEX_NAME:
+ if index_name == 'index.' + INDEX_NAME:
index_name = INDEX_NAME
return index_name
@@ -102,12 +102,14 @@ def encode_header(auth):
return base64.urlsafe_b64encode(auth_json)
-def parse_auth(entries):
+def parse_auth(entries, raise_on_error=False):
"""
Parses authentication entries
Args:
- entries: Dict of authentication entries.
+ entries: Dict of authentication entries.
+ raise_on_error: If set to true, an invalid format will raise
+ InvalidConfigFile
Returns:
Authentication registry.
@@ -115,6 +117,19 @@ def parse_auth(entries):
conf = {}
for registry, entry in six.iteritems(entries):
+ if not (isinstance(entry, dict) and 'auth' in entry):
+ log.debug(
+ 'Config entry for key {0} is not auth config'.format(registry)
+ )
+ # We sometimes fall back to parsing the whole config as if it was
+ # the auth config by itself, for legacy purposes. In that case, we
+ # fail silently and return an empty conf if any of the keys is not
+ # formatted properly.
+ if raise_on_error:
+ raise errors.InvalidConfigFile(
+ 'Invalid configuration for registry {0}'.format(registry)
+ )
+ return {}
username, password = decode_auth(entry['auth'])
log.debug(
'Found entry (registry={0}, username={1})'
@@ -170,7 +185,7 @@ def load_config(config_path=None):
res = {}
if data.get('auths'):
log.debug("Found 'auths' section")
- res.update(parse_auth(data['auths']))
+ res.update(parse_auth(data['auths'], raise_on_error=True))
if data.get('HttpHeaders'):
log.debug("Found 'HttpHeaders' section")
res.update({'HttpHeaders': data['HttpHeaders']})
| Using a docker/config.json file causes "TypeError: string indices must be integers"
Using a ~/.docker/config.json file causes docker-compose to output a Python error. @dnephin in https://github.com/docker/compose/issues/2697#issuecomment-172936366 suggests that this is an issue to be raised with the docker-py project instead of the compose project.
So here's a simple config.json file, and a dummy docker-compose.yml file which demonstrate the issue:
```
$ cat ~/.docker/config.json
{
"detachKeys": "ctrl-q,ctrl-u,ctrl-i,ctrl-t"
}
$ cat docker-compose.yml
version: '2'
services:
s1:
image: ubuntu
$ docker-compose ps
Traceback (most recent call last):
File "<string>", line 3, in <module>
File "/compose/compose/cli/main.py", line 55, in main
File "/compose/compose/cli/docopt_command.py", line 23, in sys_dispatch
File "/compose/compose/cli/docopt_command.py", line 26, in dispatch
File "/compose/compose/cli/main.py", line 172, in perform_command
File "/compose/compose/cli/command.py", line 52, in project_from_options
File "/compose/compose/cli/command.py", line 85, in get_project
File "/compose/compose/cli/command.py", line 66, in get_client
File "/compose/compose/cli/docker_client.py", line 37, in docker_client
File "/compose/venv/lib/python2.7/site-packages/docker/client.py", line 56, in __init__
File "/compose/venv/lib/python2.7/site-packages/docker/auth/auth.py", line 182, in load_config
File "/compose/venv/lib/python2.7/site-packages/docker/auth/auth.py", line 118, in parse_auth
TypeError: string indices must be integers
docker-compose returned -1
```
And if we delete the config and try again:
```
$ rm ~/.docker/config.json
$ docker-compose ps
Name Command State Ports
------------------------------
```
I'm using Docker Toolbox 1.10.0 on OSX 10.11.3. The docker container is being run on a VirtualBox VM managed by docker-machine. Here are some more version numbers:
```
$ docker version
Client:
Version: 1.10.0
API version: 1.22
Go version: go1.5.3
Git commit: 590d5108
Built: Thu Feb 4 18:18:11 2016
OS/Arch: darwin/amd64
Server:
Version: 1.10.0
API version: 1.22
Go version: go1.5.3
Git commit: 590d5108
Built: Thu Feb 4 19:55:25 2016
OS/Arch: linux/amd64
$ docker-compose version
docker-compose version 1.6.0, build d99cad6
docker-py version: 1.7.0
CPython version: 2.7.9
OpenSSL version: OpenSSL 1.0.1j 15 Oct 2014
``` | docker/docker-py | diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index 3fba602c..921aae00 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -433,3 +433,32 @@ class LoadConfigTest(base.Cleanup, base.BaseTestCase):
self.assertEqual(cfg['Name'], 'Spike')
self.assertEqual(cfg['Surname'], 'Spiegel')
+
+ def test_load_config_unknown_keys(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ dockercfg_path = os.path.join(folder, 'config.json')
+ config = {
+ 'detachKeys': 'ctrl-q, ctrl-u, ctrl-i'
+ }
+ with open(dockercfg_path, 'w') as f:
+ json.dump(config, f)
+
+ cfg = auth.load_config(dockercfg_path)
+ assert cfg == {}
+
+ def test_load_config_invalid_auth_dict(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ dockercfg_path = os.path.join(folder, 'config.json')
+ config = {
+ 'auths': {
+ 'scarlet.net': {'sakuya': 'izayoi'}
+ }
+ }
+ with open(dockercfg_path, 'w') as f:
+ json.dump(config, f)
+
+ self.assertRaises(
+ errors.InvalidConfigFile, auth.load_config, dockercfg_path
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
-e git+https://github.com/docker/docker-py.git@575305fdba6c57f06d605920e01b5e1d6b952d3e#egg=docker_py
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
requests==2.5.3
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
websocket_client==0.32.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- requests==2.5.3
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- websocket-client==0.32.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_invalid_auth_dict",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_unknown_keys"
]
| []
| [
"tests/unit/auth_test.py::RegressionTest::test_803_urlsafe_encode",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_hub_index_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_legacy_hub_index_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_invalid_index_name",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_dotted_hub_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost_with_username",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port_and_username",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_port",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_username",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_explicit_none",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_registry",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_fully_explicit",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_hostname_only",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_legacy_config",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_match",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_trailing_slash",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_insecure_proto",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_secure_proto",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_protocol",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_path_wrong_proto",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_hub",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_legacy_hub",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_hub_image",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_library_image",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_private_registry",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_unauthenticated_registry",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_utf8",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_auths",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_headers",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_no_file",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_with_random_name"
]
| []
| Apache License 2.0 | 420 | [
"docker/auth/auth.py"
]
| [
"docker/auth/auth.py"
]
|
adamchainz__pretty-cron-11 | ec7be5a9f853342ded46342eb90fdc0c69c06f68 | 2016-02-09 18:37:29 | ec7be5a9f853342ded46342eb90fdc0c69c06f68 | diff --git a/HISTORY.rst b/HISTORY.rst
index 985360a..7b57b97 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -7,6 +7,8 @@ Pending Release
---------------
* New release notes here
+* Now interprets day 7 as Sunday as well as 0, like Linux crontab parsers -
+ thanks @vetyy.
1.0.0 (2015-07-28)
------------------
diff --git a/pretty_cron/api.py b/pretty_cron/api.py
index 14af101..f7ac1d0 100644
--- a/pretty_cron/api.py
+++ b/pretty_cron/api.py
@@ -76,6 +76,7 @@ _WEEKDAYS = {
4: "Thursday",
5: "Friday",
6: "Saturday",
+ 7: "Sunday",
}
| Missing key in _WEEKDAYS
Hello,
linux crontab allows users to set sunday as 0 or 7 (its same for crontab parser). When I was parsing some of my users cron settings I got key_error exception when they were using this syntax (7 as for sunday), you might consider adding this. (I have currently monkey patched that for time being)
Thanks
| adamchainz/pretty-cron | diff --git a/tests/test_prettify.py b/tests/test_prettify.py
index cec1af2..df46049 100644
--- a/tests/test_prettify.py
+++ b/tests/test_prettify.py
@@ -28,6 +28,9 @@ class PrettyCronTest(unittest.TestCase):
def test_weekly(self):
assert pc("0 0 * * 0") == "At 00:00 every Sunday"
+ def test_day_7_is_sunday(self):
+ assert pc("0 0 * * 7") == "At 00:00 every Sunday"
+
def test_monthly_and_weekly(self):
assert (
pc("0 0 1 * 1") ==
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.4.5
coverage==6.2
cryptography==40.0.2
docutils==0.18.1
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
jeepney==0.7.1
keyring==23.4.1
packaging==21.3
pkginfo==1.10.0
-e git+https://github.com/adamchainz/pretty-cron.git@ec7be5a9f853342ded46342eb90fdc0c69c06f68#egg=pretty_cron
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pytest==2.8.7
readme-renderer==34.0
requests==2.27.1
requests-toolbelt==1.0.0
rfc3986==1.5.0
SecretStorage==3.3.3
six==1.17.0
tqdm==4.64.1
twine==3.8.0
typing_extensions==4.1.1
urllib3==1.26.20
webencodings==0.5.1
zipp==3.6.0
| name: pretty-cron
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.4.5
- coverage==6.2
- cryptography==40.0.2
- docutils==0.18.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- jeepney==0.7.1
- keyring==23.4.1
- packaging==21.3
- pkginfo==1.10.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==2.8.7
- readme-renderer==34.0
- requests==2.27.1
- requests-toolbelt==1.0.0
- rfc3986==1.5.0
- secretstorage==3.3.3
- six==1.17.0
- tqdm==4.64.1
- twine==3.8.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/pretty-cron
| [
"tests/test_prettify.py::PrettyCronTest::test_day_7_is_sunday"
]
| []
| [
"tests/test_prettify.py::PrettyCronTest::test_continuous",
"tests/test_prettify.py::PrettyCronTest::test_daily",
"tests/test_prettify.py::PrettyCronTest::test_every_day_in_month",
"tests/test_prettify.py::PrettyCronTest::test_every_specific_day_in_month",
"tests/test_prettify.py::PrettyCronTest::test_every_specific_day_in_month_and_weekly",
"tests/test_prettify.py::PrettyCronTest::test_hourly",
"tests/test_prettify.py::PrettyCronTest::test_invalid_unchanged",
"tests/test_prettify.py::PrettyCronTest::test_minutely",
"tests/test_prettify.py::PrettyCronTest::test_monthly_and_weekly",
"tests/test_prettify.py::PrettyCronTest::test_nonsense_unchanged",
"tests/test_prettify.py::PrettyCronTest::test_one_day_in_month",
"tests/test_prettify.py::PrettyCronTest::test_one_day_in_month_11th",
"tests/test_prettify.py::PrettyCronTest::test_one_day_in_month_21st",
"tests/test_prettify.py::PrettyCronTest::test_one_day_in_month_2nd",
"tests/test_prettify.py::PrettyCronTest::test_unsupported",
"tests/test_prettify.py::PrettyCronTest::test_weekly",
"tests/test_prettify.py::PrettyCronTest::test_yearly"
]
| []
| MIT License | 421 | [
"HISTORY.rst",
"pretty_cron/api.py"
]
| [
"HISTORY.rst",
"pretty_cron/api.py"
]
|
|
box__box-python-sdk-114 | 8b6afa5cdecd1b622658542c0f4eb5e003a37258 | 2016-02-10 21:46:38 | 98f1b812cdcf53276a369213f5cb59bfb2d0e545 | diff --git a/HISTORY.rst b/HISTORY.rst
index 5c9f373..2194c18 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -6,6 +6,11 @@ Release History
Upcoming
++++++++
+1.4.1 (2016-02-11)
+++++++++++++++++++
+
+- Files now support getting a direct download url.
+
1.4.0 (2016-01-05)
++++++++++++++++++
diff --git a/boxsdk/object/file.py b/boxsdk/object/file.py
index 96f6079..2b28053 100644
--- a/boxsdk/object/file.py
+++ b/boxsdk/object/file.py
@@ -237,3 +237,54 @@ def metadata(self, scope='global', template='properties'):
:class:`Metadata`
"""
return Metadata(self._session, self, scope, template)
+
+ def get_shared_link_download_url(
+ self,
+ access=None,
+ etag=None,
+ unshared_at=None,
+ allow_preview=None,
+ password=None,
+ ):
+ """
+ Get a shared link download url for the file with the given access permissions.
+ This url is a direct download url for the file.
+
+ :param access:
+ Determines who can access the shared link. May be open, company, or collaborators. If no access is
+ specified, the default access will be used.
+ :type access:
+ `unicode` or None
+ :param etag:
+ If specified, instruct the Box API to create the link only if the current version's etag matches.
+ :type etag:
+ `unicode` or None
+ :param unshared_at:
+ The date on which this link should be disabled. May only be set if the current user is not a free user
+ and has permission to set expiration dates.
+ :type unshared_at:
+ :class:`datetime.date` or None
+ :param allow_preview:
+ Whether or not the item being shared can be previewed when accessed via the shared link.
+ If this parameter is None, the default setting will be used.
+ :type allow_preview:
+ `bool` or None
+ :param password:
+ The password required to view this link. If no password is specified then no password will be set.
+ Please notice that this is a premium feature, which might not be available to your app.
+ :type password:
+ `unicode` or None
+ :returns:
+ The URL of the shared link that allows direct download.
+ :rtype:
+ `unicode`
+ :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
+ """
+ item = self.create_shared_link(
+ access=access,
+ etag=etag,
+ unshared_at=unshared_at,
+ allow_preview=allow_preview,
+ password=password,
+ )
+ return item.shared_link['download_url']
diff --git a/boxsdk/object/item.py b/boxsdk/object/item.py
index 3883546..8487761 100644
--- a/boxsdk/object/item.py
+++ b/boxsdk/object/item.py
@@ -159,8 +159,17 @@ def move(self, parent_folder):
}
return self.update_info(data)
- def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_download=None, allow_preview=None, password=None):
- """Get a shared link for the item with the given access permissions.
+ def create_shared_link(
+ self,
+ access=None,
+ etag=None,
+ unshared_at=None,
+ allow_download=None,
+ allow_preview=None,
+ password=None,
+ ):
+ """
+ Create a shared link for the item with the given access permissions.
:param access:
Determines who can access the shared link. May be open, company, or collaborators. If no access is
@@ -191,10 +200,11 @@ def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_downlo
Please notice that this is a premium feature, which might not be available to your app.
:type password:
`unicode` or None
- :returns:
- The URL of the shared link.
+ :return:
+ The updated object with a shared link.
+ Returns a new object of the same type, without modifying the original object passed as self.
:rtype:
- `unicode`
+ :class:`Item`
:raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
"""
data = {
@@ -216,7 +226,64 @@ def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_downlo
if password is not None:
data['shared_link']['password'] = password
- item = self.update_info(data, etag=etag)
+ return self.update_info(data, etag=etag)
+
+ def get_shared_link(
+ self,
+ access=None,
+ etag=None,
+ unshared_at=None,
+ allow_download=None,
+ allow_preview=None,
+ password=None,
+ ):
+ """
+ Get a shared link for the item with the given access permissions.
+ This url leads to a Box.com shared link page, where the item can be previewed, downloaded, etc.
+
+ :param access:
+ Determines who can access the shared link. May be open, company, or collaborators. If no access is
+ specified, the default access will be used.
+ :type access:
+ `unicode` or None
+ :param etag:
+ If specified, instruct the Box API to create the link only if the current version's etag matches.
+ :type etag:
+ `unicode` or None
+ :param unshared_at:
+ The date on which this link should be disabled. May only be set if the current user is not a free user
+ and has permission to set expiration dates.
+ :type unshared_at:
+ :class:`datetime.date` or None
+ :param allow_download:
+ Whether or not the item being shared can be downloaded when accessed via the shared link.
+ If this parameter is None, the default setting will be used.
+ :type allow_download:
+ `bool` or None
+ :param allow_preview:
+ Whether or not the item being shared can be previewed when accessed via the shared link.
+ If this parameter is None, the default setting will be used.
+ :type allow_preview:
+ `bool` or None
+ :param password:
+ The password required to view this link. If no password is specified then no password will be set.
+ Please notice that this is a premium feature, which might not be available to your app.
+ :type password:
+ `unicode` or None
+ :returns:
+ The URL of the shared link.
+ :rtype:
+ `unicode`
+ :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
+ """
+ item = self.create_shared_link(
+ access=access,
+ etag=etag,
+ unshared_at=unshared_at,
+ allow_download=allow_download,
+ allow_preview=allow_preview,
+ password=password,
+ )
return item.shared_link['url']
def remove_shared_link(self, etag=None):
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..c34b498
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[bdist_wheel]
+# This flag says that the code is written to work on both Python 2 and Python
+# 3. If at all possible, it is good practice to do this. If you cannot, you
+# will need to generate wheels for each Python version that you support.
+universal=1
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 23b01bc..b3dc950 100644
--- a/setup.py
+++ b/setup.py
@@ -61,7 +61,7 @@ def main():
install_requires.append('ordereddict>=1.1')
setup(
name='boxsdk',
- version='1.4.0',
+ version='1.4.1',
description='Official Box Python SDK',
long_description=open(join(base_dir, 'README.rst')).read(),
author='Box',
| Add support for getting file shared link download URLs
The [get_shared_link()](https://github.com/box/box-python-sdk/blob/master/boxsdk/object/item.py#L162) method returns:
```python
return item.shared_link['url']
```
However sometimes there may be a use case for getting a **direct link** to a file, which should also be present in the response from the [Box API](https://box-content.readme.io/reference#create-a-shared-link-for-a-file):
```json
{
"type": "file",
"id": "10559150999",
...
"shared_link": {
"url": "https://foo.box.com/s/7mcmdlavtye5o5i0ue8xmtwh2sx5bv8p",
"download_url":"https://foo.box.com/shared/static/7mcmdlavtye5o5i0ue8xmtwh2sx5bv8p.png",
...
}
}
```
Currently the Box Python SDK has a common `get_shared_link` method on the `Item` class, and since `download_url` is not a valid attribute for a shared link on a folder this probably should be added *only* in the context of `File`.
I'm beginning to implement this on my fork with the intent to submit back upstream. What seems like the least terrible approach would be to clone the `get_shared_link` method from the `Item` class into the `File` class, rename it `get_download_link` (or `get_download_url`?), and change the return value, but this seems like a lot of code duplication. Is there a better way?
Any recommendations on approaches or proper patterns to stick to would be most appreciated!
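For context, a minimal usage sketch of the direct-download helper that the patch above ends up introducing (`File.get_shared_link_download_url`), assuming an already-authenticated `boxsdk` `Client`; the credentials are placeholders and the file id is taken from the JSON example:

```python
# Sketch only: assumes OAuth2 credentials and an access token are available.
from boxsdk import Client, OAuth2

oauth = OAuth2(client_id='...', client_secret='...', access_token='...')
client = Client(oauth)

box_file = client.file(file_id='10559150999')

# Existing behaviour: URL of the Box.com shared-link page for the file.
page_url = box_file.get_shared_link(access='open')

# Added by this change: direct download URL from shared_link['download_url'].
download_url = box_file.get_shared_link_download_url(access='open')
print(page_url, download_url)
```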
[Description of one specific use case for this feature](https://community.box.com/t5/Admin-Forum/quot-We-re-sorry-but-we-can-t-preview-zip-files-quot-is/m-p/11336) | box/box-python-sdk | diff --git a/test/unit/object/conftest.py b/test/unit/object/conftest.py
index 0d625af..65e3863 100644
--- a/test/unit/object/conftest.py
+++ b/test/unit/object/conftest.py
@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import unicode_literals
+from datetime import date
import os
from mock import Mock
import pytest
@@ -162,3 +163,28 @@ def file_size(request):
def mock_group(mock_box_session, mock_group_id):
group = Group(mock_box_session, mock_group_id)
return group
+
+
[email protected](params=(True, False, None))
+def shared_link_can_download(request):
+ return request.param
+
+
[email protected](params=(True, False, None))
+def shared_link_can_preview(request):
+ return request.param
+
+
[email protected](params=('open', None))
+def shared_link_access(request):
+ return request.param
+
+
[email protected](params=('hunter2', None))
+def shared_link_password(request):
+ return request.param
+
+
[email protected](params=(date(2015, 5, 5), None))
+def shared_link_unshared_at(request):
+ return request.param
diff --git a/test/unit/object/test_file.py b/test/unit/object/test_file.py
index ecbd4b2..44a8ec4 100644
--- a/test/unit/object/test_file.py
+++ b/test/unit/object/test_file.py
@@ -234,3 +234,43 @@ def test_preflight_check(
expect_json_response=False,
data=expected_data,
)
+
+
+def test_get_shared_link_download_url(
+ test_file,
+ mock_box_session,
+ shared_link_access,
+ shared_link_unshared_at,
+ shared_link_password,
+ shared_link_can_preview,
+ test_url,
+ etag,
+ if_match_header,
+):
+ # pylint:disable=redefined-outer-name, protected-access
+ expected_url = test_file.get_url()
+ mock_box_session.put.return_value.json.return_value = {'shared_link': {'url': None, 'download_url': test_url}}
+ expected_data = {'shared_link': {}}
+ if shared_link_access is not None:
+ expected_data['shared_link']['access'] = shared_link_access
+ if shared_link_unshared_at is not None:
+ expected_data['shared_link']['unshared_at'] = shared_link_unshared_at.isoformat()
+ if shared_link_can_preview is not None:
+ expected_data['shared_link']['permissions'] = permissions = {}
+ permissions['can_preview'] = shared_link_can_preview
+ if shared_link_password is not None:
+ expected_data['shared_link']['password'] = shared_link_password
+ url = test_file.get_shared_link_download_url(
+ etag=etag,
+ access=shared_link_access,
+ unshared_at=shared_link_unshared_at,
+ password=shared_link_password,
+ allow_preview=shared_link_can_preview,
+ )
+ mock_box_session.put.assert_called_once_with(
+ expected_url,
+ data=json.dumps(expected_data),
+ headers=if_match_header,
+ params=None,
+ )
+ assert url == test_url
diff --git a/test/unit/object/test_item.py b/test/unit/object/test_item.py
index abc080f..6226cef 100644
--- a/test/unit/object/test_item.py
+++ b/test/unit/object/test_item.py
@@ -1,7 +1,6 @@
# coding: utf-8
from __future__ import unicode_literals
-from datetime import date
import json
import pytest
@@ -56,31 +55,6 @@ def test_move_item(test_item_and_response, mock_box_session, test_folder, mock_o
assert isinstance(move_response, test_item.__class__)
[email protected](params=(True, False, None))
-def shared_link_can_download(request):
- return request.param
-
-
[email protected](params=(True, False, None))
-def shared_link_can_preview(request):
- return request.param
-
-
[email protected](params=('open', None))
-def shared_link_access(request):
- return request.param
-
-
[email protected](params=('hunter2', None))
-def shared_link_password(request):
- return request.param
-
-
[email protected](params=(date(2015, 5, 5), None))
-def shared_link_unshared_at(request):
- return request.param
-
-
def test_get_shared_link(
test_item_and_response,
mock_box_session,
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-xdist",
"mock",
"sqlalchemy",
"bottle",
"jsonpatch"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | async-timeout==4.0.2
attrs==22.2.0
bottle==0.13.2
-e git+https://github.com/box/box-python-sdk.git@8b6afa5cdecd1b622658542c0f4eb5e003a37258#egg=boxsdk
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
cryptography==40.0.2
execnet==1.9.0
greenlet==2.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
jsonpatch==1.32
jsonpointer==2.3
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycparser==2.21
PyJWT==2.4.0
pyparsing==3.1.4
pytest==7.0.1
pytest-xdist==3.0.2
redis==4.3.6
requests==2.27.1
requests-toolbelt==1.0.0
six==1.17.0
SQLAlchemy==1.4.54
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: box-python-sdk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- async-timeout==4.0.2
- attrs==22.2.0
- bottle==0.13.2
- cffi==1.15.1
- charset-normalizer==2.0.12
- cryptography==40.0.2
- execnet==1.9.0
- greenlet==2.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jsonpatch==1.32
- jsonpointer==2.3
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyjwt==2.4.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-xdist==3.0.2
- redis==4.3.6
- requests==2.27.1
- requests-toolbelt==1.0.0
- six==1.17.0
- sqlalchemy==1.4.54
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/box-python-sdk
| [
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-None-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-True-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-True-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-False-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-False-etag]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-None-None]",
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-None-etag]"
]
| [
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-0-True]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-0-False]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-100-True]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-100-False]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-0-True]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-0-False]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-100-True]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-100-False]"
]
| [
"test/unit/object/test_file.py::test_delete_file[None]",
"test/unit/object/test_file.py::test_delete_file[etag]",
"test/unit/object/test_file.py::test_download_to",
"test/unit/object/test_file.py::test_get_content",
"test/unit/object/test_file.py::test_update_contents[None-True-True-True]",
"test/unit/object/test_file.py::test_update_contents[None-True-True-False]",
"test/unit/object/test_file.py::test_update_contents[None-True-False-True]",
"test/unit/object/test_file.py::test_update_contents[None-True-False-False]",
"test/unit/object/test_file.py::test_update_contents[None-False-True-True]",
"test/unit/object/test_file.py::test_update_contents[None-False-True-False]",
"test/unit/object/test_file.py::test_update_contents[None-False-False-True]",
"test/unit/object/test_file.py::test_update_contents[None-False-False-False]",
"test/unit/object/test_file.py::test_update_contents[etag-True-True-True]",
"test/unit/object/test_file.py::test_update_contents[etag-True-True-False]",
"test/unit/object/test_file.py::test_update_contents[etag-True-False-True]",
"test/unit/object/test_file.py::test_update_contents[etag-True-False-False]",
"test/unit/object/test_file.py::test_update_contents[etag-False-True-True]",
"test/unit/object/test_file.py::test_update_contents[etag-False-True-False]",
"test/unit/object/test_file.py::test_update_contents[etag-False-False-True]",
"test/unit/object/test_file.py::test_update_contents[etag-False-False-False]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-0-True]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-0-False]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-100-True]",
"test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-100-False]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-0-True]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-0-False]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-100-True]",
"test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-100-False]",
"test/unit/object/test_file.py::test_lock[True]",
"test/unit/object/test_file.py::test_lock[False]",
"test/unit/object/test_file.py::test_unlock",
"test/unit/object/test_file.py::test_preflight_check[100-foo.txt-{\"size\":",
"test/unit/object/test_file.py::test_preflight_check[200-None-{\"size\":",
"test/unit/object/test_item.py::test_update_info[file-None]",
"test/unit/object/test_item.py::test_update_info[file-etag]",
"test/unit/object/test_item.py::test_update_info[folder-None]",
"test/unit/object/test_item.py::test_update_info[folder-etag]",
"test/unit/object/test_item.py::test_rename_item[file]",
"test/unit/object/test_item.py::test_rename_item[folder]",
"test/unit/object/test_item.py::test_copy_item[file]",
"test/unit/object/test_item.py::test_copy_item[folder]",
"test/unit/object/test_item.py::test_move_item[file]",
"test/unit/object/test_item.py::test_move_item[folder]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-None-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-True-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-True-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-False-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-False-etag]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-None-None]",
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-None-etag]",
"test/unit/object/test_item.py::test_remove_shared_link[file-None]",
"test/unit/object/test_item.py::test_remove_shared_link[file-etag]",
"test/unit/object/test_item.py::test_remove_shared_link[folder-None]",
"test/unit/object/test_item.py::test_remove_shared_link[folder-etag]",
"test/unit/object/test_item.py::test_get[file-None-None]",
"test/unit/object/test_item.py::test_get[file-None-fields1]",
"test/unit/object/test_item.py::test_get[file-etag-None]",
"test/unit/object/test_item.py::test_get[file-etag-fields1]",
"test/unit/object/test_item.py::test_get[folder-None-None]",
"test/unit/object/test_item.py::test_get[folder-None-fields1]",
"test/unit/object/test_item.py::test_get[folder-etag-None]",
"test/unit/object/test_item.py::test_get[folder-etag-fields1]"
]
| []
| Apache License 2.0 | 423 | [
"boxsdk/object/file.py",
"HISTORY.rst",
"setup.py",
"boxsdk/object/item.py",
"setup.cfg"
]
| [
"boxsdk/object/file.py",
"HISTORY.rst",
"setup.py",
"boxsdk/object/item.py",
"setup.cfg"
]
|
|
marshmallow-code__apispec-50 | 684da986e6c77232cc3e8618a51e271e9cd474f8 | 2016-02-10 21:48:12 | 684da986e6c77232cc3e8618a51e271e9cd474f8 | diff --git a/apispec/core.py b/apispec/core.py
index 029d311..fbb3cc3 100644
--- a/apispec/core.py
+++ b/apispec/core.py
@@ -136,11 +136,21 @@ class APISpec(object):
"""Add a new path object to the spec.
https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#paths-object-
+
+ :param str|Path|None path: URL Path component or Path instance
+ :param dict|None operations: describes the http methods and options for `path`
+ :param dict kwargs: parameters used by any path helpers see :meth:`register_path_helper`
"""
- if path and 'basePath' in self.options:
+ p = path
+ if isinstance(path, Path):
+ p = path.path
+ if p and 'basePath' in self.options:
pattern = '^{0}'.format(re.escape(self.options['basePath']))
- path = re.sub(pattern, '', path)
- path = Path(path=path, operations=operations)
+ p = re.sub(pattern, '', p)
+ if isinstance(path, Path):
+ path.path = p
+ else:
+ path = Path(path=p, operations=operations)
# Execute plugins' helpers
for func in self._path_helpers:
try:
| How should I add an existing Path instance to an APISpec instance?
I tried to do something like:
```python
spec = APISpec(**kwargs)
spec.add_path(Path(**kwargs))
```
And I received the following error (as I should have):
```
File "/Users/Josh/Developer/Kaplan/AWS-Lambda-APIGateway-POC/env/lib/python2.7/site-packages/apispec/core.py", line 169, in add_path
self._paths.setdefault(path.path, path).update(path)
TypeError: unhashable type: 'Path'
```
Is there an easy way to add an existing `Path` object or do I need to duplicate the logic of `self._paths.setdefault(path.path, path).update(path)`?
If this functionality seems worthwhile, I can submit a PR to update `APISpec.add_path` to accept `Path` objects. | marshmallow-code/apispec | diff --git a/tests/test_core.py b/tests/test_core.py
index 7efe15b..07dbf47 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -179,6 +179,24 @@ class TestPath:
assert '/pets' in spec._paths
assert '/v1/pets' not in spec._paths
+ def test_add_path_accepts_path(self, spec):
+ route = '/pet/{petId}'
+ route_spec = self.paths[route]
+ path = Path(path=route, operations={'get': route_spec['get']})
+ spec.add_path(path)
+
+ p = spec._paths[path.path]
+ assert path.path == p.path
+ assert 'get' in p
+
+ def test_add_path_strips_path_base_path(self, spec):
+ spec.options['basePath'] = '/v1'
+ path = Path(path='/v1/pets')
+ spec.add_path(path)
+ assert '/pets' in spec._paths
+ assert '/v1/pets' not in spec._paths
+
+
def test_add_parameters(self, spec):
route_spec = self.paths['/pet/{petId}']['get']
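Read together, the traceback in the issue and the patch further up show the fix: `add_path` now unwraps a `Path` instance (while still stripping `basePath`) instead of using the object itself as a dict key. Below is a minimal usage sketch against the patched code, not an official example; the `APISpec` constructor arguments and the `operations` payload are illustrative assumptions, and only the `Path(...)` / `add_path(...)` calls mirror the new tests.

```python
# Usage sketch for the patched add_path; constructor arguments and the
# operations payload below are assumptions made for illustration.
from apispec import APISpec
from apispec.core import Path  # Path is used inside apispec.core per the patch

spec = APISpec(title='Swagger Petstore', version='1.0.0')

# Passing a Path instance used to raise "TypeError: unhashable type: 'Path'";
# with the patch the instance is unwrapped and registered under its .path.
path = Path(path='/pet/{petId}',
            operations={'get': {'responses': {'200': {'description': 'A pet'}}}})
spec.add_path(path)

assert '/pet/{petId}' in spec._paths  # same check the new tests make
```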
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/marshmallow-code/apispec.git@684da986e6c77232cc3e8618a51e271e9cd474f8#egg=apispec
backports.tarfile==1.2.0
blinker==1.9.0
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
cryptography==44.0.2
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
flake8==2.4.1
Flask==3.1.0
id==1.5.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
invoke==2.2.0
itsdangerous==2.2.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.9.0
Jinja2==3.1.6
keyring==25.6.0
markdown-it-py==3.0.0
MarkupSafe==3.0.2
marshmallow==3.26.1
mccabe==0.3.1
mdurl==0.1.2
mock==5.2.0
more-itertools==10.6.0
nh3==0.2.21
packaging==24.2
pep8==1.7.1
platformdirs==4.3.7
pluggy==1.5.0
pycparser==2.22
pyflakes==0.8.1
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
PyYAML==6.0.2
readme_renderer==44.0
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
SecretStorage==3.3.3
tomli==2.2.1
tox==4.25.0
twine==6.1.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
webargs==8.6.0
Werkzeug==3.1.3
zipp==3.21.0
| name: apispec
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-tarfile==1.2.0
- blinker==1.9.0
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- cryptography==44.0.2
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==2.4.1
- flask==3.1.0
- id==1.5.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- invoke==2.2.0
- itsdangerous==2.2.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jeepney==0.9.0
- jinja2==3.1.6
- keyring==25.6.0
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- marshmallow==3.26.1
- mccabe==0.3.1
- mdurl==0.1.2
- mock==5.2.0
- more-itertools==10.6.0
- nh3==0.2.21
- packaging==24.2
- pep8==1.7.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycparser==2.22
- pyflakes==0.8.1
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pyyaml==6.0.2
- readme-renderer==44.0
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- secretstorage==3.3.3
- tomli==2.2.1
- tox==4.25.0
- twine==6.1.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- webargs==8.6.0
- werkzeug==3.1.3
- zipp==3.21.0
prefix: /opt/conda/envs/apispec
| [
"tests/test_core.py::TestPath::test_add_path_accepts_path",
"tests/test_core.py::TestPath::test_add_path_strips_path_base_path"
]
| []
| [
"tests/test_core.py::TestMetadata::test_swagger_version",
"tests/test_core.py::TestMetadata::test_swagger_metadata",
"tests/test_core.py::TestDefinitions::test_definition",
"tests/test_core.py::TestDefinitions::test_definition_stores_enum",
"tests/test_core.py::TestPath::test_add_path",
"tests/test_core.py::TestPath::test_add_path_merges_paths",
"tests/test_core.py::TestPath::test_add_path_ensures_path_parameters_required",
"tests/test_core.py::TestPath::test_add_path_with_no_path_raises_error",
"tests/test_core.py::TestPath::test_add_path_strips_base_path",
"tests/test_core.py::TestPath::test_add_parameters",
"tests/test_core.py::TestExtensions::test_setup_plugin",
"tests/test_core.py::TestExtensions::test_setup_plugin_doesnt_exist",
"tests/test_core.py::TestExtensions::test_register_definition_helper",
"tests/test_core.py::TestExtensions::test_register_path_helper",
"tests/test_core.py::TestExtensions::test_multiple_path_helpers_w_different_signatures",
"tests/test_core.py::TestExtensions::test_multiple_definition_helpers_w_different_signatures",
"tests/test_core.py::TestDefinitionHelpers::test_definition_helpers_are_used",
"tests/test_core.py::TestDefinitionHelpers::test_multiple_definition_helpers",
"tests/test_core.py::TestPathHelpers::test_path_helper_is_used",
"tests/test_core.py::TestResponseHelpers::test_response_helper_is_used"
]
| []
| MIT License | 424 | [
"apispec/core.py"
]
| [
"apispec/core.py"
]
|
|
scieloorg__xylose-88 | 09b42b365b375904f5d7102277e3f4e4a3d59e7f | 2016-02-12 14:15:41 | 09b42b365b375904f5d7102277e3f4e4a3d59e7f | diff --git a/setup.py b/setup.py
index 7516cbd..d1e1950 100755
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ except ImportError:
setup(
name="xylose",
- version='0.42',
+ version='0.43',
description="A SciELO library to abstract a JSON data structure that is a product of the ISIS2JSON conversion using the ISIS2JSON type 3 data model.",
author="SciELO",
author_email="[email protected]",
diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index 03c2510..9b4cf43 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -195,10 +195,8 @@ class Journal(object):
This method retrieves the original language of the given article.
This method deals with the legacy fields (v400).
"""
- if not 'v400' in self.data:
- return None
- return self.data['v400'][0]['_']
+ return self.data.get('v400', [{'_': None}])[0]['_']
def url(self, language='en'):
"""
@@ -235,15 +233,6 @@ class Journal(object):
if 'v854' in self.data:
return [area['_'] for area in self.data['v854']]
- @property
- def abbreviated_title(self):
- """
- This method retrieves the journal abbreviated title of the given article, if it exists.
- This method deals with the legacy fields (150).
- """
- if 'v150' in self.data:
- return self.data['v150'][0]['_']
-
@property
def wos_citation_indexes(self):
"""
@@ -273,8 +262,7 @@ class Journal(object):
This method deals with the legacy fields (480).
"""
- if 'v480' in self.data:
- return self.data['v480'][0]['_']
+ return self.data.get('v480', [{'_': None}])[0]['_']
@property
def publisher_loc(self):
@@ -284,8 +272,7 @@ class Journal(object):
This method deals with the legacy fields (490).
"""
- if 'v490' in self.data:
- return self.data['v490'][0]['_']
+ return self.data.get('v490', [{'_': None}])[0]['_']
@property
def title(self):
@@ -295,8 +282,30 @@ class Journal(object):
This method deals with the legacy fields (100).
"""
- if 'v100' in self.data:
- return self.data['v100'][0]['_']
+ return self.data.get('v100', [{'_': None}])[0]['_']
+
+ @property
+ def subtitle(self):
+ """
+ This method retrieves the journal subtitle.
+ This method deals with the legacy fields (v110).
+ """
+
+ return self.data.get('v110', [{'_': None}])[0]['_']
+
+ @property
+ def fulltitle(self):
+ """
+ This method retrieves the join of the journal title plus the subtitle.
+ This method deals with the legacy fields (v100, v110).
+ """
+
+ data = []
+
+ data.append(self.title)
+ data.append(self.subtitle)
+
+ return ' - '.join([i for i in data if i])
@property
def title_nlm(self):
@@ -306,8 +315,25 @@ class Journal(object):
This method deals with the legacy fields (421).
"""
- if 'v421' in self.data:
- return self.data['v421'][0]['_']
+ return self.data.get('v421', [{'_': None}])[0]['_']
+
+ @property
+ def abbreviated_title(self):
+ """
+ This method retrieves the journal abbreviated title of the given article, if it exists.
+ This method deals with the legacy fields (150).
+ """
+
+ return self.data.get('v150', [{'_': None}])[0]['_']
+
+ @property
+ def abbreviated_iso_title(self):
+ """
+ This method retrieves the journal abbreviated title of the given article, if it exists.
+ This method deals with the legacy fields (151).
+ """
+
+ return self.data.get('v151', [{'_': None}])[0]['_']
@property
def acronym(self):
@@ -317,8 +343,7 @@ class Journal(object):
This method deals with the legacy fields (68).
"""
- if 'v68' in self.data:
- return self.data['v68'][0]['_'].lower()
+ return self.data.get('v68', [{'_': None}])[0]['_']
@property
def periodicity(self):
@@ -401,6 +426,7 @@ class Journal(object):
return tools.get_date(self.data['v941'][0]['_'])
+
class Article(object):
def __init__(self, data, iso_format=None):
| Add a method to retrieve the journal subtitle from v110
Add a method to retrieve the journal subtitle from v110 (subtitle)
Add a method to retrieve the journal title concatenated with the journal subtitle from v110 (full_title) | scieloorg/xylose | diff --git a/tests/test_document.py b/tests/test_document.py
index 6ade9eb..7405ad9 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -623,11 +623,47 @@ class JournalTests(unittest.TestCase):
def test_journal_title_nlm(self):
self.fulldoc['title']['v421'] = [{u'_': u'Acta Limnologica Brasiliensia NLM'}]
-
+
journal = Journal(self.fulldoc['title'])
self.assertEqual(journal.title_nlm, u'Acta Limnologica Brasiliensia NLM')
+ def test_journal_fulltitle(self):
+ self.fulldoc['title']['v100'] = [{u'_': u'Title'}]
+ self.fulldoc['title']['v110'] = [{u'_': u'SubTitle'}]
+
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertEqual(journal.fulltitle, u'Title - SubTitle')
+
+ def test_journal_fulltitle_without_title(self):
+ del(self.fulldoc['title']['v100'])
+ self.fulldoc['title']['v110'] = [{u'_': u'SubTitle'}]
+
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertEqual(journal.fulltitle, u'SubTitle')
+
+ def test_journal_fulltitle_without_subtitle(self):
+ self.fulldoc['title']['v100'] = [{u'_': u'Title'}]
+
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertEqual(journal.fulltitle, u'Title')
+
+ def test_journal_subtitle(self):
+ self.fulldoc['title']['v110'] = [{u'_': u'SubTitle'}]
+
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertEqual(journal.subtitle, u'SubTitle')
+
+ def test_journal_without_subtitle(self):
+
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertEqual(journal.subtitle, None)
+
def test_without_journal_title_nlm(self):
journal = self.journal
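A minimal sketch of the two properties the patch above adds, mirroring the new tests: `subtitle` reads field v110 and `fulltitle` joins v100 and v110 with ' - '. The title record below is a hand-built stand-in with only the fields the sketch needs (the values are made up); a real ISIS title record carries many more fields.

```python
# Illustrative stand-in for an ISIS journal title record.
from xylose.scielodocument import Journal

title_record = {
    'v100': [{'_': 'Acta Limnologica Brasiliensia'}],  # journal title
    'v110': [{'_': 'An International Journal'}],       # journal subtitle
}

journal = Journal(title_record)

assert journal.subtitle == 'An International Journal'
assert journal.fulltitle == 'Acta Limnologica Brasiliensia - An International Journal'

# When v110 is absent, subtitle is None and fulltitle falls back to the
# title alone, as the new tests assert.
```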
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 2
} | 0.42 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc pandoc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
-e git+https://github.com/scieloorg/xylose.git@09b42b365b375904f5d7102277e3f4e4a3d59e7f#egg=xylose
| name: xylose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/xylose
| [
"tests/test_document.py::JournalTests::test_journal_fulltitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_title",
"tests/test_document.py::JournalTests::test_journal_subtitle",
"tests/test_document.py::JournalTests::test_journal_without_subtitle"
]
| []
| [
"tests/test_document.py::ToolsTests::test_get_date_wrong_day",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_year",
"tests/test_document.py::ToolsTests::test_get_date_year_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day_31",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined",
"tests/test_document.py::ToolsTests::test_get_language_without_iso_format",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_print",
"tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print",
"tests/test_document.py::JournalTests::test_collection_acronym",
"tests/test_document.py::JournalTests::test_creation_date",
"tests/test_document.py::JournalTests::test_current_status",
"tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_current_status_some_changes",
"tests/test_document.py::JournalTests::test_current_without_v51",
"tests/test_document.py::JournalTests::test_journal",
"tests/test_document.py::JournalTests::test_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_journal_acronym",
"tests/test_document.py::JournalTests::test_journal_title",
"tests/test_document.py::JournalTests::test_journal_title_nlm",
"tests/test_document.py::JournalTests::test_journal_url",
"tests/test_document.py::JournalTests::test_languages",
"tests/test_document.py::JournalTests::test_languages_without_v350",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35",
"tests/test_document.py::JournalTests::test_periodicity",
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices",
"tests/test_document.py::JournalTests::test_permission_id",
"tests/test_document.py::JournalTests::test_permission_t0",
"tests/test_document.py::JournalTests::test_permission_t1",
"tests/test_document.py::JournalTests::test_permission_t2",
"tests/test_document.py::JournalTests::test_permission_t3",
"tests/test_document.py::JournalTests::test_permission_t4",
"tests/test_document.py::JournalTests::test_permission_text",
"tests/test_document.py::JournalTests::test_permission_url",
"tests/test_document.py::JournalTests::test_permission_without_v540",
"tests/test_document.py::JournalTests::test_permission_without_v540_t",
"tests/test_document.py::JournalTests::test_publisher_loc",
"tests/test_document.py::JournalTests::test_publisher_name",
"tests/test_document.py::JournalTests::test_scielo_issn",
"tests/test_document.py::JournalTests::test_status",
"tests/test_document.py::JournalTests::test_status_lots_of_changes",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason",
"tests/test_document.py::JournalTests::test_status_some_changes",
"tests/test_document.py::JournalTests::test_status_without_v51",
"tests/test_document.py::JournalTests::test_subject_areas",
"tests/test_document.py::JournalTests::test_update_date",
"tests/test_document.py::JournalTests::test_without_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_without_journal_acronym",
"tests/test_document.py::JournalTests::test_without_journal_title",
"tests/test_document.py::JournalTests::test_without_journal_title_nlm",
"tests/test_document.py::JournalTests::test_without_journal_url",
"tests/test_document.py::JournalTests::test_without_periodicity",
"tests/test_document.py::JournalTests::test_without_publisher_loc",
"tests/test_document.py::JournalTests::test_without_publisher_name",
"tests/test_document.py::JournalTests::test_without_scielo_domain",
"tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::JournalTests::test_without_subject_areas",
"tests/test_document.py::JournalTests::test_without_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_without_wos_subject_areas",
"tests/test_document.py::JournalTests::test_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_acceptance_date",
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliations",
"tests/test_document.py::ArticleTests::test_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_article",
"tests/test_document.py::ArticleTests::test_author_with_two_affiliations",
"tests/test_document.py::ArticleTests::test_author_with_two_role",
"tests/test_document.py::ArticleTests::test_author_without_affiliations",
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names",
"tests/test_document.py::ArticleTests::test_authors",
"tests/test_document.py::ArticleTests::test_collection_acronym",
"tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection",
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992",
"tests/test_document.py::ArticleTests::test_collection_name_brazil",
"tests/test_document.py::ArticleTests::test_collection_name_undefined",
"tests/test_document.py::ArticleTests::test_corporative_authors",
"tests/test_document.py::ArticleTests::test_creation_date",
"tests/test_document.py::ArticleTests::test_creation_date_1",
"tests/test_document.py::ArticleTests::test_creation_date_2",
"tests/test_document.py::ArticleTests::test_document_type",
"tests/test_document.py::ArticleTests::test_doi",
"tests/test_document.py::ArticleTests::test_doi_clean_1",
"tests/test_document.py::ArticleTests::test_doi_clean_2",
"tests/test_document.py::ArticleTests::test_doi_v237",
"tests/test_document.py::ArticleTests::test_e_location",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_file_code",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2",
"tests/test_document.py::ArticleTests::test_first_author",
"tests/test_document.py::ArticleTests::test_first_author_without_author",
"tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts",
"tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts",
"tests/test_document.py::ArticleTests::test_html_url",
"tests/test_document.py::ArticleTests::test_invalid_document_type",
"tests/test_document.py::ArticleTests::test_is_ahead",
"tests/test_document.py::ArticleTests::test_issue",
"tests/test_document.py::ArticleTests::test_issue_label_field_v4",
"tests/test_document.py::ArticleTests::test_issue_label_without_field_v4",
"tests/test_document.py::ArticleTests::test_issue_url",
"tests/test_document.py::ArticleTests::test_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_journal_acronym",
"tests/test_document.py::ArticleTests::test_journal_title",
"tests/test_document.py::ArticleTests::test_keywords",
"tests/test_document.py::ArticleTests::test_keywords_iso639_2",
"tests/test_document.py::ArticleTests::test_keywords_with_undefined_language",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_l",
"tests/test_document.py::ArticleTests::test_languages_field_fulltexts",
"tests/test_document.py::ArticleTests::test_languages_field_v40",
"tests/test_document.py::ArticleTests::test_last_page",
"tests/test_document.py::ArticleTests::test_mixed_affiliations_1",
"tests/test_document.py::ArticleTests::test_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p",
"tests/test_document.py::ArticleTests::test_order",
"tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined",
"tests/test_document.py::ArticleTests::test_original_html_field_body",
"tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_original",
"tests/test_document.py::ArticleTests::test_original_section_field_v49",
"tests/test_document.py::ArticleTests::test_original_title_subfield_t",
"tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_title_without_language_defined",
"tests/test_document.py::ArticleTests::test_pdf_url",
"tests/test_document.py::ArticleTests::test_processing_date",
"tests/test_document.py::ArticleTests::test_processing_date_1",
"tests/test_document.py::ArticleTests::test_project_name",
"tests/test_document.py::ArticleTests::test_project_sponsors",
"tests/test_document.py::ArticleTests::test_publication_contract",
"tests/test_document.py::ArticleTests::test_publication_date",
"tests/test_document.py::ArticleTests::test_publisher_id",
"tests/test_document.py::ArticleTests::test_publisher_loc",
"tests/test_document.py::ArticleTests::test_publisher_name",
"tests/test_document.py::ArticleTests::test_receive_date",
"tests/test_document.py::ArticleTests::test_review_date",
"tests/test_document.py::ArticleTests::test_secion_code_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_without_field_v49",
"tests/test_document.py::ArticleTests::test_section_field_v49",
"tests/test_document.py::ArticleTests::test_section_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_without_field_v49",
"tests/test_document.py::ArticleTests::test_start_page",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_subject_areas",
"tests/test_document.py::ArticleTests::test_supplement_issue",
"tests/test_document.py::ArticleTests::test_supplement_volume",
"tests/test_document.py::ArticleTests::test_thesis_degree",
"tests/test_document.py::ArticleTests::test_thesis_organization",
"tests/test_document.py::ArticleTests::test_thesis_organization_and_division",
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name",
"tests/test_document.py::ArticleTests::test_translated_abstracts",
"tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83",
"tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body",
"tests/test_document.py::ArticleTests::test_translated_section_field_v49",
"tests/test_document.py::ArticleTests::test_translated_titles",
"tests/test_document.py::ArticleTests::test_translated_titles_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_titles_without_v12",
"tests/test_document.py::ArticleTests::test_update_date",
"tests/test_document.py::ArticleTests::test_update_date_1",
"tests/test_document.py::ArticleTests::test_update_date_2",
"tests/test_document.py::ArticleTests::test_update_date_3",
"tests/test_document.py::ArticleTests::test_volume",
"tests/test_document.py::ArticleTests::test_whitwout_acceptance_date",
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_whitwout_receive_date",
"tests/test_document.py::ArticleTests::test_whitwout_review_date",
"tests/test_document.py::ArticleTests::test_without_affiliations",
"tests/test_document.py::ArticleTests::test_without_authors",
"tests/test_document.py::ArticleTests::test_without_citations",
"tests/test_document.py::ArticleTests::test_without_collection_acronym",
"tests/test_document.py::ArticleTests::test_without_corporative_authors",
"tests/test_document.py::ArticleTests::test_without_document_type",
"tests/test_document.py::ArticleTests::test_without_doi",
"tests/test_document.py::ArticleTests::test_without_e_location",
"tests/test_document.py::ArticleTests::test_without_html_url",
"tests/test_document.py::ArticleTests::test_without_issue",
"tests/test_document.py::ArticleTests::test_without_issue_url",
"tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_without_journal_acronym",
"tests/test_document.py::ArticleTests::test_without_journal_title",
"tests/test_document.py::ArticleTests::test_without_keywords",
"tests/test_document.py::ArticleTests::test_without_last_page",
"tests/test_document.py::ArticleTests::test_without_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_without_order",
"tests/test_document.py::ArticleTests::test_without_original_abstract",
"tests/test_document.py::ArticleTests::test_without_original_title",
"tests/test_document.py::ArticleTests::test_without_pages",
"tests/test_document.py::ArticleTests::test_without_pdf_url",
"tests/test_document.py::ArticleTests::test_without_processing_date",
"tests/test_document.py::ArticleTests::test_without_project_name",
"tests/test_document.py::ArticleTests::test_without_project_sponsor",
"tests/test_document.py::ArticleTests::test_without_publication_contract",
"tests/test_document.py::ArticleTests::test_without_publication_date",
"tests/test_document.py::ArticleTests::test_without_publisher_id",
"tests/test_document.py::ArticleTests::test_without_publisher_loc",
"tests/test_document.py::ArticleTests::test_without_publisher_name",
"tests/test_document.py::ArticleTests::test_without_scielo_domain",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::ArticleTests::test_without_start_page",
"tests/test_document.py::ArticleTests::test_without_subject_areas",
"tests/test_document.py::ArticleTests::test_without_suplement_issue",
"tests/test_document.py::ArticleTests::test_without_supplement_volume",
"tests/test_document.py::ArticleTests::test_without_thesis_degree",
"tests/test_document.py::ArticleTests::test_without_thesis_organization",
"tests/test_document.py::ArticleTests::test_without_volume",
"tests/test_document.py::ArticleTests::test_without_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_wos_subject_areas",
"tests/test_document.py::CitationTest::test_a_link_access_date",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation",
"tests/test_document.py::CitationTest::test_article_title",
"tests/test_document.py::CitationTest::test_article_without_title",
"tests/test_document.py::CitationTest::test_authors_article",
"tests/test_document.py::CitationTest::test_authors_book",
"tests/test_document.py::CitationTest::test_authors_link",
"tests/test_document.py::CitationTest::test_authors_thesis",
"tests/test_document.py::CitationTest::test_book_chapter_title",
"tests/test_document.py::CitationTest::test_book_edition",
"tests/test_document.py::CitationTest::test_book_volume",
"tests/test_document.py::CitationTest::test_book_without_chapter_title",
"tests/test_document.py::CitationTest::test_citation_sample_congress",
"tests/test_document.py::CitationTest::test_citation_sample_link",
"tests/test_document.py::CitationTest::test_citation_sample_link_without_comment",
"tests/test_document.py::CitationTest::test_conference_edition",
"tests/test_document.py::CitationTest::test_conference_name",
"tests/test_document.py::CitationTest::test_conference_sponsor",
"tests/test_document.py::CitationTest::test_conference_without_name",
"tests/test_document.py::CitationTest::test_conference_without_sponsor",
"tests/test_document.py::CitationTest::test_date",
"tests/test_document.py::CitationTest::test_doi",
"tests/test_document.py::CitationTest::test_editor",
"tests/test_document.py::CitationTest::test_elocation_14",
"tests/test_document.py::CitationTest::test_elocation_514",
"tests/test_document.py::CitationTest::test_end_page_14",
"tests/test_document.py::CitationTest::test_end_page_514",
"tests/test_document.py::CitationTest::test_end_page_withdout_data",
"tests/test_document.py::CitationTest::test_first_author_article",
"tests/test_document.py::CitationTest::test_first_author_book",
"tests/test_document.py::CitationTest::test_first_author_link",
"tests/test_document.py::CitationTest::test_first_author_thesis",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_index_number",
"tests/test_document.py::CitationTest::test_institutions_all_fields",
"tests/test_document.py::CitationTest::test_institutions_v11",
"tests/test_document.py::CitationTest::test_institutions_v17",
"tests/test_document.py::CitationTest::test_institutions_v29",
"tests/test_document.py::CitationTest::test_institutions_v50",
"tests/test_document.py::CitationTest::test_institutions_v58",
"tests/test_document.py::CitationTest::test_invalid_edition",
"tests/test_document.py::CitationTest::test_isbn",
"tests/test_document.py::CitationTest::test_isbn_but_not_a_book",
"tests/test_document.py::CitationTest::test_issn",
"tests/test_document.py::CitationTest::test_issn_but_not_an_article",
"tests/test_document.py::CitationTest::test_issue_part",
"tests/test_document.py::CitationTest::test_issue_title",
"tests/test_document.py::CitationTest::test_journal_issue",
"tests/test_document.py::CitationTest::test_journal_volume",
"tests/test_document.py::CitationTest::test_link",
"tests/test_document.py::CitationTest::test_link_title",
"tests/test_document.py::CitationTest::test_link_without_title",
"tests/test_document.py::CitationTest::test_monographic_authors",
"tests/test_document.py::CitationTest::test_monographic_first_author",
"tests/test_document.py::CitationTest::test_pages_14",
"tests/test_document.py::CitationTest::test_pages_514",
"tests/test_document.py::CitationTest::test_pages_withdout_data",
"tests/test_document.py::CitationTest::test_publication_type_article",
"tests/test_document.py::CitationTest::test_publication_type_book",
"tests/test_document.py::CitationTest::test_publication_type_conference",
"tests/test_document.py::CitationTest::test_publication_type_link",
"tests/test_document.py::CitationTest::test_publication_type_thesis",
"tests/test_document.py::CitationTest::test_publication_type_undefined",
"tests/test_document.py::CitationTest::test_publisher",
"tests/test_document.py::CitationTest::test_publisher_address",
"tests/test_document.py::CitationTest::test_publisher_address_without_e",
"tests/test_document.py::CitationTest::test_series_book",
"tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation",
"tests/test_document.py::CitationTest::test_series_conference",
"tests/test_document.py::CitationTest::test_series_journal",
"tests/test_document.py::CitationTest::test_source_book_title",
"tests/test_document.py::CitationTest::test_source_journal",
"tests/test_document.py::CitationTest::test_source_journal_without_journal_title",
"tests/test_document.py::CitationTest::test_sponsor",
"tests/test_document.py::CitationTest::test_start_page_14",
"tests/test_document.py::CitationTest::test_start_page_514",
"tests/test_document.py::CitationTest::test_start_page_withdout_data",
"tests/test_document.py::CitationTest::test_thesis_institution",
"tests/test_document.py::CitationTest::test_thesis_title",
"tests/test_document.py::CitationTest::test_thesis_without_title",
"tests/test_document.py::CitationTest::test_title_when_article_citation",
"tests/test_document.py::CitationTest::test_title_when_conference_citation",
"tests/test_document.py::CitationTest::test_title_when_link_citation",
"tests/test_document.py::CitationTest::test_title_when_thesis_citation",
"tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book",
"tests/test_document.py::CitationTest::test_without_analytic_institution",
"tests/test_document.py::CitationTest::test_without_authors",
"tests/test_document.py::CitationTest::test_without_date",
"tests/test_document.py::CitationTest::test_without_doi",
"tests/test_document.py::CitationTest::test_without_edition",
"tests/test_document.py::CitationTest::test_without_editor",
"tests/test_document.py::CitationTest::test_without_first_author",
"tests/test_document.py::CitationTest::test_without_index_number",
"tests/test_document.py::CitationTest::test_without_institutions",
"tests/test_document.py::CitationTest::test_without_issue",
"tests/test_document.py::CitationTest::test_without_issue_part",
"tests/test_document.py::CitationTest::test_without_issue_title",
"tests/test_document.py::CitationTest::test_without_link",
"tests/test_document.py::CitationTest::test_without_monographic_authors",
"tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_without_publisher",
"tests/test_document.py::CitationTest::test_without_publisher_address",
"tests/test_document.py::CitationTest::test_without_series",
"tests/test_document.py::CitationTest::test_without_sponsor",
"tests/test_document.py::CitationTest::test_without_thesis_institution",
"tests/test_document.py::CitationTest::test_without_volume"
]
| []
| BSD 2-Clause "Simplified" License | 426 | [
"setup.py",
"xylose/scielodocument.py"
]
| [
"setup.py",
"xylose/scielodocument.py"
]
|
|
dask__dask-986 | d82cf2ac3fa3a61912b7934afe7b2fe9e14cc4ff | 2016-02-12 18:20:20 | 6dc9229362f2d3b1dfa466a8a63831c3c832b4be | diff --git a/dask/bag/core.py b/dask/bag/core.py
index fcc3119fb..2c4c9115b 100644
--- a/dask/bag/core.py
+++ b/dask/bag/core.py
@@ -5,11 +5,11 @@ import itertools
import math
import bz2
import os
+import uuid
from fnmatch import fnmatchcase
from glob import glob
from collections import Iterable, Iterator, defaultdict
from functools import wraps, partial
-from itertools import repeat
from ..utils import ignoring
@@ -23,16 +23,12 @@ with ignoring(ImportError):
from ..base import Base, normalize_token, tokenize
from ..compatibility import (apply, BytesIO, unicode, urlopen, urlparse,
- StringIO, GzipFile, BZ2File)
+ GzipFile)
from ..core import list2, quote, istask, get_dependencies, reverse_dict
from ..multiprocessing import get as mpget
from ..optimize import fuse, cull, inline
from ..utils import (file_size, infer_compression, open, system_encoding,
- takes_multiple_arguments, textblock)
-
-names = ('bag-%d' % i for i in itertools.count(1))
-tokens = ('-%d' % i for i in itertools.count(1))
-load_names = ('load-%d' % i for i in itertools.count(1))
+ takes_multiple_arguments, textblock, funcname)
no_default = '__no__default__'
@@ -164,7 +160,7 @@ def to_textfiles(b, path, name_function=str, compression='infer',
compression = infer_compression(path)
return compression
- name = next(names)
+ name = 'to-textfiles-' + uuid.uuid4().hex
dsk = dict(((name, i), (write, (b.name, i), path, get_compression(path),
encoding))
for i, path in enumerate(paths))
@@ -199,7 +195,7 @@ class Item(Base):
return [self.key]
def apply(self, func):
- name = next(names)
+ name = 'apply-{0}-{1}'.format(funcname(func), tokenize(self, func))
dsk = {name: (func, self.key)}
return Item(merge(self.dask, dsk), name)
@@ -254,7 +250,7 @@ class Bag(Base):
>>> list(b.map(lambda x: x * 10)) # doctest: +SKIP
[0, 10, 20, 30, 40]
"""
- name = next(names)
+ name = 'map-{0}-{1}'.format(funcname(func), tokenize(self, func))
if takes_multiple_arguments(func):
func = partial(apply, func)
dsk = dict(((name, i), (reify, (map, func, (self.name, i))))
@@ -276,7 +272,8 @@ class Bag(Base):
>>> list(b.filter(iseven)) # doctest: +SKIP
[0, 2, 4]
"""
- name = next(names)
+ name = 'filter-{0}-{1}'.format(funcname(predicate),
+ tokenize(self, predicate))
dsk = dict(((name, i), (reify, (filter, predicate, (self.name, i))))
for i in range(self.npartitions))
return type(self)(merge(self.dask, dsk), name, self.npartitions)
@@ -292,7 +289,8 @@ class Bag(Base):
>>> list(b.remove(iseven)) # doctest: +SKIP
[1, 3]
"""
- name = next(names)
+ name = 'remove-{0}-{1}'.format(funcname(predicate),
+ tokenize(self, predicate))
dsk = dict(((name, i), (reify, (remove, predicate, (self.name, i))))
for i in range(self.npartitions))
return type(self)(merge(self.dask, dsk), name, self.npartitions)
@@ -305,7 +303,8 @@ class Bag(Base):
>>> b.map_partitions(myfunc) # doctest: +SKIP
"""
- name = next(names)
+ name = 'map-partitions-{0}-{1}'.format(funcname(func),
+ tokenize(self, func))
dsk = dict(((name, i), (func, (self.name, i)))
for i in range(self.npartitions))
return type(self)(merge(self.dask, dsk), name, self.npartitions)
@@ -320,7 +319,7 @@ class Bag(Base):
>>> list(b.pluck('credits').pluck(0)) # doctest: +SKIP
[1, 10]
"""
- name = next(names)
+ name = 'pluck-' + tokenize(self, key, default)
key = quote(key)
if default == no_default:
dsk = dict(((name, i), (list, (pluck, key, (self.name, i))))
@@ -391,8 +390,10 @@ class Bag(Base):
Bag.foldby
"""
- a = next(names)
- b = next(names)
+ token = tokenize(self, binop, combine, initial)
+ combine = combine or binop
+ a = 'foldbinop-{0}-{1}'.format(funcname(binop), token)
+ b = 'foldcombine-{0}-{1}'.format(funcname(combine), token)
initial = quote(initial)
if initial is not no_default:
dsk = dict(((a, i), (reduce, binop, (self.name, i), initial))
@@ -400,7 +401,7 @@ class Bag(Base):
else:
dsk = dict(((a, i), (reduce, binop, (self.name, i)))
for i in range(self.npartitions))
- dsk2 = {b: (reduce, combine or binop, list(dsk.keys()))}
+ dsk2 = {b: (reduce, combine, list(dsk.keys()))}
return Item(merge(self.dask, dsk, dsk2), b)
def frequencies(self, split_every=None):
@@ -426,8 +427,9 @@ class Bag(Base):
>>> list(b.topk(2, lambda x: -x)) # doctest: +SKIP
[3, 4]
"""
- a = next(names)
- b = next(names)
+ token = tokenize(self, k, key)
+ a = 'topk-a-' + token
+ b = 'topk-b-' + token
if key:
if callable(key) and takes_multiple_arguments(key):
key = partial(apply, key)
@@ -448,9 +450,10 @@ class Bag(Base):
>>> sorted(b.distinct())
['Alice', 'Bob']
"""
- a = next(names)
+ token = tokenize(self)
+ a = 'distinct-a-' + token
+ b = 'distinct-b-' + token
dsk = dict(((a, i), (set, key)) for i, key in enumerate(self._keys()))
- b = next(names)
dsk2 = {(b, 0): (apply, set.union, quote(list(dsk.keys())))}
return type(self)(merge(self.dask, dsk, dsk2), b, 1)
@@ -482,19 +485,22 @@ class Bag(Base):
"""
if split_every is None:
split_every = 8
- a = next(names)
- b = next(names)
+ token = tokenize(self, perpartition, aggregate, split_every)
+ a = 'reduction-part-{0}-{1}'.format(funcname(perpartition), token)
dsk = dict(((a, i), (perpartition, (self.name, i)))
for i in range(self.npartitions))
k = self.npartitions
b = a
+ fmt = 'reduction-agg-{0}-'.format(funcname(aggregate)) + '-{0}-' + token
+ depth = 0
while k > 1:
- c = next(names)
+ c = fmt.format(depth)
dsk2 = dict(((c, i), (aggregate, [(b, j) for j in inds]))
for i, inds in enumerate(partition_all(split_every, range(k))))
dsk.update(dsk2)
k = len(dsk2)
b = c
+ depth += 1
if out_type is Item:
dsk[c] = dsk.pop((c, 0))
@@ -502,7 +508,6 @@ class Bag(Base):
else:
return Bag(merge(self.dask, dsk), c, 1)
-
@wraps(sum)
def sum(self, split_every=None):
return self.reduction(sum, sum, split_every=split_every)
@@ -578,7 +583,7 @@ class Bag(Base):
assert not isinstance(other, Bag)
if on_other is None:
on_other = on_self
- name = next(names)
+ name = 'join-' + tokenize(self, other, on_self, on_other)
dsk = dict(((name, i), (list, (join, on_other, other,
on_self, (self.name, i))))
for i in range(self.npartitions))
@@ -587,7 +592,7 @@ class Bag(Base):
def product(self, other):
""" Cartesian product between two bags """
assert isinstance(other, Bag)
- name = next(names)
+ name = 'product-' + tokenize(self, other)
n, m = self.npartitions, other.npartitions
dsk = dict(((name, i*m + j),
(list, (itertools.product, (self.name, i),
@@ -667,8 +672,9 @@ class Bag(Base):
toolz.reduceby
pyspark.combineByKey
"""
- a = next(names)
- b = next(names)
+ token = tokenize(self, key, binop, initial, combine, combine_initial)
+ a = 'foldby-a-' + token
+ b = 'foldby-b-' + token
if combine is None:
combine = binop
if initial is not no_default:
@@ -705,7 +711,7 @@ class Bag(Base):
>>> b.take(3) # doctest: +SKIP
(0, 1, 2)
"""
- name = next(names)
+ name = 'take-' + tokenize(self, k)
dsk = {(name, 0): (list, (take, k, (self.name, 0)))}
b = Bag(merge(self.dask, dsk), name, 1)
if compute:
@@ -726,7 +732,7 @@ class Bag(Base):
>>> list(b.concat())
[1, 2, 3]
"""
- name = next(names)
+ name = 'concat-' + tokenize(self)
dsk = dict(((name, i), (list, (toolz.concat, (self.name, i))))
for i in range(self.npartitions))
return type(self)(merge(self.dask, dsk), name, self.npartitions)
@@ -751,22 +757,23 @@ class Bag(Base):
"""
if npartitions is None:
npartitions = self.npartitions
+ token = tokenize(self, grouper, npartitions, blocksize)
import partd
- p = ('partd' + next(tokens),)
+ p = ('partd-' + token,)
try:
dsk1 = {p: (partd.Python, (partd.Snappy, partd.File()))}
except AttributeError:
dsk1 = {p: (partd.Python, partd.File())}
# Partition data on disk
- name = next(names)
+ name = 'groupby-part-{0}-{1}'.format(funcname(grouper), token)
dsk2 = dict(((name, i), (partition, grouper, (self.name, i),
npartitions, p, blocksize))
for i in range(self.npartitions))
# Barrier
- barrier_token = 'barrier' + next(tokens)
+ barrier_token = 'groupby-barrier-' + token
def barrier(args):
return 0
@@ -774,7 +781,7 @@ class Bag(Base):
dsk3 = {barrier_token: (barrier, list(dsk2))}
# Collect groups
- name = next(names)
+ name = 'groupby-collect-' + token
dsk4 = dict(((name, i),
(collect, grouper, i, p, barrier_token))
for i in range(npartitions))
@@ -816,7 +823,7 @@ class Bag(Base):
columns = sorted(head)
elif isinstance(head, (tuple, list)):
columns = list(range(len(head)))
- name = next(names)
+ name = 'to_dataframe-' + tokenize(self, columns)
DataFrame = partial(pd.DataFrame, columns=columns)
dsk = dict(((name, i), (DataFrame, (list2, (self.name, i))))
for i in range(self.npartitions))
@@ -885,7 +892,7 @@ def from_filenames(filenames, chunkbytes=None, compression='infer',
full_filenames = [os.path.abspath(f) for f in filenames]
- name = 'from-filename' + next(tokens)
+ name = 'from-filenames-' + uuid.uuid4().hex
# Make sure `linesep` is not a byte string because `io.TextIOWrapper` in
# python versions other than 2.7 dislike byte strings for the `newline`
@@ -992,7 +999,7 @@ def from_s3(bucket_name, paths='*', aws_access_key=None, aws_secret_key=None,
get_key = partial(_get_key, bucket_name, conn_args)
- name = next(load_names)
+ name = 'from_s3-' + uuid.uuid4().hex
dsk = dict(((name, i), (list, (get_key, k))) for i, k in enumerate(paths))
return Bag(dsk, name, len(paths))
@@ -1035,7 +1042,7 @@ def from_hdfs(path, hdfs=None, host='localhost', port='50070', user_name=None):
if not filenames:
raise ValueError("No files found for path %s" % path)
- name = next(names)
+ name = 'from_hdfs-' + uuid.uuid4().hex
dsk = dict()
for i, fn in enumerate(filenames):
ext = fn.split('.')[-1]
@@ -1106,7 +1113,7 @@ def from_sequence(seq, partition_size=None, npartitions=None):
partition_size = int(len(seq) / 100)
parts = list(partition_all(partition_size, seq))
- name = next(load_names)
+ name = 'from_sequence-' + tokenize(seq, partition_size)
d = dict(((name, i), part) for i, part in enumerate(parts))
return Bag(d, name, len(d))
@@ -1131,7 +1138,8 @@ def from_castra(x, columns=None, index=False):
if columns is None:
columns = x.columns
- name = 'from-castra-' + next(tokens)
+ name = 'from-castra-' + tokenize(os.path.getmtime(x.path), x.path,
+ columns, index)
dsk = dict(((name, i), (load_castra_partition, x, part, columns, index))
for i, part in enumerate(x.partitions))
return Bag(dsk, name, len(x.partitions))
@@ -1181,7 +1189,7 @@ def from_url(urls):
"""
if isinstance(urls, str):
urls = [urls]
- name = next(load_names)
+ name = 'from_url-' + uuid.uuid4().hex
dsk = {}
for i, u in enumerate(urls):
dsk[(name, i)] = (list, (urlopen, u))
@@ -1208,7 +1216,7 @@ def concat(bags):
>>> list(c)
[1, 2, 3, 4, 5, 6]
"""
- name = next(names)
+ name = 'concat-' + tokenize(*bags)
counter = itertools.count(0)
dsk = dict(((name, next(counter)), key)
for bag in bags for key in sorted(bag._keys()))
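In practice the patch above replaces the global `itertools.count`-based name generators with names built from `funcname` plus `tokenize` (or a `uuid4` hex for I/O sources), so repeating the same operation on the same bag reproduces the same graph keys. A small behaviour sketch, assuming the patched `dask.bag`; the toy bag and `inc` function are illustrative only.

```python
# Behaviour sketch: bag graph keys are now content-derived and deterministic.
import dask.bag as db

def inc(x):
    return x + 1

b = db.from_sequence(range(5), npartitions=2)

assert b.map(inc).name == b.map(inc).name       # same op twice, same key name
assert b.map(inc).name.startswith('map-inc-')   # the name now encodes the function
assert b.sum().key == b.sum().key               # reductions are deterministic too
assert b.sum().key != b.sum(split_every=2).key  # different parameters, different key
```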
| dask.bag does not use hashed keys
We should use `tokenize` rather than the current `tokens` within dask.bag
cc @jcrist | dask/dask | diff --git a/dask/bag/tests/test_bag.py b/dask/bag/tests/test_bag.py
index 6b0273e34..6f46f4434 100644
--- a/dask/bag/tests/test_bag.py
+++ b/dask/bag/tests/test_bag.py
@@ -59,6 +59,7 @@ def test_map():
expected = merge(dsk, dict(((c.name, i), (reify, (map, inc, (b.name, i))))
for i in range(b.npartitions)))
assert c.dask == expected
+ assert c.name == b.map(inc).name
def test_map_function_with_multiple_arguments():
@@ -108,10 +109,14 @@ def test_filter():
(reify, (filter, iseven, (b.name, i))))
for i in range(b.npartitions)))
assert c.dask == expected
+ assert c.name == b.filter(iseven).name
def test_remove():
- assert list(b.remove(lambda x: x % 2 == 0)) == [1, 3] * 3
+ f = lambda x: x % 2 == 0
+ c = b.remove(f)
+ assert list(c) == [1, 3] * 3
+ assert c.name == b.remove(f).name
def test_iter():
@@ -126,17 +131,26 @@ def test_pluck():
assert set(b.pluck(0)) == set([1, 2, 3, 4])
assert set(b.pluck(1)) == set([10, 20, 30, 40])
assert set(b.pluck([1, 0])) == set([(10, 1), (20, 2), (30, 3), (40, 4)])
+ assert b.pluck([1, 0]).name == b.pluck([1, 0]).name
def test_pluck_with_default():
b = db.from_sequence(['Hello', '', 'World'])
assert raises(IndexError, lambda: list(b.pluck(0)))
assert list(b.pluck(0, None)) == ['H', None, 'W']
+ assert b.pluck(0, None).name == b.pluck(0, None).name
+ assert b.pluck(0).name != b.pluck(0, None).name
def test_fold():
- assert b.fold(add).compute() == sum(L)
- assert b.fold(add, initial=10).compute() == sum(L) + 10 * b.npartitions
+ c = b.fold(add)
+ assert c.compute() == sum(L)
+ assert c.key == b.fold(add).key
+
+ c2 = b.fold(add, initial=10)
+ assert c2.key != c.key
+ assert c2.compute() == sum(L) + 10 * b.npartitions
+ assert c2.key == b.fold(add, initial=10).key
c = db.from_sequence(range(5), npartitions=3)
def binop(acc, x):
@@ -144,7 +158,9 @@ def test_fold():
acc.add(x)
return acc
- assert c.fold(binop, set.union, initial=set()).compute() == set(c)
+ d = c.fold(binop, set.union, initial=set())
+ assert d.compute() == set(c)
+ assert d.key == c.fold(binop, set.union, initial=set()).key
d = db.from_sequence('hello')
assert set(d.fold(lambda a, b: ''.join([a, b]), initial='').compute()) == set('hello')
@@ -156,23 +172,31 @@ def test_fold():
def test_distinct():
assert sorted(b.distinct()) == [0, 1, 2, 3, 4]
+ assert b.distinct().name == b.distinct().name
def test_frequencies():
- assert dict(b.frequencies()) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3}
- assert dict(b.frequencies(split_every=2)) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3}
+ c = b.frequencies()
+ assert dict(c) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3}
+ c2 = b.frequencies(split_every=2)
+ assert dict(c2) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3}
+ assert c.name == b.frequencies().name
+ assert c.name != c2.name
+ assert c2.name == b.frequencies(split_every=2).name
def test_topk():
assert list(b.topk(4)) == [4, 4, 4, 3]
assert list(b.topk(4, key=lambda x: -x).compute(get=dask.get)) == \
[0, 0, 0, 1]
+ assert b.topk(4).name == b.topk(4).name
def test_topk_with_non_callable_key():
b = db.from_sequence([(1, 10), (2, 9), (3, 8)], npartitions=2)
assert list(b.topk(2, key=1)) == [(1, 10), (2, 9)]
assert list(b.topk(2, key=0)) == [(3, 8), (2, 9)]
+ assert b.topk(2, key=1).name == b.topk(2, key=1).name
def test_topk_with_multiarg_lambda():
@@ -183,6 +207,7 @@ def test_topk_with_multiarg_lambda():
def test_lambdas():
assert list(b.map(lambda x: x + 1)) == list(b.map(inc))
+
def test_reductions():
assert int(b.count()) == 15
assert int(b.sum()) == 30
@@ -190,6 +215,8 @@ def test_reductions():
assert int(b.min()) == 0
assert int(b.any()) == True
assert int(b.all()) == False # some zeros exist
+ assert b.all().key == b.all().key
+ assert b.all().key != b.any().key
def test_tree_reductions():
@@ -208,30 +235,39 @@ def test_tree_reductions():
assert c.compute() == d.compute()
assert len(c.dask) > len(d.dask)
+ assert c.key != d.key
+ assert c.key == b.sum(split_every=2).key
+ assert c.key != b.sum().key
+
def test_mean():
assert b.mean().compute(get=dask.get) == 2.0
assert float(b.mean()) == 2.0
+
def test_std():
assert b.std().compute(get=dask.get) == math.sqrt(2.0)
assert float(b.std()) == math.sqrt(2.0)
+
def test_var():
assert b.var().compute(get=dask.get) == 2.0
assert float(b.var()) == 2.0
def test_join():
- assert list(b.join([1, 2, 3], on_self=isodd, on_other=iseven)) == \
- list(join(iseven, [1, 2, 3], isodd, list(b)))
+ c = b.join([1, 2, 3], on_self=isodd, on_other=iseven)
+ assert list(c) == list(join(iseven, [1, 2, 3], isodd, list(b)))
assert list(b.join([1, 2, 3], isodd)) == \
list(join(isodd, [1, 2, 3], isodd, list(b)))
+ assert c.name == b.join([1, 2, 3], on_self=isodd, on_other=iseven).name
+
def test_foldby():
c = b.foldby(iseven, add, 0, add, 0)
assert (reduceby, iseven, add, (b.name, 0), 0) in list(c.dask.values())
assert set(c) == set(reduceby(iseven, lambda acc, x: acc + x, L, 0).items())
+ assert c.name == b.foldby(iseven, add, 0, add, 0).name
c = b.foldby(iseven, lambda acc, x: acc + x)
assert set(c) == set(reduceby(iseven, lambda acc, x: acc + x, L, 0).items())
@@ -239,6 +275,8 @@ def test_foldby():
def test_map_partitions():
assert list(b.map_partitions(len)) == [5, 5, 5]
+ assert b.map_partitions(len).name == b.map_partitions(len).name
+ assert b.map_partitions(lambda a: len(a) + 1).name != b.map_partitions(len).name
def test_lazify_task():
@@ -296,6 +334,7 @@ def test_map_is_lazy():
from dask.bag.core import map
assert isinstance(map(lambda x: x, [1, 2, 3]), Iterator)
+
def test_can_use_dict_to_make_concrete():
assert isinstance(dict(b.frequencies()), dict)
@@ -318,6 +357,8 @@ def test_from_castra():
list(default) == [(i, str(i)) for i in range(100)])
assert list(with_columns) == list(range(100))
assert list(with_index) == list(zip(range(100), range(100)))
+ assert default.name != with_columns.name != with_index.name
+ assert with_index.name == db.from_castra(c, 'x', index=True).name
@pytest.mark.slow
@@ -477,6 +518,8 @@ def test_product():
z = x.product(y)
assert set(z) == set([(i, j) for i in [1, 2, 3, 4] for j in [10, 20, 30]])
+ assert z.name != b2.name
+ assert z.name == x.product(y).name
def test_partition_collect():
@@ -491,14 +534,16 @@ def test_partition_collect():
def test_groupby():
- c = b.groupby(lambda x: x)
+ c = b.groupby(identity)
result = dict(c)
assert result == {0: [0, 0 ,0],
1: [1, 1, 1],
2: [2, 2, 2],
3: [3, 3, 3],
4: [4, 4, 4]}
- assert b.groupby(lambda x: x).npartitions == b.npartitions
+ assert c.npartitions == b.npartitions
+ assert c.name == b.groupby(identity).name
+ assert c.name != b.groupby(lambda x: x + 1).name
def test_groupby_with_indexer():
@@ -507,6 +552,7 @@ def test_groupby_with_indexer():
assert valmap(sorted, result) == {1: [[1, 2, 3], [1, 4, 9]],
2: [[2, 3, 4]]}
+
def test_groupby_with_npartitions_changed():
result = b.groupby(lambda x: x, npartitions=1)
result2 = dict(result)
@@ -525,6 +571,10 @@ def test_concat():
c = db.concat([a, b])
assert list(c) == [1, 2, 3, 4, 5, 6]
+ assert c.name == db.concat([a, b]).name
+ assert b.concat().name != a.concat().name
+ assert b.concat().name == b.concat().name
+
b = db.from_sequence([1, 2, 3]).map(lambda x: x * [1, 2, 3])
assert list(b.concat()) == [1, 2, 3] * sum([1, 2, 3])
@@ -570,6 +620,10 @@ def test_to_dataframe():
assert (df2.compute().values == df.compute().values).all()
+ assert df2._name == b.to_dataframe()._name
+ assert df2._name != df._name
+
+
def test_to_textfiles():
b = db.from_sequence(['abc', '123', 'xyz'], npartitions=2)
dir = mkdtemp()
@@ -648,6 +702,8 @@ def test_string_namespace():
assert list(b.str.match('*Smith')) == ['Alice Smith', 'Charlie Smith']
assert raises(AttributeError, lambda: b.str.sfohsofhf)
+ assert b.str.match('*Smith').name == b.str.match('*Smith').name
+ assert b.str.match('*Smith').name != b.str.match('*John').name
def test_string_namespace_with_unicode():
@@ -743,6 +799,7 @@ def test_from_imperative():
from dask.imperative import value
a, b, c = value([1, 2, 3]), value([4, 5, 6]), value([7, 8, 9])
bb = from_imperative([a, b, c])
+ assert bb.name == from_imperative([a, b, c]).name
assert isinstance(bb, Bag)
assert list(bb) == [1, 2, 3, 4, 5, 6, 7, 8, 9]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 1
} | 1.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@d82cf2ac3fa3a61912b7934afe7b2fe9e14cc4ff#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- botocore=1.23.24=pyhd3eb1b0_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- distributed=2021.3.0=py36h06a4308_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- msgpack-python=1.0.2=py36hff7bd54_1
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- s3fs=2022.1.0=pyhd3eb1b0_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/bag/tests/test_bag.py::test_map",
"dask/bag/tests/test_bag.py::test_filter",
"dask/bag/tests/test_bag.py::test_remove",
"dask/bag/tests/test_bag.py::test_pluck",
"dask/bag/tests/test_bag.py::test_pluck_with_default",
"dask/bag/tests/test_bag.py::test_fold",
"dask/bag/tests/test_bag.py::test_distinct",
"dask/bag/tests/test_bag.py::test_frequencies",
"dask/bag/tests/test_bag.py::test_topk",
"dask/bag/tests/test_bag.py::test_topk_with_non_callable_key",
"dask/bag/tests/test_bag.py::test_reductions",
"dask/bag/tests/test_bag.py::test_tree_reductions",
"dask/bag/tests/test_bag.py::test_join",
"dask/bag/tests/test_bag.py::test_foldby",
"dask/bag/tests/test_bag.py::test_map_partitions",
"dask/bag/tests/test_bag.py::test_product",
"dask/bag/tests/test_bag.py::test_groupby",
"dask/bag/tests/test_bag.py::test_concat",
"dask/bag/tests/test_bag.py::test_string_namespace"
]
| []
| [
"dask/bag/tests/test_bag.py::test_Bag",
"dask/bag/tests/test_bag.py::test_keys",
"dask/bag/tests/test_bag.py::test_map_function_with_multiple_arguments",
"dask/bag/tests/test_bag.py::test_map_with_constructors",
"dask/bag/tests/test_bag.py::test_map_with_builtins",
"dask/bag/tests/test_bag.py::test_iter",
"dask/bag/tests/test_bag.py::test_topk_with_multiarg_lambda",
"dask/bag/tests/test_bag.py::test_lambdas",
"dask/bag/tests/test_bag.py::test_mean",
"dask/bag/tests/test_bag.py::test_std",
"dask/bag/tests/test_bag.py::test_var",
"dask/bag/tests/test_bag.py::test_lazify_task",
"dask/bag/tests/test_bag.py::test_lazify",
"dask/bag/tests/test_bag.py::test_inline_singleton_lists",
"dask/bag/tests/test_bag.py::test_take",
"dask/bag/tests/test_bag.py::test_map_is_lazy",
"dask/bag/tests/test_bag.py::test_can_use_dict_to_make_concrete",
"dask/bag/tests/test_bag.py::test_from_filenames",
"dask/bag/tests/test_bag.py::test_from_filenames_gzip",
"dask/bag/tests/test_bag.py::test_from_filenames_bz2",
"dask/bag/tests/test_bag.py::test_from_filenames_large",
"dask/bag/tests/test_bag.py::test_from_filenames_encoding",
"dask/bag/tests/test_bag.py::test_from_filenames_large_gzip",
"dask/bag/tests/test_bag.py::test__parse_s3_URI",
"dask/bag/tests/test_bag.py::test_from_sequence",
"dask/bag/tests/test_bag.py::test_from_long_sequence",
"dask/bag/tests/test_bag.py::test_partition_collect",
"dask/bag/tests/test_bag.py::test_groupby_with_indexer",
"dask/bag/tests/test_bag.py::test_groupby_with_npartitions_changed",
"dask/bag/tests/test_bag.py::test_concat_after_map",
"dask/bag/tests/test_bag.py::test_args",
"dask/bag/tests/test_bag.py::test_to_dataframe",
"dask/bag/tests/test_bag.py::test_to_textfiles",
"dask/bag/tests/test_bag.py::test_to_textfiles_encoding",
"dask/bag/tests/test_bag.py::test_to_textfiles_inputs",
"dask/bag/tests/test_bag.py::test_bz2_stream",
"dask/bag/tests/test_bag.py::test_string_namespace_with_unicode",
"dask/bag/tests/test_bag.py::test_str_empty_split",
"dask/bag/tests/test_bag.py::test_stream_decompress",
"dask/bag/tests/test_bag.py::test_map_with_iterator_function",
"dask/bag/tests/test_bag.py::test_ensure_compute_output_is_concrete",
"dask/bag/tests/test_bag.py::test_bag_class_extend",
"dask/bag/tests/test_bag.py::test_gh715",
"dask/bag/tests/test_bag.py::test_bag_compute_forward_kwargs",
"dask/bag/tests/test_bag.py::test_to_imperative",
"dask/bag/tests/test_bag.py::test_from_imperative"
]
| []
| BSD 3-Clause "New" or "Revised" License | 427 | [
"dask/bag/core.py"
]
| [
"dask/bag/core.py"
]
|
|
collective__icalendar-185 | 6888bbe02042cd65b12a6d855b527a964a4b823b | 2016-02-14 13:25:31 | 6888bbe02042cd65b12a6d855b527a964a4b823b | untitaker: Could be a property that checks bool value of `self.errors`.
stlaz: I would rather like to see it a property dependant on self.errors, too.
You could do something like:
~~~python
class Component:
    @property
    def is_broken(self):
        return bool(self.errors)
~~~ | diff --git a/CHANGES.rst b/CHANGES.rst
index 5fbf217..405ebcb 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -11,6 +11,7 @@ New:
Fixes:
- Fix testsuite for use with ``dateutil>=2.5``. Refs #195.
+- Reintroduce cal.Component.is_broken that was removed with 3.9.2 [geier]
3.9.2 (2016-02-05)
diff --git a/src/icalendar/cal.py b/src/icalendar/cal.py
index 9828c54..8448dba 100644
--- a/src/icalendar/cal.py
+++ b/src/icalendar/cal.py
@@ -106,6 +106,10 @@ class Component(CaselessDict):
"""
return True if not (list(self.values()) + self.subcomponents) else False # noqa
+ @property
+ def is_broken(self):
+ return bool(self.errors)
+
#############################
# handling of property values
| incompatible changes in 3.9.2
With 70a7b5a16748afbf0d48ca180c2b7613fdd7e7d0 we introduced some backwards incompatible changes:
* `Component.is_broken` got replaced with `Component.errors`
* events with an `RDATE;VALUE=PERIOD:19970101T180000Z/19970102T070000Z,19970109T180000Z/PT5H30M` component still had an `RDATE` with the `VALUE=PERIOD` param before; now they are `RDATE:None`
While I do agree with both changes, I think they should have been deferred to the 4.0.0 release. Because we don't have `VALUE=PERIOD` anyway I think we can leave this one as it is, but I believe we should bring back `Component.is_broken` for the 3.9.3 release.
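For illustration, the restored attribute would keep old-style checks working (a sketch; `ical_text` stands in for any iCalendar source):

```python
import icalendar

cal = icalendar.Calendar.from_ical(ical_text)  # ical_text: placeholder input

# The 3.9.1-style spelling keeps working and is just sugar over the new list:
assert cal.is_broken == bool(cal.errors)
```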
| collective/icalendar | diff --git a/src/icalendar/tests/test_fixed_issues.py b/src/icalendar/tests/test_fixed_issues.py
index ae29535..6b375d3 100644
--- a/src/icalendar/tests/test_fixed_issues.py
+++ b/src/icalendar/tests/test_fixed_issues.py
@@ -200,6 +200,7 @@ X
END:VEVENT"""
event = icalendar.Calendar.from_ical(ical_str)
self.assertTrue(isinstance(event, icalendar.Event))
+ self.assertTrue(event.is_broken) # REMOVE FOR NEXT MAJOR RELEASE
self.assertEqual(
event.errors,
[(None, "Content line could not be parsed into parts: 'X': Invalid content line")] # noqa
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_git_commit_hash",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 3.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements_docs.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
-e git+https://github.com/collective/icalendar.git@6888bbe02042cd65b12a6d855b527a964a4b823b#egg=icalendar
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
swebench_matterhorn @ file:///swebench_matterhorn
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: icalendar
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- swebench-matterhorn==0.0.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/icalendar
| [
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_104__ignore_exceptions"
]
| []
| [
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_index_error_issue",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_100",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_101",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_104__no_ignore_exceptions",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_112",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_116",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_142",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_143",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_157",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_168",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_178",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_53",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_55",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_58",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_64",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_70",
"src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_82"
]
| []
| BSD License | 430 | [
"src/icalendar/cal.py",
"CHANGES.rst"
]
| [
"src/icalendar/cal.py",
"CHANGES.rst"
]
|
scopely-devops__skew-92 | a379c1344b8cb630a5f9e9e8ff5e7ab0f7423df0 | 2016-02-15 14:16:07 | 73848f12aeb8c630a93a3b125c7c0a716d972b57 | diff --git a/skew/resources/resource.py b/skew/resources/resource.py
index 93bbdf9..92514cd 100644
--- a/skew/resources/resource.py
+++ b/skew/resources/resource.py
@@ -17,6 +17,8 @@ import jmespath
import skew.awsclient
+from botocore.exceptions import ClientError
+
LOG = logging.getLogger(__name__)
@@ -46,7 +48,13 @@ class Resource(object):
if extra_args:
kwargs.update(extra_args)
LOG.debug('enum_op=%s' % enum_op)
- data = client.call(enum_op, query=path, **kwargs)
+ try:
+ data = client.call(enum_op, query=path, **kwargs)
+ except ClientError as e:
+ data = {}
+ # if the error is because the resource was not found, be quiet
+ if 'NotFound' not in e.response['Error']['Code']:
+ raise
LOG.debug(data)
resources = []
if data:
| Wildcard searches for ec2 instances result in exception
In version 0.16.1, when running a `skew.scan()` for a wildcard account and/or region, the following error is encountered:
```
raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (InvalidInstanceID.NotFound) when calling the DescribeInstances operation: The instance ID <SNIP INSTANCE ID>' does not exist
```
The call for this looks like:
`skew.scan('arn:aws:ec2:*:*:instance/<SPECIFIC INSTANCE ID>')`
If I call this with the specific region and instance id, then the query succeeds.
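For illustration, a hypothetical wrapper around the `client.call(...)` line that appears in the trace below, showing how a wildcard scan could skip resources that simply don't exist in a given region/account (the helper name is made up):

```python
from botocore.exceptions import ClientError

def call_ignoring_missing(client, enum_op, path, **kwargs):
    """Hypothetical helper: treat *.NotFound errors as 'no resources here'."""
    try:
        return client.call(enum_op, query=path, **kwargs)
    except ClientError as e:
        # e.g. InvalidInstanceID.NotFound from DescribeInstances
        if 'NotFound' not in e.response['Error']['Code']:
            raise
        return {}
```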
The full stack trace after snipping out my code lines:
```
#!/usr/bin/env python
(...)
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 319, in __iter__
for scheme in self.scheme.enumerate(context, **self.kwargs):
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 244, in enumerate
context, **kwargs):
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 229, in enumerate
context, **kwargs):
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 214, in enumerate
context, **kwargs):
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 195, in enumerate
context, **kwargs):
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 146, in enumerate
context, **kwargs):
File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 128, in enumerate
self._arn, region, account, resource_id, **kwargs))
File "/usr/local/lib/python2.7/dist-packages/skew/resources/resource.py", line 49, in enumerate
data = client.call(enum_op, query=path, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/skew/awsclient.py", line 116, in call
data = results.build_full_result()
File "/usr/local/lib/python2.7/dist-packages/botocore/paginate.py", line 271, in build_full_result
for response in self:
File "/usr/local/lib/python2.7/dist-packages/botocore/paginate.py", line 85, in __iter__
response = self._make_request(current_kwargs)
File "/usr/local/lib/python2.7/dist-packages/botocore/paginate.py", line 157, in _make_request
return self._method(**current_kwargs)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 310, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 407, in _make_api_call
raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (InvalidInstanceID.NotFound) when calling the DescribeInstances operation: The instance ID ' <SNIP INSTANCE ID>' does not exist
``` | scopely-devops/skew | diff --git a/tests/unit/responses/instances_3/ec2.DescribeInstances_1.json b/tests/unit/responses/instances_3/ec2.DescribeInstances_1.json
new file mode 100644
index 0000000..b8d59f6
--- /dev/null
+++ b/tests/unit/responses/instances_3/ec2.DescribeInstances_1.json
@@ -0,0 +1,13 @@
+{
+ "status_code": 400,
+ "data": {
+ "ResponseMetadata": {
+ "HTTPStatusCode": 400,
+ "RequestId": "c54d7e0e-ccfc-4a93-a2e5-862de7716e5d"
+ },
+ "Error": {
+ "Message": "The instance ID 'i-eedf6728' does not exist",
+ "Code": "InvalidInstanceID.NotFound"
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/unit/test_arn.py b/tests/unit/test_arn.py
index fcbd8b4..8522ffd 100644
--- a/tests/unit/test_arn.py
+++ b/tests/unit/test_arn.py
@@ -67,6 +67,17 @@ class TestARN(unittest.TestCase):
r = l[0]
self.assertEqual(r.filtered_data, 't2.small')
+ def test_ec2_instance_not_found(self):
+ placebo_cfg = {
+ 'placebo': placebo,
+ 'placebo_dir': self._get_response_path('instances_3'),
+ 'placebo_mode': 'playback'}
+ arn = scan('arn:aws:ec2:us-west-2:123456789012:instance/i-87654321',
+ **placebo_cfg)
+ # Fetch all Instance resources
+ l = list(arn)
+ self.assertEqual(len(l), 0)
+
def test_ec2_volumes(self):
placebo_cfg = {
'placebo': placebo,
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
iniconfig==2.1.0
jmespath==1.0.1
mock==1.0.1
nose==1.3.4
packaging==24.2
placebo==0.4.3
platformdirs==4.3.7
pluggy==1.5.0
py==1.11.0
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==3.11
s3transfer==0.11.4
six==1.17.0
-e git+https://github.com/scopely-devops/skew.git@a379c1344b8cb630a5f9e9e8ff5e7ab0f7423df0#egg=skew
tomli==2.2.1
tox==1.8.1
urllib3==1.26.20
virtualenv==20.29.3
| name: skew
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- iniconfig==2.1.0
- jmespath==1.0.1
- mock==1.0.1
- nose==1.3.4
- packaging==24.2
- placebo==0.4.3
- platformdirs==4.3.7
- pluggy==1.5.0
- py==1.11.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- s3transfer==0.11.4
- six==1.17.0
- tomli==2.2.1
- tox==1.8.1
- urllib3==1.26.20
- virtualenv==20.29.3
prefix: /opt/conda/envs/skew
| [
"tests/unit/test_arn.py::TestARN::test_ec2_instance_not_found"
]
| []
| [
"tests/unit/test_arn.py::TestARN::test_cloudformation_stacks",
"tests/unit/test_arn.py::TestARN::test_ec2",
"tests/unit/test_arn.py::TestARN::test_ec2_keypairs",
"tests/unit/test_arn.py::TestARN::test_ec2_network_acls",
"tests/unit/test_arn.py::TestARN::test_ec2_routetable",
"tests/unit/test_arn.py::TestARN::test_ec2_securitygroup",
"tests/unit/test_arn.py::TestARN::test_ec2_volumes",
"tests/unit/test_arn.py::TestARN::test_ec2_vpcs",
"tests/unit/test_arn.py::TestARN::test_elb_loadbalancer",
"tests/unit/test_arn.py::TestARN::test_iam_groups",
"tests/unit/test_arn.py::TestARN::test_iam_users",
"tests/unit/test_arn.py::TestARN::test_s3_buckets"
]
| []
| Apache License 2.0 | 432 | [
"skew/resources/resource.py"
]
| [
"skew/resources/resource.py"
]
|
|
docker__docker-py-942 | c3a66cc5999a5435b81769ac758d411d34c995c4 | 2016-02-15 19:45:01 | 4c34be5d4ab8a5a017950712e9c96b56d78d1c58 | dnephin: Looks good, just one consideration for backwards compatibility. | diff --git a/docker/api/container.py b/docker/api/container.py
index ceac173f..8aa9aa2c 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -193,12 +193,14 @@ class ContainerApiMixin(object):
@utils.check_resource
def logs(self, container, stdout=True, stderr=True, stream=False,
- timestamps=False, tail='all', since=None):
+ timestamps=False, tail='all', since=None, follow=None):
if utils.compare_version('1.11', self._version) >= 0:
+ if follow is None:
+ follow = stream
params = {'stderr': stderr and 1 or 0,
'stdout': stdout and 1 or 0,
'timestamps': timestamps and 1 or 0,
- 'follow': stream and 1 or 0,
+ 'follow': follow and 1 or 0,
}
if utils.compare_version('1.13', self._version) >= 0:
if tail != 'all' and (not isinstance(tail, int) or tail < 0):
diff --git a/docs/api.md b/docs/api.md
index 00ccabca..32952bf3 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -677,6 +677,7 @@ output as it happens.
* timestamps (bool): Show timestamps
* tail (str or int): Output specified number of lines at the end of logs: `"all"` or `number`. Default `"all"`
* since (datetime or int): Show logs since a given datetime or integer epoch (in seconds)
+* follow (bool): Follow log output
**Returns** (generator or str):
| logs() separate param for stream and follow
From: https://github.com/docker/compose/pull/2720/files#r52222296
Currently the `follow` param is set based on `stream`
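For illustration, assuming a separate `follow` keyword were added, the two cases could look like this (`container_id` is a placeholder):

```python
import docker

client = docker.Client()  # connection settings omitted

# Stream whatever has been logged so far, then stop:
for chunk in client.logs(container_id, stream=True, follow=False):
    print(chunk)

# Stream and keep following new output as the container produces it:
for chunk in client.logs(container_id, stream=True, follow=True):
    print(chunk)
```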
I think `follow=True` does imply `stream=True`, but `stream=True` doesn't imply `follow=True`, you may still want to stream without following. | docker/docker-py | diff --git a/tests/integration/container_test.py b/tests/integration/container_test.py
index 1714599b..142299d3 100644
--- a/tests/integration/container_test.py
+++ b/tests/integration/container_test.py
@@ -666,7 +666,7 @@ Line2'''
logs = self.client.logs(id, tail=1)
self.assertEqual(logs, 'Line2\n'.encode(encoding='ascii'))
- def test_logs_streaming(self):
+ def test_logs_streaming_and_follow(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
BUSYBOX, 'echo {0}'.format(snippet)
@@ -675,7 +675,7 @@ Line2'''
self.tmp_containers.append(id)
self.client.start(id)
logs = six.binary_type()
- for chunk in self.client.logs(id, stream=True):
+ for chunk in self.client.logs(id, stream=True, follow=True):
logs += chunk
exitcode = self.client.wait(id)
diff --git a/tests/unit/container_test.py b/tests/unit/container_test.py
index c2b25734..d66eeede 100644
--- a/tests/unit/container_test.py
+++ b/tests/unit/container_test.py
@@ -1119,6 +1119,36 @@ class ContainerTest(DockerClientTest):
)
def test_log_streaming(self):
+ with mock.patch('docker.Client.inspect_container',
+ fake_inspect_container):
+ self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True,
+ follow=False)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/logs',
+ params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
+ 'tail': 'all'},
+ timeout=DEFAULT_TIMEOUT_SECONDS,
+ stream=True
+ )
+
+ def test_log_following(self):
+ with mock.patch('docker.Client.inspect_container',
+ fake_inspect_container):
+ self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
+ follow=True)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/logs',
+ params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
+ 'tail': 'all'},
+ timeout=DEFAULT_TIMEOUT_SECONDS,
+ stream=False
+ )
+
+ def test_log_following_backwards(self):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True)
@@ -1132,12 +1162,27 @@ class ContainerTest(DockerClientTest):
stream=True
)
+ def test_log_streaming_and_following(self):
+ with mock.patch('docker.Client.inspect_container',
+ fake_inspect_container):
+ self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True,
+ follow=True)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/logs',
+ params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
+ 'tail': 'all'},
+ timeout=DEFAULT_TIMEOUT_SECONDS,
+ stream=True
+ )
+
def test_log_tail(self):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
- tail=10)
+ follow=False, tail=10)
fake_request.assert_called_with(
'GET',
@@ -1153,7 +1198,7 @@ class ContainerTest(DockerClientTest):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
- since=ts)
+ follow=False, since=ts)
fake_request.assert_called_with(
'GET',
@@ -1170,7 +1215,7 @@ class ContainerTest(DockerClientTest):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
- since=time)
+ follow=False, since=time)
fake_request.assert_called_with(
'GET',
@@ -1188,7 +1233,7 @@ class ContainerTest(DockerClientTest):
with mock.patch('docker.Client._stream_raw_result',
m):
self.client.logs(fake_api.FAKE_CONTAINER_ID,
- stream=True)
+ follow=True, stream=True)
self.assertTrue(m.called)
fake_request.assert_called_with(
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/docker/docker-py.git@c3a66cc5999a5435b81769ac758d411d34c995c4#egg=docker_py
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.5.3
six==1.17.0
tomli==2.2.1
websocket_client==0.32.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.5.3
- six==1.17.0
- tomli==2.2.1
- websocket-client==0.32.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/container_test.py::ContainerTest::test_log_following",
"tests/unit/container_test.py::ContainerTest::test_log_since",
"tests/unit/container_test.py::ContainerTest::test_log_since_with_datetime",
"tests/unit/container_test.py::ContainerTest::test_log_streaming",
"tests/unit/container_test.py::ContainerTest::test_log_streaming_and_following",
"tests/unit/container_test.py::ContainerTest::test_log_tail",
"tests/unit/container_test.py::ContainerTest::test_log_tty"
]
| []
| [
"tests/unit/container_test.py::StartContainerTest::test_start_container",
"tests/unit/container_test.py::StartContainerTest::test_start_container_none",
"tests/unit/container_test.py::StartContainerTest::test_start_container_privileged",
"tests/unit/container_test.py::StartContainerTest::test_start_container_regression_573",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_binds_ro",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_binds_rw",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_dict_instead_of_id",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_links",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_links_as_list_of_tuples",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_lxc_conf",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_lxc_conf_compat",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_multiple_links",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_port_binds",
"tests/unit/container_test.py::CreateContainerTest::test_create_container",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_empty_volumes_from",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_privileged",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_added_capabilities",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_list",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_mode",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_mode_and_ro_error",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_ro",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_rw",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cgroup_parent",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cpu_shares",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cpuset",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_devices",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_dropped_capabilities",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_entrypoint",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_labels_dict",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_labels_list",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_links",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_links_as_list_of_tuples",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_lxc_conf",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_lxc_conf_compat",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mac_address",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_int",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_g_unit",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_k_unit",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_m_unit",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_wrong_value",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_multiple_links",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_named_volume",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_port_binds",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_ports",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_restart_policy",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_stdin_open",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_stop_signal",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_volume_string",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_volumes_from",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_working_dir",
"tests/unit/container_test.py::CreateContainerTest::test_create_named_container",
"tests/unit/container_test.py::ContainerTest::test_container_stats",
"tests/unit/container_test.py::ContainerTest::test_container_top",
"tests/unit/container_test.py::ContainerTest::test_container_top_with_psargs",
"tests/unit/container_test.py::ContainerTest::test_diff",
"tests/unit/container_test.py::ContainerTest::test_diff_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_export",
"tests/unit/container_test.py::ContainerTest::test_export_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_inspect_container",
"tests/unit/container_test.py::ContainerTest::test_inspect_container_undefined_id",
"tests/unit/container_test.py::ContainerTest::test_kill_container",
"tests/unit/container_test.py::ContainerTest::test_kill_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_kill_container_with_signal",
"tests/unit/container_test.py::ContainerTest::test_list_containers",
"tests/unit/container_test.py::ContainerTest::test_log_following_backwards",
"tests/unit/container_test.py::ContainerTest::test_logs",
"tests/unit/container_test.py::ContainerTest::test_logs_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_pause_container",
"tests/unit/container_test.py::ContainerTest::test_port",
"tests/unit/container_test.py::ContainerTest::test_remove_container",
"tests/unit/container_test.py::ContainerTest::test_remove_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_rename_container",
"tests/unit/container_test.py::ContainerTest::test_resize_container",
"tests/unit/container_test.py::ContainerTest::test_restart_container",
"tests/unit/container_test.py::ContainerTest::test_restart_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_stop_container",
"tests/unit/container_test.py::ContainerTest::test_stop_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_unpause_container",
"tests/unit/container_test.py::ContainerTest::test_wait",
"tests/unit/container_test.py::ContainerTest::test_wait_with_dict_instead_of_id"
]
| []
| Apache License 2.0 | 433 | [
"docs/api.md",
"docker/api/container.py"
]
| [
"docs/api.md",
"docker/api/container.py"
]
|
harlowja__fasteners-23 | 8b63aafd5a9cde3e506810b5df52174d016edd2d | 2016-02-15 22:24:40 | 8b63aafd5a9cde3e506810b5df52174d016edd2d | diff --git a/fasteners/process_lock.py b/fasteners/process_lock.py
index b5b7405..72e4f4d 100644
--- a/fasteners/process_lock.py
+++ b/fasteners/process_lock.py
@@ -214,30 +214,44 @@ class _InterProcessLock(object):
return os.path.exists(self.path)
def trylock(self):
- raise NotImplementedError()
+ self._trylock(self.lockfile)
def unlock(self):
+ self._unlock(self.lockfile)
+
+ @staticmethod
+ def _trylock():
+ raise NotImplementedError()
+
+ @staticmethod
+ def _unlock():
raise NotImplementedError()
class _WindowsLock(_InterProcessLock):
"""Interprocess lock implementation that works on windows systems."""
- def trylock(self):
- msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
+ @staticmethod
+ def _trylock(lockfile):
+ fileno = lockfile.fileno()
+ msvcrt.locking(fileno, msvcrt.LK_NBLCK, 1)
- def unlock(self):
- msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
+ @staticmethod
+ def _unlock(lockfile):
+ fileno = lockfile.fileno()
+ msvcrt.locking(fileno, msvcrt.LK_UNLCK, 1)
class _FcntlLock(_InterProcessLock):
"""Interprocess lock implementation that works on posix systems."""
- def trylock(self):
- fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ @staticmethod
+ def _trylock(lockfile):
+ fcntl.lockf(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
- def unlock(self):
- fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
+ @staticmethod
+ def _unlock(lockfile):
+ fcntl.lockf(lockfile, fcntl.LOCK_UN)
if os.name == 'nt':
| Process Lock tests assume POSIX
The `test_process_lock` module assumes we're on a POSIX system.
It *looks* like it can be fixed pretty easily, but I really don't know much about the details of locking on various platforms.
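For illustration only, a rough sketch of what a platform-neutral version of that locking step could look like, assuming the lock class exposed its per-platform primitives (the `_trylock`/`_unlock` helpers here are an assumption, not the current API):

```python
import tempfile
import fasteners.process_lock as pl

# Sketch: lock/unlock an open handle without calling fcntl directly,
# letting the implementation pick msvcrt (Windows) or fcntl (POSIX).
with tempfile.NamedTemporaryFile('w') as handle:
    pl.InterProcessLock._trylock(handle)
    pl.InterProcessLock._unlock(handle)
```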
Here we import `fcntl`: https://github.com/harlowja/fasteners/blob/master/fasteners/tests/test_process_lock.py#L19
And here we use it: https://github.com/harlowja/fasteners/blob/master/fasteners/tests/test_process_lock.py#L127
And here it looks like we could be using this instead: https://github.com/harlowja/fasteners/blob/master/fasteners/process_lock.py#L227 | harlowja/fasteners | diff --git a/fasteners/tests/test_process_lock.py b/fasteners/tests/test_process_lock.py
index 9e96589..d31632d 100644
--- a/fasteners/tests/test_process_lock.py
+++ b/fasteners/tests/test_process_lock.py
@@ -15,12 +15,12 @@
# License for the specific language governing permissions and limitations
# under the License.
+import contextlib
import errno
-import fcntl
import multiprocessing
import os
import shutil
-import signal
+import sys
import tempfile
import threading
import time
@@ -28,6 +28,8 @@ import time
from fasteners import process_lock as pl
from fasteners import test
+WIN32 = os.name == 'nt'
+
class BrokenLock(pl.InterProcessLock):
def __init__(self, name, errno_code):
@@ -43,6 +45,87 @@ class BrokenLock(pl.InterProcessLock):
raise err
[email protected]
+def scoped_child_processes(children, timeout=0.1, exitcode=0):
+ for child in children:
+ child.daemon = True
+ child.start()
+ yield
+ start = time.time()
+ timed_out = 0
+
+ for child in children:
+ child.join(max(timeout - (time.time() - start), 0))
+ if child.is_alive():
+ timed_out += 1
+ child.terminate()
+
+ if timed_out:
+ msg = "{} child processes killed due to timeout\n".format(timed_out)
+ sys.stderr.write(msg)
+
+ if exitcode is not None:
+ for child in children:
+ c_code = child.exitcode
+ msg = "Child exitcode {} != {}"
+ assert c_code == exitcode, msg.format(c_code, exitcode)
+
+
+def try_lock(lock_file):
+ try:
+ my_lock = pl.InterProcessLock(lock_file)
+ my_lock.lockfile = open(lock_file, 'w')
+ my_lock.trylock()
+ my_lock.unlock()
+ os._exit(1)
+ except IOError:
+ os._exit(0)
+
+
+def lock_files(lock_path, handles_dir, num_handles=50):
+ with pl.InterProcessLock(lock_path):
+
+ # Open some files we can use for locking
+ handles = []
+ for n in range(num_handles):
+ path = os.path.join(handles_dir, ('file-%s' % n))
+ handles.append(open(path, 'w'))
+
+ # Loop over all the handles and try locking the file
+ # without blocking, keep a count of how many files we
+ # were able to lock and then unlock. If the lock fails
+ # we get an IOError and bail out with bad exit code
+ count = 0
+ for handle in handles:
+ try:
+ pl.InterProcessLock._trylock(handle)
+ count += 1
+ pl.InterProcessLock._unlock(handle)
+ except IOError:
+ os._exit(2)
+ finally:
+ handle.close()
+
+ # Check if we were able to open all files
+ if count != num_handles:
+ raise AssertionError("Unable to open all handles")
+
+
+def inter_processlock_helper(lockname, lock_filename, pipe):
+ lock2 = pl.InterProcessLock(lockname)
+ lock2.lockfile = open(lock_filename, 'w')
+ have_lock = False
+ while not have_lock:
+ try:
+ lock2.trylock()
+ have_lock = True
+ except IOError:
+ pass
+ # Hold the lock and wait for the parent
+ pipe.send(None)
+ pipe.recv()
+
+
class ProcessLockTest(test.TestCase):
def setUp(self):
super(ProcessLockTest, self).setUp()
@@ -59,27 +142,13 @@ class ProcessLockTest(test.TestCase):
lock_file = os.path.join(self.lock_dir, 'lock')
lock = pl.InterProcessLock(lock_file)
- def try_lock():
- try:
- my_lock = pl.InterProcessLock(lock_file)
- my_lock.lockfile = open(lock_file, 'w')
- my_lock.trylock()
- my_lock.unlock()
- os._exit(1)
- except IOError:
- os._exit(0)
-
def attempt_acquire(count):
- children = []
- for i in range(count):
- child = multiprocessing.Process(target=try_lock)
- child.start()
- children.append(child)
- exit_codes = []
- for child in children:
- child.join()
- exit_codes.append(child.exitcode)
- return sum(exit_codes)
+ children = [
+ multiprocessing.Process(target=try_lock, args=(lock_file,))
+ for i in range(count)]
+ with scoped_child_processes(children, timeout=10, exitcode=None):
+ pass
+ return sum(c.exitcode for c in children)
self.assertTrue(lock.acquire())
try:
@@ -108,49 +177,17 @@ class ProcessLockTest(test.TestCase):
def _do_test_lock_externally(self, lock_dir):
lock_path = os.path.join(lock_dir, "lock")
- def lock_files(handles_dir):
- with pl.InterProcessLock(lock_path):
-
- # Open some files we can use for locking
- handles = []
- for n in range(50):
- path = os.path.join(handles_dir, ('file-%s' % n))
- handles.append(open(path, 'w'))
-
- # Loop over all the handles and try locking the file
- # without blocking, keep a count of how many files we
- # were able to lock and then unlock. If the lock fails
- # we get an IOError and bail out with bad exit code
- count = 0
- for handle in handles:
- try:
- fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
- count += 1
- fcntl.flock(handle, fcntl.LOCK_UN)
- except IOError:
- os._exit(2)
- finally:
- handle.close()
-
- # Check if we were able to open all files
- self.assertEqual(50, count)
-
handles_dir = tempfile.mkdtemp()
self.tmp_dirs.append(handles_dir)
- children = []
- for n in range(50):
- pid = os.fork()
- if pid:
- children.append(pid)
- else:
- try:
- lock_files(handles_dir)
- finally:
- os._exit(0)
- for child in children:
- (pid, status) = os.waitpid(child, 0)
- if pid:
- self.assertEqual(0, status)
+
+ num_handles = 50
+ num_processes = 50
+ args = [lock_path, handles_dir, num_handles]
+ children = [multiprocessing.Process(target=lock_files, args=args)
+ for _ in range(num_processes)]
+
+ with scoped_child_processes(children, timeout=30, exitcode=0):
+ pass
def test_lock_externally(self):
self._do_test_lock_externally(self.lock_dir)
@@ -180,16 +217,20 @@ class ProcessLockTest(test.TestCase):
def test_interprocess_lock(self):
lock_file = os.path.join(self.lock_dir, 'lock')
+ lock_name = 'foo'
+
+ child_pipe, them = multiprocessing.Pipe()
+ child = multiprocessing.Process(
+ target=inter_processlock_helper, args=(lock_name, lock_file, them))
+
+ with scoped_child_processes((child,)):
- pid = os.fork()
- if pid:
# Make sure the child grabs the lock first
+ if not child_pipe.poll(5):
+ self.fail('Timed out waiting for child to grab lock')
+
start = time.time()
- while not os.path.exists(lock_file):
- if time.time() - start > 5:
- self.fail('Timed out waiting for child to grab lock')
- time.sleep(0)
- lock1 = pl.InterProcessLock('foo')
+ lock1 = pl.InterProcessLock(lock_name)
lock1.lockfile = open(lock_file, 'w')
# NOTE(bnemec): There is a brief window between when the lock file
# is created and when it actually becomes locked. If we happen to
@@ -206,26 +247,10 @@ class ProcessLockTest(test.TestCase):
break
else:
self.fail('Never caught expected lock exception')
- # We don't need to wait for the full sleep in the child here
- os.kill(pid, signal.SIGKILL)
- else:
- try:
- lock2 = pl.InterProcessLock('foo')
- lock2.lockfile = open(lock_file, 'w')
- have_lock = False
- while not have_lock:
- try:
- lock2.trylock()
- have_lock = True
- except IOError:
- pass
- finally:
- # NOTE(bnemec): This is racy, but I don't want to add any
- # synchronization primitives that might mask a problem
- # with the one we're trying to test here.
- time.sleep(.5)
- os._exit(0)
+ child_pipe.send(None)
+
+ @test.testtools.skipIf(WIN32, "Windows cannot open file handles twice")
def test_non_destructive(self):
lock_file = os.path.join(self.lock_dir, 'not-destroyed')
with open(lock_file, 'w') as f:
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"testtools",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
-e git+https://github.com/harlowja/fasteners.git@8b63aafd5a9cde3e506810b5df52174d016edd2d#egg=fasteners
fixtures==4.0.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pbr==6.1.1
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
six==1.17.0
testtools==2.6.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: fasteners
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- fixtures==4.0.1
- monotonic==1.6
- nose==1.3.7
- pbr==6.1.1
- six==1.17.0
- testtools==2.6.0
prefix: /opt/conda/envs/fasteners
| [
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_externally",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_externally_lock_dir_not_exist"
]
| []
| [
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_bad_acquire",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_bad_release",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_interprocess_lock",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_acquire_release_file_lock",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_file_exists",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_nested_synchronized_external_works",
"fasteners/tests/test_process_lock.py::ProcessLockTest::test_non_destructive"
]
| []
| Apache License 2.0 | 434 | [
"fasteners/process_lock.py"
]
| [
"fasteners/process_lock.py"
]
|
|
nose-devs__nose2-268 | bbf5897eb1aa224100e86ba594042e4399fd2f5f | 2016-02-16 09:53:50 | bbf5897eb1aa224100e86ba594042e4399fd2f5f | little-dude: It's still failing when using the `uses` decorator. Added a test for this. | diff --git a/nose2/suite.py b/nose2/suite.py
index b52e0cb..f107489 100644
--- a/nose2/suite.py
+++ b/nose2/suite.py
@@ -22,6 +22,7 @@ class LayerSuite(unittest.BaseTestSuite):
self.wasSetup = False
def run(self, result):
+ self.handle_previous_test_teardown(result)
if not self._safeMethodCall(self.setUp, result):
return
try:
@@ -37,6 +38,21 @@ class LayerSuite(unittest.BaseTestSuite):
if self.wasSetup:
self._safeMethodCall(self.tearDown, result)
+ def handle_previous_test_teardown(self, result):
+ try:
+ prev = result._previousTestClass
+ except AttributeError:
+ return
+ layer_attr = getattr(prev, 'layer', None)
+ if isinstance(layer_attr, LayerSuite):
+ return
+ try:
+ suite_obj = unittest.suite.TestSuite()
+ suite_obj._tearDownPreviousClass(None, result)
+ suite_obj._handleModuleTearDown(result)
+ finally:
+ delattr(result, '_previousTestClass')
+
def setUp(self):
if self.layer is None:
return
| such and normal test cases: mixed up call order of it.has_setup and setUpClass
As it is a little bit complicated to explain, I have created a working example as a gist: https://gist.github.com/jrast/109f70f9b4c52bab4252
I have several "normal" UnitTestCases and a few such tests. The problem is that the setUpClass calls are mixed up with the setup calls in such tests, as can be seen in the log file. The documentation states that I can not use Layer and setUpClass functions in the same TestCase, but it does not state that I cant use them side by side wich is the case in my example. So the question is: is this a bug or is there a missing hint in the documentation.
Either way I think this is a dangerous behaviour because test fixtures made in a setup call can be modified from outside of the current test case wich can lead to very unpredictable behaviour and wrong test results. One example of wrong behaviour is shown with the UniqueResource class wich is locked in the setup functions and unlocked in the teardown functions. As soon as the calls get mixed up a exception is thrown because the resource is already locked.
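For illustration only (added in editing, not part of the original report), a minimal sketch of the guard the reporter describes — the full version appears in the test patch later in this record. Locking in the class/layer setup and unlocking in the teardown means any interleaving of the two fixture mechanisms surfaces immediately:
```python
class UniqueResource(object):
    """Shared guard: calling lock() twice before unlock() signals interleaved fixtures."""

    def __init__(self):
        self.used = False

    def lock(self):
        if self.used:
            raise Exception("Resource already used")
        self.used = True

    def unlock(self):
        if not self.used:
            raise Exception("Resource already unlocked")
        self.used = False
```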
I really hope this is not just a missing hint/warning in the docs; otherwise I have to rethink how I structure my tests... | nose-devs/nose2 | diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/__init__.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/common.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/common.py
new file mode 100644
index 0000000..ae24633
--- /dev/null
+++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/common.py
@@ -0,0 +1,60 @@
+import unittest
+import logging
+log = logging.getLogger(__name__)
+
+
+class UniqueResource(object):
+ _instance = None
+ used = False
+
+ def __new__(cls, *args, **kwargs):
+ if not cls._instance:
+ cls._instance = super(UniqueResource, cls).__new__(
+ cls, *args, **kwargs)
+ return cls._instance
+
+ def lock(self):
+ if not self.used:
+ self.used = True
+ else:
+ raise Exception("Resource allready used")
+
+ def unlock(self):
+ if self.used:
+ self.used = False
+ else:
+ raise Exception("Resource already unlocked")
+
+
+class NormalTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ log.info("Called setUpClass in NormalTest")
+ cls.unique_resource = UniqueResource()
+ cls.unique_resource.lock()
+
+ @classmethod
+ def tearDownClass(cls):
+ log.info("Called tearDownClass in NormalTest")
+ cls.unique_resource.unlock()
+
+ def test(self):
+ self.assertTrue(self.unique_resource.used)
+
+
+class NormalTestTwo(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ log.info("Called setUpClass in NormalTestTwo")
+ cls.unique_resource = UniqueResource()
+ cls.unique_resource.lock()
+
+ @classmethod
+ def tearDownClass(cls):
+ log.info("Called tearDownClass in NormalTestTwo")
+ cls.unique_resource.unlock()
+
+ def test(self):
+ self.assertTrue(self.unique_resource.used)
diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py
new file mode 100644
index 0000000..4fb9c8c
--- /dev/null
+++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py
@@ -0,0 +1,67 @@
+import unittest
+import logging
+from .common import UniqueResource, NormalTest, NormalTestTwo
+log = logging.getLogger(__name__)
+
+
+class Layer1(object):
+
+ @classmethod
+ def setUp(cls):
+ log.info("Called setup in layer 1")
+ cls.unique_resource = UniqueResource()
+ cls.unique_resource.lock()
+
+ @classmethod
+ def tearDown(cls):
+ log.info("Called teardown in layer 2")
+ cls.unique_resource.unlock()
+
+
+class Layer2(object):
+
+ @classmethod
+ def setUp(cls):
+ log.info("Called setup in layer 2")
+ cls.unique_resource = UniqueResource()
+ cls.unique_resource.lock()
+
+ @classmethod
+ def tearDown(cls):
+ log.info("Called teardown in layer 2")
+ cls.unique_resource.unlock()
+
+
+class Layer3(Layer2):
+
+ @classmethod
+ def setUp(cls):
+ log.info("Called setup in layer 3")
+
+ @classmethod
+ def tearDown(cls):
+ log.info("Called teardown in layer 3")
+
+
+class LayerTest1(unittest.TestCase):
+
+ layer = Layer1
+
+ def test(self):
+ self.assertTrue(self.layer.unique_resource.used)
+
+
+class LayerTest2(unittest.TestCase):
+
+ layer = Layer2
+
+ def test(self):
+ self.assertTrue(self.layer.unique_resource.used)
+
+
+class LayerTest3(unittest.TestCase):
+
+ layer = Layer2
+
+ def test(self):
+ self.assertTrue(self.layer.unique_resource.used)
diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py
new file mode 100644
index 0000000..14268fe
--- /dev/null
+++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py
@@ -0,0 +1,24 @@
+from nose2.tools import such
+import logging
+from .common import UniqueResource, NormalTest, NormalTestTwo
+log = logging.getLogger(__name__)
+
+
+with such.A('system with setup') as it:
+
+ @it.has_setup
+ def setup():
+ log.info("Called setup in such test")
+ it.unique_resource = UniqueResource()
+ it.unique_resource.lock()
+
+ @it.has_teardown
+ def teardown():
+ log.info("Called teardown in such test")
+ it.unique_resource.unlock()
+
+ @it.should('do something')
+ def test(case):
+ it.assertTrue(it.unique_resource.used)
+
+it.createTests(globals())
diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py
new file mode 100644
index 0000000..ee15921
--- /dev/null
+++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py
@@ -0,0 +1,51 @@
+from nose2.tools import such
+import logging
+from .common import UniqueResource, NormalTest, NormalTestTwo
+log = logging.getLogger(__name__)
+
+
+class Layer1(object):
+
+ description = 'Layer1'
+
+ @classmethod
+ def setUp(cls):
+ log.info("Called setup in layer 1")
+ it.unique_resource = UniqueResource()
+ it.unique_resource.lock()
+
+ @classmethod
+ def tearDown(cls):
+ log.info("Called teardown in layer 2")
+ it.unique_resource.unlock()
+
+
+class Layer2(object):
+
+ description = 'Layer2'
+
+ @classmethod
+ def setUp(cls):
+ log.info("Called setup in layer 2")
+
+ @classmethod
+ def tearDown(cls):
+ log.info("Called teardown in layer 2")
+
+with such.A('system with setup') as it:
+
+ it.uses(Layer1)
+
+ @it.should('do something')
+ def test(case):
+ it.assertTrue(it.unique_resource.used)
+
+ with it.having('another setup'):
+
+ it.uses(Layer2)
+
+ @it.should('do something else')
+ def test(case):
+ it.assertTrue(it.unique_resource.used)
+
+it.createTests(globals())
diff --git a/nose2/tests/functional/test_layers_plugin.py b/nose2/tests/functional/test_layers_plugin.py
index e072854..9666dba 100644
--- a/nose2/tests/functional/test_layers_plugin.py
+++ b/nose2/tests/functional/test_layers_plugin.py
@@ -120,3 +120,14 @@ Base
self.assertTestRunOutputMatches(proc, stderr='ERROR: LayerSuite')
self.assertTestRunOutputMatches(proc, stderr='FAIL')
self.assertTestRunOutputMatches(proc, stderr='Bad Error in Layer setUp!')
+
+ def test_layers_and_non_layers(self):
+ proc = self.runIn(
+ 'scenario/',
+ 'layers_and_non_layers',
+ '-v',
+ '--plugin=nose2.plugins.layers',
+ )
+ self.assertTestRunOutputMatches(proc, stderr='Ran 12 tests in')
+ self.assertTestRunOutputMatches(proc, stderr='OK')
+ self.assertEqual(proc.poll(), 0)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose2",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cov-core==1.15.0
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/nose-devs/nose2.git@bbf5897eb1aa224100e86ba594042e4399fd2f5f#egg=nose2
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
six==1.17.0
tomli==2.2.1
| name: nose2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cov-core==1.15.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/nose2
| [
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layers_and_non_layers"
]
| [
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::LayerTest1::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::LayerTest2::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::LayerTest3::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py::A",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::Layer1",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::A",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::having"
]
| [
"nose2/tests/functional/support/scenario/layers_and_non_layers/common.py::NormalTest::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/common.py::NormalTestTwo::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::NormalTest::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::NormalTestTwo::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py::NormalTest::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py::NormalTestTwo::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::NormalTest::test",
"nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::NormalTestTwo::test",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layer_reporter_error_output",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layer_reporter_output",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layers_and_attributes",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_methods_run_once_per_class",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_runs_layer_fixtures",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_scenario_fails_without_plugin",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_setup_fail",
"nose2/tests/functional/test_layers_plugin.py::TestLayers::test_teardown_fail"
]
| []
| BSD | 435 | [
"nose2/suite.py"
]
| [
"nose2/suite.py"
]
|
nose-devs__nose2-269 | bbf5897eb1aa224100e86ba594042e4399fd2f5f | 2016-02-16 12:45:43 | bbf5897eb1aa224100e86ba594042e4399fd2f5f | little-dude: I need to add a test for this before merging.
@slemesle let me know if you want to be the author of this commit, since you proposed the fix. | diff --git a/nose2/plugins/junitxml.py b/nose2/plugins/junitxml.py
index c90450e..e61a08b 100644
--- a/nose2/plugins/junitxml.py
+++ b/nose2/plugins/junitxml.py
@@ -4,15 +4,71 @@ Output test reports in junit-xml format.
This plugin implements :func:`startTest`, :func:`testOutcome` and
:func:`stopTestRun` to compile and then output a test report in
junit-xml format. By default, the report is written to a file called
-``nose2-junit.xml`` in the current working directory.
+``nose2-junit.xml`` in the current working directory.
You can configure the output filename by setting ``path`` in a ``[junit-xml]``
section in a config file. Unicode characters which are invalid in XML 1.0
-are replaced with the ``U+FFFD`` replacement character. In the case that your
-software throws an error with an invalid byte string.
+are replaced with the ``U+FFFD`` replacement character. In the case that your
+software throws an error with an invalid byte string.
-By default, the ranges of discouraged characters are replaced as well. This can be
-changed by setting the ``keep_restricted`` configuration variable to ``True``.
+By default, the ranges of discouraged characters are replaced as well. This can
+be changed by setting the ``keep_restricted`` configuration variable to
+``True``.
+
+By default, the arguments of parametrized and generated tests are not printed.
+For instance, the following code:
+
+.. code-block:: python
+
+ # a.py
+
+ from nose2 import tools
+
+ def test_gen():
+ def check(a, b):
+ assert a == b, '{}!={}'.format(a,b)
+
+ yield check, 99, 99
+ yield check, -1, -1
+
+ @tools.params('foo', 'bar')
+ def test_params(arg):
+ assert arg in ['foo', 'bar', 'baz']
+
+Produces this XML by default:
+
+.. code-block:: xml
+
+ <testcase classname="a" name="test_gen:1" time="0.000171">
+ <system-out />
+ </testcase>
+ <testcase classname="a" name="test_gen:2" time="0.000202">
+ <system-out />
+ </testcase>
+ <testcase classname="a" name="test_params:1" time="0.000159">
+ <system-out />
+ </testcase>
+ <testcase classname="a" name="test_params:2" time="0.000163">
+ <system-out />
+ </testcase>
+
+But if ``test_fullname`` is ``True``, then the following XML is
+produced:
+
+.. code-block:: xml
+
+ <testcase classname="a" name="test_gen:1 (99, 99)" time="0.000213">
+ <system-out />
+ </testcase>
+ <testcase classname="a" name="test_gen:2 (-1, -1)" time="0.000194">
+ <system-out />
+ </testcase>
+ <testcase classname="a" name="test_params:1 ('foo')" time="0.000178">
+ <system-out />
+ </testcase>
+ <testcase classname="a" name="test_params:2 ('bar')" time="0.000187">
+ <system-out />
+ </testcase>
"""
# Based on unittest2/plugins/junitxml.py,
@@ -39,10 +95,12 @@ class JUnitXmlReporter(events.Plugin):
def __init__(self):
self.path = os.path.realpath(
self.config.as_str('path', default='nose2-junit.xml'))
- self.keep_restricted = self.config.as_bool('keep_restricted',
- default=False)
- self.test_properties = self.config.as_str('test_properties',
- default=None)
+ self.keep_restricted = self.config.as_bool(
+ 'keep_restricted', default=False)
+ self.test_properties = self.config.as_str(
+ 'test_properties', default=None)
+ self.test_fullname = self.config.as_bool(
+ 'test_fullname', default=False)
if self.test_properties is not None:
self.test_properties_path = os.path.realpath(self.test_properties)
self.errors = 0
@@ -60,11 +118,15 @@ class JUnitXmlReporter(events.Plugin):
def testOutcome(self, event):
"""Add test outcome to xml tree"""
test = event.test
- testid = test.id().split('\n')[0]
- # split into module, class, method parts... somehow
+ testid_lines = test.id().split('\n')
+ testid = testid_lines[0]
parts = testid.split('.')
classname = '.'.join(parts[:-1])
method = parts[-1]
+ # for generated test cases
+ if len(testid_lines) > 1 and self.test_fullname:
+ test_args = ':'.join(testid_lines[1:])
+ method = '%s (%s)' % (method, test_args)
testcase = ET.SubElement(self.tree, 'testcase')
testcase.set('time', "%.6f" % self._time())
| junit-xml does not output generated test parameters
I use generated tests with meaningful parameters, and I wish I could see them exported in the junit-xml report.
The patch looks pretty simple (junitxml.py):
```python
def testOutcome(self, event):
"""Add test outcome to xml tree"""
test = event.test
testid_lines = test.id().split('\n')
testid = testid_lines[0]
# split into module, class, method parts... somehow
parts = testid.split('.')
classname = '.'.join(parts[:-1])
method = parts[-1]
if len(testid_lines) > 1:
method = '%s:' % method + ':'.join(testid_lines[1:])
```
With this change we get all the parameters in the report, as requested.
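As an editor's sketch only (not from the issue): the change is plain string handling on `test.id()`. The id value below is hypothetical — the exact format nose2 produces for generated tests is not shown here — and the formatting mirrors the parenthesised style used in the final patch rather than the colon style in the snippet above:
```python
def full_test_name(testid):
    # Lines after the first are assumed to carry the generated/parametrized
    # arguments; the first line is the usual dotted id.
    lines = testid.split('\n')
    parts = lines[0].split('.')
    classname, method = '.'.join(parts[:-1]), parts[-1]
    if len(lines) > 1:
        method = '%s (%s)' % (method, ':'.join(lines[1:]))
    return classname, method

# Hypothetical id, for shape only:
print(full_test_name('pkg.test_a.test_gen:1\n99, 99'))
# -> ('pkg.test_a', 'test_gen:1 (99, 99)')
```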
| nose-devs/nose2 | diff --git a/nose2/tests/unit/test_junitxml.py b/nose2/tests/unit/test_junitxml.py
index 6ffef43..706daa2 100644
--- a/nose2/tests/unit/test_junitxml.py
+++ b/nose2/tests/unit/test_junitxml.py
@@ -186,6 +186,20 @@ class TestJunitXmlPlugin(TestCase):
self.assertEqual(xml[0].get('name'), 'test_gen:1')
self.assertEqual(xml[1].get('name'), 'test_gen:2')
+ def test_generator_test_full_name_correct(self):
+ gen = generators.Generators(session=self.session)
+ gen.register()
+ self.plugin.test_fullname = True
+ event = events.LoadFromTestCaseEvent(self.loader, self.case)
+ self.session.hooks.loadTestsFromTestCase(event)
+ cases = event.extraTests
+ for case in cases:
+ case(self.result)
+ xml = self.plugin.tree.findall('testcase')
+ self.assertEqual(len(xml), 2)
+ self.assertEqual(xml[0].get('name'), 'test_gen:1 (1, 1)')
+ self.assertEqual(xml[1].get('name'), 'test_gen:2 (1, 2)')
+
def test_params_test_name_correct(self):
# param test loading is a bit more complex than generator
# loading. XXX -- can these be reconciled so they both
@@ -213,6 +227,31 @@ class TestJunitXmlPlugin(TestCase):
self.assertEqual(params[1].get('name'), 'test_params:2')
self.assertEqual(params[2].get('name'), 'test_params:3')
+ def test_params_test_full_name_correct(self):
+ plug1 = parameters.Parameters(session=self.session)
+ plug1.register()
+ plug2 = testcases.TestCaseLoader(session=self.session)
+ plug2.register()
+ # need module to fire top-level event
+
+ class Mod(object):
+ pass
+
+ m = Mod()
+ m.Test = self.case
+ event = events.LoadFromModuleEvent(self.loader, m)
+ self.plugin.test_fullname = True
+ self.session.hooks.loadTestsFromModule(event)
+ for case in event.extraTests:
+ case(self.result)
+ xml = self.plugin.tree.findall('testcase')
+ self.assertEqual(len(xml), 12)
+ params = [x for x in xml if x.get('name').startswith('test_params')]
+ self.assertEqual(len(params), 3)
+ self.assertEqual(params[0].get('name'), 'test_params:1 (1)')
+ self.assertEqual(params[1].get('name'), 'test_params:2 (2)')
+ self.assertEqual(params[2].get('name'), 'test_params:3 (3)')
+
def test_writes_xml_file_at_end(self):
test = self.case('test')
test(self.result)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose2",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
cov-core==1.15.0
coverage==7.2.7
exceptiongroup==1.2.2
importlib-metadata==6.7.0
iniconfig==2.0.0
-e git+https://github.com/nose-devs/nose2.git@bbf5897eb1aa224100e86ba594042e4399fd2f5f#egg=nose2
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: nose2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cov-core==1.15.0
- coverage==7.2.7
- exceptiongroup==1.2.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/nose2
| [
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_full_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_full_name_correct"
]
| []
| [
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b_keep",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_keep",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_includes_traceback",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_failure_includes_traceback",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_skip_includes_skipped",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_success_added_to_xml",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_writes_xml_file_at_end",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_empty_system_err_without_logcapture",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_log_message_in_system_err_with_logcapture",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_file_path_is_not_affected_by_chdir_in_test"
]
| []
| BSD | 436 | [
"nose2/plugins/junitxml.py"
]
| [
"nose2/plugins/junitxml.py"
]
|
scrapy__scrapy-1786 | 41588397c04356f2b0c393b61ed68271a08d6ccd | 2016-02-17 15:59:16 | a975a50558cd78a1573bee2e957afcb419fd1bd6 | diff --git a/scrapy/responsetypes.py b/scrapy/responsetypes.py
index 4880cc7b9..c667b141d 100644
--- a/scrapy/responsetypes.py
+++ b/scrapy/responsetypes.py
@@ -59,7 +59,8 @@ class ResponseTypes(object):
def from_content_disposition(self, content_disposition):
try:
- filename = to_native_str(content_disposition).split(';')[1].split('=')[1]
+ filename = to_native_str(content_disposition,
+ encoding='latin-1', errors='replace').split(';')[1].split('=')[1]
filename = filename.strip('"\'')
return self.from_filename(filename)
except IndexError:
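Editor's illustration (not part of the patch or the issue): latin-1 with `errors='replace'` is a safe fallback here because latin-1 maps every possible byte to a code point, so arbitrary header bytes never raise `UnicodeDecodeError`. The byte string below is made up, reusing the `0xb8` byte from the traceback in the issue that follows:
```python
raw = b'attachment;filename=data\xb8.doc'  # hypothetical non-UTF-8 header value
try:
    raw.decode('utf-8')
except UnicodeDecodeError as exc:
    print(exc)  # the failure reported in the issue below
print(raw.decode('latin-1', 'replace'))  # latin-1 covers every byte, so this never raises
```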
| PY3: error decoding Content-Disposition header
This request
```
scrapy shell 'http://npe.com.cn/plus/save_to_doc.php?id=1666'
```
raises this error:
```
Traceback (most recent call last):
File "/Users/kmike/envs/dl/bin/scrapy", line 9, in <module>
load_entry_point('Scrapy', 'console_scripts', 'scrapy')()
File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 142, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 88, in _run_print_help
func(*a, **kw)
File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 149, in _run_command
cmd.run(args, opts)
File "/Users/kmike/svn/scrapy/scrapy/commands/shell.py", line 71, in run
shell.start(url=url)
File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 47, in start
self.fetch(url, spider)
File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 112, in fetch
reactor, self._schedule, request, spider)
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/threads.py", line 122, in blockingCallFromThread
result.raiseException()
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 368, in raiseException
raise self.value.with_traceback(self.tb)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte
```
The error points to a wrong location (similar to #1760); the real traceback is
```
Traceback (most recent call last):
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 1126, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 389, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/Users/kmike/svn/scrapy/scrapy/core/downloader/middleware.py", line 43, in process_request
defer.returnValue((yield download_func(request=request,spider=spider)))
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 588, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/Users/kmike/svn/scrapy/scrapy/core/downloader/handlers/http11.py", line 272, in _cb_bodydone
respcls = responsetypes.from_args(headers=headers, url=url)
File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 110, in from_args
cls = self.from_headers(headers)
File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 78, in from_headers
cls = self.from_content_disposition(headers[b'Content-Disposition'])
File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 62, in from_content_disposition
filename = to_native_str(content_disposition).split(';')[1].split('=')[1]
File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 129, in to_native_str
return to_unicode(text, encoding, errors)
File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 107, in to_unicode
return text.decode(encoding, errors)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte
```
It looks like Content-Disposition is decoded using utf-8, but the encoding was not UTF-8. | scrapy/scrapy | diff --git a/tests/test_responsetypes.py b/tests/test_responsetypes.py
index 2374d518f..118136ac4 100644
--- a/tests/test_responsetypes.py
+++ b/tests/test_responsetypes.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
import unittest
from scrapy.responsetypes import responsetypes
@@ -20,8 +21,14 @@ class ResponseTypesTest(unittest.TestCase):
def test_from_content_disposition(self):
mappings = [
- ('attachment; filename="data.xml"', XmlResponse),
- ('attachment; filename=data.xml', XmlResponse),
+ (b'attachment; filename="data.xml"', XmlResponse),
+ (b'attachment; filename=data.xml', XmlResponse),
+ (u'attachment;filename=data£.tar.gz'.encode('utf-8'), Response),
+ (u'attachment;filename=dataµ.tar.gz'.encode('latin-1'), Response),
+ (u'attachment;filename=data高.doc'.encode('gbk'), Response),
+ (u'attachment;filename=دورهdata.html'.encode('cp720'), HtmlResponse),
+ (u'attachment;filename=日本語版Wikipedia.xml'.encode('iso2022_jp'), XmlResponse),
+
]
for source, cls in mappings:
retcls = responsetypes.from_content_disposition(source)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@41588397c04356f2b0c393b61ed68271a08d6ccd#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_responsetypes.py::ResponseTypesTest::test_from_content_disposition"
]
| []
| [
"tests/test_responsetypes.py::ResponseTypesTest::test_custom_mime_types_loaded",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_args",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_body",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_content_type",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_filename",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_headers"
]
| []
| BSD 3-Clause "New" or "Revised" License | 438 | [
"scrapy/responsetypes.py"
]
| [
"scrapy/responsetypes.py"
]
|
|
zalando-stups__senza-178 | 6ef4b11c9246de1010b1c62176a79c61178f0a9b | 2016-02-18 12:36:42 | 35b73f49b8cb58e7892908413bdf2a61cfe3058e | diff --git a/senza/components/auto_scaling_group.py b/senza/components/auto_scaling_group.py
index 39fb634..c3c58c2 100644
--- a/senza/components/auto_scaling_group.py
+++ b/senza/components/auto_scaling_group.py
@@ -145,13 +145,14 @@ def component_auto_scaling_group(definition, configuration, args, info, force, a
asg_properties["MinSize"] = as_conf["Minimum"]
asg_properties["DesiredCapacity"] = max(int(as_conf["Minimum"]), int(as_conf.get('DesiredCapacity', 1)))
+ scaling_adjustment = int(as_conf.get("ScalingAdjustment", 1))
# ScaleUp policy
definition["Resources"][asg_name + "ScaleUp"] = {
"Type": "AWS::AutoScaling::ScalingPolicy",
"Properties": {
"AdjustmentType": "ChangeInCapacity",
- "ScalingAdjustment": "1",
- "Cooldown": "60",
+ "ScalingAdjustment": str(scaling_adjustment),
+ "Cooldown": str(as_conf.get("Cooldown", "60")),
"AutoScalingGroupName": {
"Ref": asg_name
}
@@ -163,8 +164,8 @@ def component_auto_scaling_group(definition, configuration, args, info, force, a
"Type": "AWS::AutoScaling::ScalingPolicy",
"Properties": {
"AdjustmentType": "ChangeInCapacity",
- "ScalingAdjustment": "-1",
- "Cooldown": "60",
+ "ScalingAdjustment": str((-1) * scaling_adjustment),
+ "Cooldown": str(as_conf.get("Cooldown", "60")),
"AutoScalingGroupName": {
"Ref": asg_name
}
@@ -295,15 +296,18 @@ def metric_network(asg_name, definition, configuration, args, info, force):
def metric_cpu(asg_name, definition, configuration, args, info, force):
+ period = int(configuration.get("Period", 300))
+ evaluation_periods = int(configuration.get("EvaluationPeriods", 2))
+ statistic = configuration.get("Statistic", "Average")
if "ScaleUpThreshold" in configuration:
definition["Resources"][asg_name + "CPUAlarmHigh"] = {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"MetricName": "CPUUtilization",
"Namespace": "AWS/EC2",
- "Period": "300",
- "EvaluationPeriods": "2",
- "Statistic": "Average",
+ "Period": str(period),
+ "EvaluationPeriods": str(evaluation_periods),
+ "Statistic": statistic,
"Threshold": configuration["ScaleUpThreshold"],
"ComparisonOperator": "GreaterThanThreshold",
"Dimensions": [
@@ -312,7 +316,11 @@ def metric_cpu(asg_name, definition, configuration, args, info, force):
"Value": {"Ref": asg_name}
}
],
- "AlarmDescription": "Scale-up if CPU > {0}% for 10 minutes".format(configuration["ScaleUpThreshold"]),
+ "AlarmDescription": "Scale-up if CPU > {}% for {} minutes ({})".format(
+ configuration["ScaleUpThreshold"],
+ (period / 60) * evaluation_periods,
+ statistic
+ ),
"AlarmActions": [
{"Ref": asg_name + "ScaleUp"}
]
@@ -325,9 +333,9 @@ def metric_cpu(asg_name, definition, configuration, args, info, force):
"Properties": {
"MetricName": "CPUUtilization",
"Namespace": "AWS/EC2",
- "Period": "300",
- "EvaluationPeriods": "2",
- "Statistic": "Average",
+ "Period": str(period),
+ "EvaluationPeriods": str(evaluation_periods),
+ "Statistic": statistic,
"Threshold": configuration["ScaleDownThreshold"],
"ComparisonOperator": "LessThanThreshold",
"Dimensions": [
@@ -336,8 +344,11 @@ def metric_cpu(asg_name, definition, configuration, args, info, force):
"Value": {"Ref": asg_name}
}
],
- "AlarmDescription": "Scale-down if CPU < {0}% for 10 minutes".format(
- configuration["ScaleDownThreshold"]),
+ "AlarmDescription": "Scale-down if CPU < {}% for {} minutes ({})".format(
+ configuration["ScaleDownThreshold"],
+ (period / 60) * evaluation_periods,
+ statistic
+ ),
"AlarmActions": [
{"Ref": asg_name + "ScaleDown"}
]
| Make some scaling properties configurable
* Period
* Cooldown
* Scaling adjustment | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 1e14885..beb6f5d 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -396,6 +396,59 @@ def test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dic
assert expected_user_data == generate_user_data(configuration)
+def test_component_auto_scaling_group_configurable_properties():
+ definition = {"Resources": {}}
+ configuration = {
+ 'Name': 'Foo',
+ 'InstanceType': 't2.micro',
+ 'Image': 'foo',
+ 'AutoScaling': {
+ 'Minimum': 2,
+ 'Maximum': 10,
+ 'MetricType': 'CPU',
+ 'Period': 60,
+ 'ScaleUpThreshold': 50,
+ 'ScaleDownThreshold': 20,
+ 'EvaluationPeriods': 1,
+ 'Cooldown': 30,
+ 'Statistic': 'Maximum'
+ }
+ }
+
+ args = MagicMock()
+ args.region = "foo"
+
+ info = {
+ 'StackName': 'FooStack',
+ 'StackVersion': 'FooVersion'
+ }
+
+ result = component_auto_scaling_group(definition, configuration, args, info, False, MagicMock())
+
+ assert result["Resources"]["FooScaleUp"] is not None
+ assert result["Resources"]["FooScaleUp"]["Properties"] is not None
+ assert result["Resources"]["FooScaleUp"]["Properties"]["ScalingAdjustment"] == "1"
+ assert result["Resources"]["FooScaleUp"]["Properties"]["Cooldown"] == "30"
+
+ assert result["Resources"]["FooScaleDown"] is not None
+ assert result["Resources"]["FooScaleDown"]["Properties"] is not None
+ assert result["Resources"]["FooScaleDown"]["Properties"]["Cooldown"] == "30"
+ assert result["Resources"]["FooScaleDown"]["Properties"]["ScalingAdjustment"] == "-1"
+
+ assert result["Resources"]["Foo"] is not None
+ assert result["Resources"]["Foo"]["Properties"] is not None
+ assert result["Resources"]["Foo"]["Properties"]["HealthCheckType"] == "EC2"
+ assert result["Resources"]["Foo"]["Properties"]["MinSize"] == 2
+ assert result["Resources"]["Foo"]["Properties"]["DesiredCapacity"] == 2
+ assert result["Resources"]["Foo"]["Properties"]["MaxSize"] == 10
+
+ expected_desc = "Scale-down if CPU < 20% for 1.0 minutes (Maximum)"
+ assert result["Resources"]["FooCPUAlarmHigh"]["Properties"]["Statistic"] == "Maximum"
+ assert result["Resources"]["FooCPUAlarmLow"]["Properties"]["Period"] == "60"
+ assert result["Resources"]["FooCPUAlarmHigh"]["Properties"]["EvaluationPeriods"] == "1"
+ assert result["Resources"]["FooCPUAlarmLow"]["Properties"]["AlarmDescription"] == expected_desc
+
+
def test_component_auto_scaling_group_metric_type():
definition = {"Resources": {}}
configuration = {
@@ -410,7 +463,7 @@ def test_component_auto_scaling_group_metric_type():
'EvaluationPeriods': 10,
'ScaleUpThreshold': '50 TB',
'ScaleDownThreshold': '10',
- 'Statistic': 'Maximum',
+ 'Statistic': 'Maximum'
}
}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@6ef4b11c9246de1010b1c62176a79c61178f0a9b#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties"
]
| []
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_normalize_network_threshold"
]
| []
| Apache License 2.0 | 439 | [
"senza/components/auto_scaling_group.py"
]
| [
"senza/components/auto_scaling_group.py"
]
|
|
scieloorg__xylose-91 | 6c03b8cee93a3ef4a1f57906f1b7e28b350edab4 | 2016-02-18 15:56:42 | 6c03b8cee93a3ef4a1f57906f1b7e28b350edab4 | diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index becd530..15d73da 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -69,8 +69,17 @@ class Journal(object):
"""
This method creates an object level attributes (print_issn and/or
electronic issn), according to the given metadata.
- This method deal with the legacy datamodel fields (935, 400, 35) where:
+ This method deal with the legacy datamodel fields (935, 400, 35, 435) where:
"""
+
+ if 'v435' in self.data:
+ for item in self.data['v435']:
+ if 't' in item and item['t'] == 'PRINT':
+ self.print_issn = item['_']
+ if 't' in item and item['t'] == 'ONLIN':
+ self.electronic_issn = item['_']
+ return None
+
if not 'v35' in self.data:
return None
| Consider field v435 when obtaining the ISSN types
Include field v435 in the analysis used to obtain the ISSN in the Journal class.
E.g., Biota Neotropica:
35 "ONLIN"
400 "1676-0603"
**435 "1678-6424^tPRINT"
435 "1676-0611^tONLIN"**
935 "1676-0611"
| scieloorg/xylose | diff --git a/tests/test_document.py b/tests/test_document.py
index 8b47b82..fa8d4c2 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -284,6 +284,19 @@ class JournalTests(unittest.TestCase):
self.assertEqual(journal.update_date, '2012-08-24')
+ def test_load_issn_with_v435(self):
+ self.fulldoc['title']['v35'] = [{u'_': u'PRINT'}]
+ self.fulldoc['title']['v400'] = [{u'_': u'2222-2222'}]
+ self.fulldoc['title']['v435'] = [
+ {u'_': u'0000-0000', 't': 'ONLIN'},
+ {u'_': u'9999-9999', 't': 'PRINT'}
+ ]
+
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertEqual(journal.print_issn, u'9999-9999')
+ self.assertEqual(journal.electronic_issn, u'0000-0000')
+
def test_load_issn_with_v935_without_v35(self):
del(self.fulldoc['title']['v35'])
self.fulldoc['title']['v400'] = [{u'_': u'2222-2222'}]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.44 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"nose",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
-e git+https://github.com/scieloorg/xylose.git@6c03b8cee93a3ef4a1f57906f1b7e28b350edab4#egg=xylose
| name: xylose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/xylose
| [
"tests/test_document.py::JournalTests::test_load_issn_with_v435"
]
| []
| [
"tests/test_document.py::ToolsTests::test_get_date_wrong_day",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_year",
"tests/test_document.py::ToolsTests::test_get_date_year_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day_31",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined",
"tests/test_document.py::ToolsTests::test_get_language_without_iso_format",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_print",
"tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print",
"tests/test_document.py::JournalTests::test_collection_acronym",
"tests/test_document.py::JournalTests::test_creation_date",
"tests/test_document.py::JournalTests::test_current_status",
"tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_current_status_some_changes",
"tests/test_document.py::JournalTests::test_current_without_v51",
"tests/test_document.py::JournalTests::test_journal",
"tests/test_document.py::JournalTests::test_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_journal_acronym",
"tests/test_document.py::JournalTests::test_journal_fulltitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_title",
"tests/test_document.py::JournalTests::test_journal_subtitle",
"tests/test_document.py::JournalTests::test_journal_title",
"tests/test_document.py::JournalTests::test_journal_title_nlm",
"tests/test_document.py::JournalTests::test_journal_url",
"tests/test_document.py::JournalTests::test_journal_without_subtitle",
"tests/test_document.py::JournalTests::test_languages",
"tests/test_document.py::JournalTests::test_languages_without_v350",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35",
"tests/test_document.py::JournalTests::test_periodicity",
"tests/test_document.py::JournalTests::test_periodicity_in_months",
"tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices",
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices",
"tests/test_document.py::JournalTests::test_permission_id",
"tests/test_document.py::JournalTests::test_permission_t0",
"tests/test_document.py::JournalTests::test_permission_t1",
"tests/test_document.py::JournalTests::test_permission_t2",
"tests/test_document.py::JournalTests::test_permission_t3",
"tests/test_document.py::JournalTests::test_permission_t4",
"tests/test_document.py::JournalTests::test_permission_text",
"tests/test_document.py::JournalTests::test_permission_url",
"tests/test_document.py::JournalTests::test_permission_without_v540",
"tests/test_document.py::JournalTests::test_permission_without_v540_t",
"tests/test_document.py::JournalTests::test_publisher_loc",
"tests/test_document.py::JournalTests::test_publisher_name",
"tests/test_document.py::JournalTests::test_scielo_issn",
"tests/test_document.py::JournalTests::test_status",
"tests/test_document.py::JournalTests::test_status_lots_of_changes",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason",
"tests/test_document.py::JournalTests::test_status_some_changes",
"tests/test_document.py::JournalTests::test_status_without_v51",
"tests/test_document.py::JournalTests::test_subject_areas",
"tests/test_document.py::JournalTests::test_update_date",
"tests/test_document.py::JournalTests::test_without_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_without_journal_acronym",
"tests/test_document.py::JournalTests::test_without_journal_title",
"tests/test_document.py::JournalTests::test_without_journal_title_nlm",
"tests/test_document.py::JournalTests::test_without_journal_url",
"tests/test_document.py::JournalTests::test_without_periodicity",
"tests/test_document.py::JournalTests::test_without_periodicity_in_months",
"tests/test_document.py::JournalTests::test_without_publisher_loc",
"tests/test_document.py::JournalTests::test_without_publisher_name",
"tests/test_document.py::JournalTests::test_without_scielo_domain",
"tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::JournalTests::test_without_subject_areas",
"tests/test_document.py::JournalTests::test_without_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_without_wos_subject_areas",
"tests/test_document.py::JournalTests::test_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_acceptance_date",
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliations",
"tests/test_document.py::ArticleTests::test_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_article",
"tests/test_document.py::ArticleTests::test_author_with_two_affiliations",
"tests/test_document.py::ArticleTests::test_author_with_two_role",
"tests/test_document.py::ArticleTests::test_author_without_affiliations",
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names",
"tests/test_document.py::ArticleTests::test_authors",
"tests/test_document.py::ArticleTests::test_collection_acronym",
"tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection",
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992",
"tests/test_document.py::ArticleTests::test_collection_name_brazil",
"tests/test_document.py::ArticleTests::test_collection_name_undefined",
"tests/test_document.py::ArticleTests::test_corporative_authors",
"tests/test_document.py::ArticleTests::test_creation_date",
"tests/test_document.py::ArticleTests::test_creation_date_1",
"tests/test_document.py::ArticleTests::test_creation_date_2",
"tests/test_document.py::ArticleTests::test_document_type",
"tests/test_document.py::ArticleTests::test_doi",
"tests/test_document.py::ArticleTests::test_doi_clean_1",
"tests/test_document.py::ArticleTests::test_doi_clean_2",
"tests/test_document.py::ArticleTests::test_doi_v237",
"tests/test_document.py::ArticleTests::test_e_location",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_file_code",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2",
"tests/test_document.py::ArticleTests::test_first_author",
"tests/test_document.py::ArticleTests::test_first_author_without_author",
"tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts",
"tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts",
"tests/test_document.py::ArticleTests::test_html_url",
"tests/test_document.py::ArticleTests::test_invalid_document_type",
"tests/test_document.py::ArticleTests::test_is_ahead",
"tests/test_document.py::ArticleTests::test_issue",
"tests/test_document.py::ArticleTests::test_issue_label_field_v4",
"tests/test_document.py::ArticleTests::test_issue_label_without_field_v4",
"tests/test_document.py::ArticleTests::test_issue_url",
"tests/test_document.py::ArticleTests::test_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_journal_acronym",
"tests/test_document.py::ArticleTests::test_journal_title",
"tests/test_document.py::ArticleTests::test_keywords",
"tests/test_document.py::ArticleTests::test_keywords_iso639_2",
"tests/test_document.py::ArticleTests::test_keywords_with_undefined_language",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_l",
"tests/test_document.py::ArticleTests::test_languages_field_fulltexts",
"tests/test_document.py::ArticleTests::test_languages_field_v40",
"tests/test_document.py::ArticleTests::test_last_page",
"tests/test_document.py::ArticleTests::test_mixed_affiliations_1",
"tests/test_document.py::ArticleTests::test_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p",
"tests/test_document.py::ArticleTests::test_order",
"tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined",
"tests/test_document.py::ArticleTests::test_original_html_field_body",
"tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_original",
"tests/test_document.py::ArticleTests::test_original_section_field_v49",
"tests/test_document.py::ArticleTests::test_original_title_subfield_t",
"tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_title_without_language_defined",
"tests/test_document.py::ArticleTests::test_pdf_url",
"tests/test_document.py::ArticleTests::test_processing_date",
"tests/test_document.py::ArticleTests::test_processing_date_1",
"tests/test_document.py::ArticleTests::test_project_name",
"tests/test_document.py::ArticleTests::test_project_sponsors",
"tests/test_document.py::ArticleTests::test_publication_contract",
"tests/test_document.py::ArticleTests::test_publication_date",
"tests/test_document.py::ArticleTests::test_publisher_id",
"tests/test_document.py::ArticleTests::test_publisher_loc",
"tests/test_document.py::ArticleTests::test_publisher_name",
"tests/test_document.py::ArticleTests::test_receive_date",
"tests/test_document.py::ArticleTests::test_review_date",
"tests/test_document.py::ArticleTests::test_secion_code_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_without_field_v49",
"tests/test_document.py::ArticleTests::test_section_field_v49",
"tests/test_document.py::ArticleTests::test_section_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_without_field_v49",
"tests/test_document.py::ArticleTests::test_start_page",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_subject_areas",
"tests/test_document.py::ArticleTests::test_supplement_issue",
"tests/test_document.py::ArticleTests::test_supplement_volume",
"tests/test_document.py::ArticleTests::test_thesis_degree",
"tests/test_document.py::ArticleTests::test_thesis_organization",
"tests/test_document.py::ArticleTests::test_thesis_organization_and_division",
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name",
"tests/test_document.py::ArticleTests::test_translated_abstracts",
"tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83",
"tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body",
"tests/test_document.py::ArticleTests::test_translated_section_field_v49",
"tests/test_document.py::ArticleTests::test_translated_titles",
"tests/test_document.py::ArticleTests::test_translated_titles_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_titles_without_v12",
"tests/test_document.py::ArticleTests::test_update_date",
"tests/test_document.py::ArticleTests::test_update_date_1",
"tests/test_document.py::ArticleTests::test_update_date_2",
"tests/test_document.py::ArticleTests::test_update_date_3",
"tests/test_document.py::ArticleTests::test_volume",
"tests/test_document.py::ArticleTests::test_whitwout_acceptance_date",
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_whitwout_receive_date",
"tests/test_document.py::ArticleTests::test_whitwout_review_date",
"tests/test_document.py::ArticleTests::test_without_affiliations",
"tests/test_document.py::ArticleTests::test_without_authors",
"tests/test_document.py::ArticleTests::test_without_citations",
"tests/test_document.py::ArticleTests::test_without_collection_acronym",
"tests/test_document.py::ArticleTests::test_without_corporative_authors",
"tests/test_document.py::ArticleTests::test_without_document_type",
"tests/test_document.py::ArticleTests::test_without_doi",
"tests/test_document.py::ArticleTests::test_without_e_location",
"tests/test_document.py::ArticleTests::test_without_html_url",
"tests/test_document.py::ArticleTests::test_without_issue",
"tests/test_document.py::ArticleTests::test_without_issue_url",
"tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_without_journal_acronym",
"tests/test_document.py::ArticleTests::test_without_journal_title",
"tests/test_document.py::ArticleTests::test_without_keywords",
"tests/test_document.py::ArticleTests::test_without_last_page",
"tests/test_document.py::ArticleTests::test_without_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_without_order",
"tests/test_document.py::ArticleTests::test_without_original_abstract",
"tests/test_document.py::ArticleTests::test_without_original_title",
"tests/test_document.py::ArticleTests::test_without_pages",
"tests/test_document.py::ArticleTests::test_without_pdf_url",
"tests/test_document.py::ArticleTests::test_without_processing_date",
"tests/test_document.py::ArticleTests::test_without_project_name",
"tests/test_document.py::ArticleTests::test_without_project_sponsor",
"tests/test_document.py::ArticleTests::test_without_publication_contract",
"tests/test_document.py::ArticleTests::test_without_publication_date",
"tests/test_document.py::ArticleTests::test_without_publisher_id",
"tests/test_document.py::ArticleTests::test_without_publisher_loc",
"tests/test_document.py::ArticleTests::test_without_publisher_name",
"tests/test_document.py::ArticleTests::test_without_scielo_domain",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::ArticleTests::test_without_start_page",
"tests/test_document.py::ArticleTests::test_without_subject_areas",
"tests/test_document.py::ArticleTests::test_without_suplement_issue",
"tests/test_document.py::ArticleTests::test_without_supplement_volume",
"tests/test_document.py::ArticleTests::test_without_thesis_degree",
"tests/test_document.py::ArticleTests::test_without_thesis_organization",
"tests/test_document.py::ArticleTests::test_without_volume",
"tests/test_document.py::ArticleTests::test_without_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_wos_subject_areas",
"tests/test_document.py::CitationTest::test_a_link_access_date",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation",
"tests/test_document.py::CitationTest::test_article_title",
"tests/test_document.py::CitationTest::test_article_without_title",
"tests/test_document.py::CitationTest::test_authors_article",
"tests/test_document.py::CitationTest::test_authors_book",
"tests/test_document.py::CitationTest::test_authors_link",
"tests/test_document.py::CitationTest::test_authors_thesis",
"tests/test_document.py::CitationTest::test_book_chapter_title",
"tests/test_document.py::CitationTest::test_book_edition",
"tests/test_document.py::CitationTest::test_book_volume",
"tests/test_document.py::CitationTest::test_book_without_chapter_title",
"tests/test_document.py::CitationTest::test_citation_sample_congress",
"tests/test_document.py::CitationTest::test_citation_sample_link",
"tests/test_document.py::CitationTest::test_citation_sample_link_without_comment",
"tests/test_document.py::CitationTest::test_conference_edition",
"tests/test_document.py::CitationTest::test_conference_name",
"tests/test_document.py::CitationTest::test_conference_sponsor",
"tests/test_document.py::CitationTest::test_conference_without_name",
"tests/test_document.py::CitationTest::test_conference_without_sponsor",
"tests/test_document.py::CitationTest::test_date",
"tests/test_document.py::CitationTest::test_doi",
"tests/test_document.py::CitationTest::test_editor",
"tests/test_document.py::CitationTest::test_elocation_14",
"tests/test_document.py::CitationTest::test_elocation_514",
"tests/test_document.py::CitationTest::test_end_page_14",
"tests/test_document.py::CitationTest::test_end_page_514",
"tests/test_document.py::CitationTest::test_end_page_withdout_data",
"tests/test_document.py::CitationTest::test_first_author_article",
"tests/test_document.py::CitationTest::test_first_author_book",
"tests/test_document.py::CitationTest::test_first_author_link",
"tests/test_document.py::CitationTest::test_first_author_thesis",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_index_number",
"tests/test_document.py::CitationTest::test_institutions_all_fields",
"tests/test_document.py::CitationTest::test_institutions_v11",
"tests/test_document.py::CitationTest::test_institutions_v17",
"tests/test_document.py::CitationTest::test_institutions_v29",
"tests/test_document.py::CitationTest::test_institutions_v50",
"tests/test_document.py::CitationTest::test_institutions_v58",
"tests/test_document.py::CitationTest::test_invalid_edition",
"tests/test_document.py::CitationTest::test_isbn",
"tests/test_document.py::CitationTest::test_isbn_but_not_a_book",
"tests/test_document.py::CitationTest::test_issn",
"tests/test_document.py::CitationTest::test_issn_but_not_an_article",
"tests/test_document.py::CitationTest::test_issue_part",
"tests/test_document.py::CitationTest::test_issue_title",
"tests/test_document.py::CitationTest::test_journal_issue",
"tests/test_document.py::CitationTest::test_journal_volume",
"tests/test_document.py::CitationTest::test_link",
"tests/test_document.py::CitationTest::test_link_title",
"tests/test_document.py::CitationTest::test_link_without_title",
"tests/test_document.py::CitationTest::test_monographic_authors",
"tests/test_document.py::CitationTest::test_monographic_first_author",
"tests/test_document.py::CitationTest::test_pages_14",
"tests/test_document.py::CitationTest::test_pages_514",
"tests/test_document.py::CitationTest::test_pages_withdout_data",
"tests/test_document.py::CitationTest::test_publication_type_article",
"tests/test_document.py::CitationTest::test_publication_type_book",
"tests/test_document.py::CitationTest::test_publication_type_conference",
"tests/test_document.py::CitationTest::test_publication_type_link",
"tests/test_document.py::CitationTest::test_publication_type_thesis",
"tests/test_document.py::CitationTest::test_publication_type_undefined",
"tests/test_document.py::CitationTest::test_publisher",
"tests/test_document.py::CitationTest::test_publisher_address",
"tests/test_document.py::CitationTest::test_publisher_address_without_e",
"tests/test_document.py::CitationTest::test_series_book",
"tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation",
"tests/test_document.py::CitationTest::test_series_conference",
"tests/test_document.py::CitationTest::test_series_journal",
"tests/test_document.py::CitationTest::test_source_book_title",
"tests/test_document.py::CitationTest::test_source_journal",
"tests/test_document.py::CitationTest::test_source_journal_without_journal_title",
"tests/test_document.py::CitationTest::test_sponsor",
"tests/test_document.py::CitationTest::test_start_page_14",
"tests/test_document.py::CitationTest::test_start_page_514",
"tests/test_document.py::CitationTest::test_start_page_withdout_data",
"tests/test_document.py::CitationTest::test_thesis_institution",
"tests/test_document.py::CitationTest::test_thesis_title",
"tests/test_document.py::CitationTest::test_thesis_without_title",
"tests/test_document.py::CitationTest::test_title_when_article_citation",
"tests/test_document.py::CitationTest::test_title_when_conference_citation",
"tests/test_document.py::CitationTest::test_title_when_link_citation",
"tests/test_document.py::CitationTest::test_title_when_thesis_citation",
"tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book",
"tests/test_document.py::CitationTest::test_without_analytic_institution",
"tests/test_document.py::CitationTest::test_without_authors",
"tests/test_document.py::CitationTest::test_without_date",
"tests/test_document.py::CitationTest::test_without_doi",
"tests/test_document.py::CitationTest::test_without_edition",
"tests/test_document.py::CitationTest::test_without_editor",
"tests/test_document.py::CitationTest::test_without_first_author",
"tests/test_document.py::CitationTest::test_without_index_number",
"tests/test_document.py::CitationTest::test_without_institutions",
"tests/test_document.py::CitationTest::test_without_issue",
"tests/test_document.py::CitationTest::test_without_issue_part",
"tests/test_document.py::CitationTest::test_without_issue_title",
"tests/test_document.py::CitationTest::test_without_link",
"tests/test_document.py::CitationTest::test_without_monographic_authors",
"tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_without_publisher",
"tests/test_document.py::CitationTest::test_without_publisher_address",
"tests/test_document.py::CitationTest::test_without_series",
"tests/test_document.py::CitationTest::test_without_sponsor",
"tests/test_document.py::CitationTest::test_without_thesis_institution",
"tests/test_document.py::CitationTest::test_without_volume"
]
| []
| BSD 2-Clause "Simplified" License | 440 | [
"xylose/scielodocument.py"
]
| [
"xylose/scielodocument.py"
]
|
|
scieloorg__xylose-93 | b0fdfc8085316f491e0ad49d1abcba33641549b6 | 2016-02-18 18:46:17 | b0fdfc8085316f491e0ad49d1abcba33641549b6 | diff --git a/setup.py b/setup.py
index d1e1950..03b10eb 100755
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ except ImportError:
setup(
name="xylose",
- version='0.43',
+ version='0.46',
description="A SciELO library to abstract a JSON data structure that is a product of the ISIS2JSON conversion using the ISIS2JSON type 3 data model.",
author="SciELO",
author_email="[email protected]",
diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index 15d73da..a446175 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -802,9 +802,9 @@ class Article(object):
def data_model_version(self, fullpath=False):
"""
This method retrieves the document version
- This method deals with the legacy fields (601).
+ This method deals with the legacy fields (120).
"""
- if 'v601' in self.data['article']:
+ if 'xml' in self.data['article'].get('v120', [{'_': ''}])[0]['_'].lower():
return 'xml'
return 'html'
| Adjustment to the method that indicates whether the data model is HTML or XML. | scieloorg/xylose | diff --git a/tests/test_document.py b/tests/test_document.py
index fa8d4c2..f9c49bf 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -985,6 +985,27 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.file_code(), '0034-8910-rsp-47-04-0675')
+ def test_data_model_version_html(self):
+ del(self.fulldoc['article']['v120'])
+
+ article = Article(self.fulldoc)
+
+ self.assertEqual(article.data_model_version, u'html')
+
+ def test_data_model_version_html_1(self):
+ self.fulldoc['article']['v120'] = [{'_': '4.0'}]
+
+ article = Article(self.fulldoc)
+
+ self.assertEqual(article.data_model_version, u'html')
+
+ def test_data_model_version_xml(self):
+ self.fulldoc['article']['v120'] = [{'_': 'XML_1.0'}]
+
+ article = Article(self.fulldoc)
+
+ self.assertEqual(article.data_model_version, u'xml')
+
def test_wos_subject_areas(self):
self.fulldoc['title']['v854'] = [{u'_': u'MARINE & FRESHWATER BIOLOGY'}, {u'_': u'OCEANOGRAPHY'}]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 0.45 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"mocker",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mocker==1.1.1
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/scieloorg/xylose.git@b0fdfc8085316f491e0ad49d1abcba33641549b6#egg=xylose
| name: xylose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- mocker==1.1.1
- nose==1.3.7
prefix: /opt/conda/envs/xylose
| [
"tests/test_document.py::ArticleTests::test_data_model_version_xml"
]
| []
| [
"tests/test_document.py::ToolsTests::test_get_date_wrong_day",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_year",
"tests/test_document.py::ToolsTests::test_get_date_year_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day_31",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined",
"tests/test_document.py::ToolsTests::test_get_language_without_iso_format",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_print",
"tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print",
"tests/test_document.py::JournalTests::test_collection_acronym",
"tests/test_document.py::JournalTests::test_creation_date",
"tests/test_document.py::JournalTests::test_current_status",
"tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_current_status_some_changes",
"tests/test_document.py::JournalTests::test_current_without_v51",
"tests/test_document.py::JournalTests::test_journal",
"tests/test_document.py::JournalTests::test_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_journal_acronym",
"tests/test_document.py::JournalTests::test_journal_fulltitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_title",
"tests/test_document.py::JournalTests::test_journal_subtitle",
"tests/test_document.py::JournalTests::test_journal_title",
"tests/test_document.py::JournalTests::test_journal_title_nlm",
"tests/test_document.py::JournalTests::test_journal_url",
"tests/test_document.py::JournalTests::test_journal_without_subtitle",
"tests/test_document.py::JournalTests::test_languages",
"tests/test_document.py::JournalTests::test_languages_without_v350",
"tests/test_document.py::JournalTests::test_load_issn_with_v435",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35",
"tests/test_document.py::JournalTests::test_periodicity",
"tests/test_document.py::JournalTests::test_periodicity_in_months",
"tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices",
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices",
"tests/test_document.py::JournalTests::test_permission_id",
"tests/test_document.py::JournalTests::test_permission_t0",
"tests/test_document.py::JournalTests::test_permission_t1",
"tests/test_document.py::JournalTests::test_permission_t2",
"tests/test_document.py::JournalTests::test_permission_t3",
"tests/test_document.py::JournalTests::test_permission_t4",
"tests/test_document.py::JournalTests::test_permission_text",
"tests/test_document.py::JournalTests::test_permission_url",
"tests/test_document.py::JournalTests::test_permission_without_v540",
"tests/test_document.py::JournalTests::test_permission_without_v540_t",
"tests/test_document.py::JournalTests::test_publisher_loc",
"tests/test_document.py::JournalTests::test_publisher_name",
"tests/test_document.py::JournalTests::test_scielo_issn",
"tests/test_document.py::JournalTests::test_status",
"tests/test_document.py::JournalTests::test_status_lots_of_changes",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason",
"tests/test_document.py::JournalTests::test_status_some_changes",
"tests/test_document.py::JournalTests::test_status_without_v51",
"tests/test_document.py::JournalTests::test_subject_areas",
"tests/test_document.py::JournalTests::test_update_date",
"tests/test_document.py::JournalTests::test_without_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_without_journal_acronym",
"tests/test_document.py::JournalTests::test_without_journal_title",
"tests/test_document.py::JournalTests::test_without_journal_title_nlm",
"tests/test_document.py::JournalTests::test_without_journal_url",
"tests/test_document.py::JournalTests::test_without_periodicity",
"tests/test_document.py::JournalTests::test_without_periodicity_in_months",
"tests/test_document.py::JournalTests::test_without_publisher_loc",
"tests/test_document.py::JournalTests::test_without_publisher_name",
"tests/test_document.py::JournalTests::test_without_scielo_domain",
"tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::JournalTests::test_without_subject_areas",
"tests/test_document.py::JournalTests::test_without_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_without_wos_subject_areas",
"tests/test_document.py::JournalTests::test_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_acceptance_date",
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliations",
"tests/test_document.py::ArticleTests::test_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_article",
"tests/test_document.py::ArticleTests::test_author_with_two_affiliations",
"tests/test_document.py::ArticleTests::test_author_with_two_role",
"tests/test_document.py::ArticleTests::test_author_without_affiliations",
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names",
"tests/test_document.py::ArticleTests::test_authors",
"tests/test_document.py::ArticleTests::test_collection_acronym",
"tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection",
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992",
"tests/test_document.py::ArticleTests::test_collection_name_brazil",
"tests/test_document.py::ArticleTests::test_collection_name_undefined",
"tests/test_document.py::ArticleTests::test_corporative_authors",
"tests/test_document.py::ArticleTests::test_creation_date",
"tests/test_document.py::ArticleTests::test_creation_date_1",
"tests/test_document.py::ArticleTests::test_creation_date_2",
"tests/test_document.py::ArticleTests::test_data_model_version_html",
"tests/test_document.py::ArticleTests::test_data_model_version_html_1",
"tests/test_document.py::ArticleTests::test_document_type",
"tests/test_document.py::ArticleTests::test_doi",
"tests/test_document.py::ArticleTests::test_doi_clean_1",
"tests/test_document.py::ArticleTests::test_doi_clean_2",
"tests/test_document.py::ArticleTests::test_doi_v237",
"tests/test_document.py::ArticleTests::test_e_location",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_file_code",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2",
"tests/test_document.py::ArticleTests::test_first_author",
"tests/test_document.py::ArticleTests::test_first_author_without_author",
"tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts",
"tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts",
"tests/test_document.py::ArticleTests::test_html_url",
"tests/test_document.py::ArticleTests::test_invalid_document_type",
"tests/test_document.py::ArticleTests::test_is_ahead",
"tests/test_document.py::ArticleTests::test_issue",
"tests/test_document.py::ArticleTests::test_issue_label_field_v4",
"tests/test_document.py::ArticleTests::test_issue_label_without_field_v4",
"tests/test_document.py::ArticleTests::test_issue_url",
"tests/test_document.py::ArticleTests::test_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_journal_acronym",
"tests/test_document.py::ArticleTests::test_journal_title",
"tests/test_document.py::ArticleTests::test_keywords",
"tests/test_document.py::ArticleTests::test_keywords_iso639_2",
"tests/test_document.py::ArticleTests::test_keywords_with_undefined_language",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_l",
"tests/test_document.py::ArticleTests::test_languages_field_fulltexts",
"tests/test_document.py::ArticleTests::test_languages_field_v40",
"tests/test_document.py::ArticleTests::test_last_page",
"tests/test_document.py::ArticleTests::test_mixed_affiliations_1",
"tests/test_document.py::ArticleTests::test_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p",
"tests/test_document.py::ArticleTests::test_order",
"tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined",
"tests/test_document.py::ArticleTests::test_original_html_field_body",
"tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_original",
"tests/test_document.py::ArticleTests::test_original_section_field_v49",
"tests/test_document.py::ArticleTests::test_original_title_subfield_t",
"tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_title_without_language_defined",
"tests/test_document.py::ArticleTests::test_pdf_url",
"tests/test_document.py::ArticleTests::test_processing_date",
"tests/test_document.py::ArticleTests::test_processing_date_1",
"tests/test_document.py::ArticleTests::test_project_name",
"tests/test_document.py::ArticleTests::test_project_sponsors",
"tests/test_document.py::ArticleTests::test_publication_contract",
"tests/test_document.py::ArticleTests::test_publication_date",
"tests/test_document.py::ArticleTests::test_publisher_id",
"tests/test_document.py::ArticleTests::test_publisher_loc",
"tests/test_document.py::ArticleTests::test_publisher_name",
"tests/test_document.py::ArticleTests::test_receive_date",
"tests/test_document.py::ArticleTests::test_review_date",
"tests/test_document.py::ArticleTests::test_secion_code_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_without_field_v49",
"tests/test_document.py::ArticleTests::test_section_field_v49",
"tests/test_document.py::ArticleTests::test_section_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_without_field_v49",
"tests/test_document.py::ArticleTests::test_start_page",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_subject_areas",
"tests/test_document.py::ArticleTests::test_supplement_issue",
"tests/test_document.py::ArticleTests::test_supplement_volume",
"tests/test_document.py::ArticleTests::test_thesis_degree",
"tests/test_document.py::ArticleTests::test_thesis_organization",
"tests/test_document.py::ArticleTests::test_thesis_organization_and_division",
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name",
"tests/test_document.py::ArticleTests::test_translated_abstracts",
"tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83",
"tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body",
"tests/test_document.py::ArticleTests::test_translated_section_field_v49",
"tests/test_document.py::ArticleTests::test_translated_titles",
"tests/test_document.py::ArticleTests::test_translated_titles_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_titles_without_v12",
"tests/test_document.py::ArticleTests::test_update_date",
"tests/test_document.py::ArticleTests::test_update_date_1",
"tests/test_document.py::ArticleTests::test_update_date_2",
"tests/test_document.py::ArticleTests::test_update_date_3",
"tests/test_document.py::ArticleTests::test_volume",
"tests/test_document.py::ArticleTests::test_whitwout_acceptance_date",
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_whitwout_receive_date",
"tests/test_document.py::ArticleTests::test_whitwout_review_date",
"tests/test_document.py::ArticleTests::test_without_affiliations",
"tests/test_document.py::ArticleTests::test_without_authors",
"tests/test_document.py::ArticleTests::test_without_citations",
"tests/test_document.py::ArticleTests::test_without_collection_acronym",
"tests/test_document.py::ArticleTests::test_without_corporative_authors",
"tests/test_document.py::ArticleTests::test_without_document_type",
"tests/test_document.py::ArticleTests::test_without_doi",
"tests/test_document.py::ArticleTests::test_without_e_location",
"tests/test_document.py::ArticleTests::test_without_html_url",
"tests/test_document.py::ArticleTests::test_without_issue",
"tests/test_document.py::ArticleTests::test_without_issue_url",
"tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_without_journal_acronym",
"tests/test_document.py::ArticleTests::test_without_journal_title",
"tests/test_document.py::ArticleTests::test_without_keywords",
"tests/test_document.py::ArticleTests::test_without_last_page",
"tests/test_document.py::ArticleTests::test_without_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_without_order",
"tests/test_document.py::ArticleTests::test_without_original_abstract",
"tests/test_document.py::ArticleTests::test_without_original_title",
"tests/test_document.py::ArticleTests::test_without_pages",
"tests/test_document.py::ArticleTests::test_without_pdf_url",
"tests/test_document.py::ArticleTests::test_without_processing_date",
"tests/test_document.py::ArticleTests::test_without_project_name",
"tests/test_document.py::ArticleTests::test_without_project_sponsor",
"tests/test_document.py::ArticleTests::test_without_publication_contract",
"tests/test_document.py::ArticleTests::test_without_publication_date",
"tests/test_document.py::ArticleTests::test_without_publisher_id",
"tests/test_document.py::ArticleTests::test_without_publisher_loc",
"tests/test_document.py::ArticleTests::test_without_publisher_name",
"tests/test_document.py::ArticleTests::test_without_scielo_domain",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::ArticleTests::test_without_start_page",
"tests/test_document.py::ArticleTests::test_without_subject_areas",
"tests/test_document.py::ArticleTests::test_without_suplement_issue",
"tests/test_document.py::ArticleTests::test_without_supplement_volume",
"tests/test_document.py::ArticleTests::test_without_thesis_degree",
"tests/test_document.py::ArticleTests::test_without_thesis_organization",
"tests/test_document.py::ArticleTests::test_without_volume",
"tests/test_document.py::ArticleTests::test_without_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_wos_subject_areas",
"tests/test_document.py::CitationTest::test_a_link_access_date",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation",
"tests/test_document.py::CitationTest::test_article_title",
"tests/test_document.py::CitationTest::test_article_without_title",
"tests/test_document.py::CitationTest::test_authors_article",
"tests/test_document.py::CitationTest::test_authors_book",
"tests/test_document.py::CitationTest::test_authors_link",
"tests/test_document.py::CitationTest::test_authors_thesis",
"tests/test_document.py::CitationTest::test_book_chapter_title",
"tests/test_document.py::CitationTest::test_book_edition",
"tests/test_document.py::CitationTest::test_book_volume",
"tests/test_document.py::CitationTest::test_book_without_chapter_title",
"tests/test_document.py::CitationTest::test_citation_sample_congress",
"tests/test_document.py::CitationTest::test_citation_sample_link",
"tests/test_document.py::CitationTest::test_citation_sample_link_without_comment",
"tests/test_document.py::CitationTest::test_conference_edition",
"tests/test_document.py::CitationTest::test_conference_name",
"tests/test_document.py::CitationTest::test_conference_sponsor",
"tests/test_document.py::CitationTest::test_conference_without_name",
"tests/test_document.py::CitationTest::test_conference_without_sponsor",
"tests/test_document.py::CitationTest::test_date",
"tests/test_document.py::CitationTest::test_doi",
"tests/test_document.py::CitationTest::test_editor",
"tests/test_document.py::CitationTest::test_elocation_14",
"tests/test_document.py::CitationTest::test_elocation_514",
"tests/test_document.py::CitationTest::test_end_page_14",
"tests/test_document.py::CitationTest::test_end_page_514",
"tests/test_document.py::CitationTest::test_end_page_withdout_data",
"tests/test_document.py::CitationTest::test_first_author_article",
"tests/test_document.py::CitationTest::test_first_author_book",
"tests/test_document.py::CitationTest::test_first_author_link",
"tests/test_document.py::CitationTest::test_first_author_thesis",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_index_number",
"tests/test_document.py::CitationTest::test_institutions_all_fields",
"tests/test_document.py::CitationTest::test_institutions_v11",
"tests/test_document.py::CitationTest::test_institutions_v17",
"tests/test_document.py::CitationTest::test_institutions_v29",
"tests/test_document.py::CitationTest::test_institutions_v50",
"tests/test_document.py::CitationTest::test_institutions_v58",
"tests/test_document.py::CitationTest::test_invalid_edition",
"tests/test_document.py::CitationTest::test_isbn",
"tests/test_document.py::CitationTest::test_isbn_but_not_a_book",
"tests/test_document.py::CitationTest::test_issn",
"tests/test_document.py::CitationTest::test_issn_but_not_an_article",
"tests/test_document.py::CitationTest::test_issue_part",
"tests/test_document.py::CitationTest::test_issue_title",
"tests/test_document.py::CitationTest::test_journal_issue",
"tests/test_document.py::CitationTest::test_journal_volume",
"tests/test_document.py::CitationTest::test_link",
"tests/test_document.py::CitationTest::test_link_title",
"tests/test_document.py::CitationTest::test_link_without_title",
"tests/test_document.py::CitationTest::test_monographic_authors",
"tests/test_document.py::CitationTest::test_monographic_first_author",
"tests/test_document.py::CitationTest::test_pages_14",
"tests/test_document.py::CitationTest::test_pages_514",
"tests/test_document.py::CitationTest::test_pages_withdout_data",
"tests/test_document.py::CitationTest::test_publication_type_article",
"tests/test_document.py::CitationTest::test_publication_type_book",
"tests/test_document.py::CitationTest::test_publication_type_conference",
"tests/test_document.py::CitationTest::test_publication_type_link",
"tests/test_document.py::CitationTest::test_publication_type_thesis",
"tests/test_document.py::CitationTest::test_publication_type_undefined",
"tests/test_document.py::CitationTest::test_publisher",
"tests/test_document.py::CitationTest::test_publisher_address",
"tests/test_document.py::CitationTest::test_publisher_address_without_e",
"tests/test_document.py::CitationTest::test_series_book",
"tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation",
"tests/test_document.py::CitationTest::test_series_conference",
"tests/test_document.py::CitationTest::test_series_journal",
"tests/test_document.py::CitationTest::test_source_book_title",
"tests/test_document.py::CitationTest::test_source_journal",
"tests/test_document.py::CitationTest::test_source_journal_without_journal_title",
"tests/test_document.py::CitationTest::test_sponsor",
"tests/test_document.py::CitationTest::test_start_page_14",
"tests/test_document.py::CitationTest::test_start_page_514",
"tests/test_document.py::CitationTest::test_start_page_withdout_data",
"tests/test_document.py::CitationTest::test_thesis_institution",
"tests/test_document.py::CitationTest::test_thesis_title",
"tests/test_document.py::CitationTest::test_thesis_without_title",
"tests/test_document.py::CitationTest::test_title_when_article_citation",
"tests/test_document.py::CitationTest::test_title_when_conference_citation",
"tests/test_document.py::CitationTest::test_title_when_link_citation",
"tests/test_document.py::CitationTest::test_title_when_thesis_citation",
"tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book",
"tests/test_document.py::CitationTest::test_without_analytic_institution",
"tests/test_document.py::CitationTest::test_without_authors",
"tests/test_document.py::CitationTest::test_without_date",
"tests/test_document.py::CitationTest::test_without_doi",
"tests/test_document.py::CitationTest::test_without_edition",
"tests/test_document.py::CitationTest::test_without_editor",
"tests/test_document.py::CitationTest::test_without_first_author",
"tests/test_document.py::CitationTest::test_without_index_number",
"tests/test_document.py::CitationTest::test_without_institutions",
"tests/test_document.py::CitationTest::test_without_issue",
"tests/test_document.py::CitationTest::test_without_issue_part",
"tests/test_document.py::CitationTest::test_without_issue_title",
"tests/test_document.py::CitationTest::test_without_link",
"tests/test_document.py::CitationTest::test_without_monographic_authors",
"tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_without_publisher",
"tests/test_document.py::CitationTest::test_without_publisher_address",
"tests/test_document.py::CitationTest::test_without_series",
"tests/test_document.py::CitationTest::test_without_sponsor",
"tests/test_document.py::CitationTest::test_without_thesis_institution",
"tests/test_document.py::CitationTest::test_without_volume"
]
| []
| BSD 2-Clause "Simplified" License | 441 | [
"setup.py",
"xylose/scielodocument.py"
]
| [
"setup.py",
"xylose/scielodocument.py"
]
|
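
The scieloorg__xylose-93 record above swaps the legacy `v601` check in `Article.data_model_version` for a lookup on field `v120`. Below is a minimal standalone sketch of that detection rule, assuming only plain dicts shaped like the test fixtures (the sample values `'XML_1.0'` and `'4.0'` come from the record's test patch); it is not the actual `xylose.scielodocument.Article` class.

```python
def data_model_version(article_record):
    """Return 'xml' when field v120 marks an XML-based document, else 'html'.

    Mirrors the patched check: the textual value of v120 decides the data
    model, replacing the old test for the presence of field v601.
    """
    v120_value = article_record.get('v120', [{'_': ''}])[0]['_']
    return 'xml' if 'xml' in v120_value.lower() else 'html'


print(data_model_version({'v120': [{'_': 'XML_1.0'}]}))  # 'xml'
print(data_model_version({'v120': [{'_': '4.0'}]}))      # 'html'
print(data_model_version({}))                            # 'html' (field absent)
```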
|
scrapy__scrapy-1793 | 7ce32422c4e266b0799c06c1b8263d28fc0d9df7 | 2016-02-18 21:57:39 | a975a50558cd78a1573bee2e957afcb419fd1bd6 | diff --git a/scrapy/responsetypes.py b/scrapy/responsetypes.py
index 4880cc7b9..c667b141d 100644
--- a/scrapy/responsetypes.py
+++ b/scrapy/responsetypes.py
@@ -59,7 +59,8 @@ class ResponseTypes(object):
def from_content_disposition(self, content_disposition):
try:
- filename = to_native_str(content_disposition).split(';')[1].split('=')[1]
+ filename = to_native_str(content_disposition,
+ encoding='latin-1', errors='replace').split(';')[1].split('=')[1]
filename = filename.strip('"\'')
return self.from_filename(filename)
except IndexError:
| PY3: error decoding Content-Disposition header
This request
```
scrapy shell 'http://npe.com.cn/plus/save_to_doc.php?id=1666'
```
raises this error:
```
Traceback (most recent call last):
File "/Users/kmike/envs/dl/bin/scrapy", line 9, in <module>
load_entry_point('Scrapy', 'console_scripts', 'scrapy')()
File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 142, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 88, in _run_print_help
func(*a, **kw)
File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 149, in _run_command
cmd.run(args, opts)
File "/Users/kmike/svn/scrapy/scrapy/commands/shell.py", line 71, in run
shell.start(url=url)
File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 47, in start
self.fetch(url, spider)
File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 112, in fetch
reactor, self._schedule, request, spider)
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/threads.py", line 122, in blockingCallFromThread
result.raiseException()
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 368, in raiseException
raise self.value.with_traceback(self.tb)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte
```
The error points to a wrong location (similar to #1760); the real traceback is
```
Traceback (most recent call last):
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 1126, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 389, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/Users/kmike/svn/scrapy/scrapy/core/downloader/middleware.py", line 43, in process_request
defer.returnValue((yield download_func(request=request,spider=spider)))
File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 588, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/Users/kmike/svn/scrapy/scrapy/core/downloader/handlers/http11.py", line 272, in _cb_bodydone
respcls = responsetypes.from_args(headers=headers, url=url)
File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 110, in from_args
cls = self.from_headers(headers)
File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 78, in from_headers
cls = self.from_content_disposition(headers[b'Content-Disposition'])
File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 62, in from_content_disposition
filename = to_native_str(content_disposition).split(';')[1].split('=')[1]
File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 129, in to_native_str
return to_unicode(text, encoding, errors)
File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 107, in to_unicode
return text.decode(encoding, errors)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte
```
It looks like Content-Disposition is decoded using utf-8, but the encoding was not UTF-8. | scrapy/scrapy | diff --git a/tests/test_responsetypes.py b/tests/test_responsetypes.py
index 2374d518f..118136ac4 100644
--- a/tests/test_responsetypes.py
+++ b/tests/test_responsetypes.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
import unittest
from scrapy.responsetypes import responsetypes
@@ -20,8 +21,14 @@ class ResponseTypesTest(unittest.TestCase):
def test_from_content_disposition(self):
mappings = [
- ('attachment; filename="data.xml"', XmlResponse),
- ('attachment; filename=data.xml', XmlResponse),
+ (b'attachment; filename="data.xml"', XmlResponse),
+ (b'attachment; filename=data.xml', XmlResponse),
+ (u'attachment;filename=data£.tar.gz'.encode('utf-8'), Response),
+ (u'attachment;filename=dataµ.tar.gz'.encode('latin-1'), Response),
+ (u'attachment;filename=data高.doc'.encode('gbk'), Response),
+ (u'attachment;filename=دورهdata.html'.encode('cp720'), HtmlResponse),
+ (u'attachment;filename=日本語版Wikipedia.xml'.encode('iso2022_jp'), XmlResponse),
+
]
for source, cls in mappings:
retcls = responsetypes.from_content_disposition(source)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libxml2-dev libxslt1-dev zlib1g-dev libffi-dev libssl-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
Automat==22.10.0
certifi==2021.5.30
cffi==1.15.1
constantly==15.1.0
cryptography==40.0.2
cssselect==1.1.0
hyperlink==21.0.0
idna==3.10
importlib-metadata==4.8.3
incremental==22.10.0
iniconfig==1.1.1
lxml==5.3.1
packaging==21.3
parsel==1.6.0
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21
PyDispatcher==2.0.7
pyOpenSSL==23.2.0
pyparsing==3.1.4
pytest==7.0.1
queuelib==1.6.2
-e git+https://github.com/scrapy/scrapy.git@7ce32422c4e266b0799c06c1b8263d28fc0d9df7#egg=Scrapy
service-identity==21.1.0
six==1.17.0
tomli==1.2.3
Twisted==22.4.0
typing_extensions==4.1.1
w3lib==2.0.1
zipp==3.6.0
zope.interface==5.5.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- automat==22.10.0
- cffi==1.15.1
- constantly==15.1.0
- cryptography==40.0.2
- cssselect==1.1.0
- hyperlink==21.0.0
- idna==3.10
- importlib-metadata==4.8.3
- incremental==22.10.0
- iniconfig==1.1.1
- lxml==5.3.1
- packaging==21.3
- parsel==1.6.0
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycparser==2.21
- pydispatcher==2.0.7
- pyopenssl==23.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- queuelib==1.6.2
- service-identity==21.1.0
- six==1.17.0
- tomli==1.2.3
- twisted==22.4.0
- typing-extensions==4.1.1
- w3lib==2.0.1
- zipp==3.6.0
- zope-interface==5.5.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_responsetypes.py::ResponseTypesTest::test_from_content_disposition"
]
| []
| [
"tests/test_responsetypes.py::ResponseTypesTest::test_custom_mime_types_loaded",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_args",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_body",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_content_type",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_filename",
"tests/test_responsetypes.py::ResponseTypesTest::test_from_headers"
]
| []
| BSD 3-Clause "New" or "Revised" License | 442 | [
"scrapy/responsetypes.py"
]
| [
"scrapy/responsetypes.py"
]
|
|
bukzor__RefactorLib-24 | 181ff5525a5904eb7cf31653e80723450b7fc45e | 2016-02-19 07:17:40 | 181ff5525a5904eb7cf31653e80723450b7fc45e | diff --git a/.travis.yml b/.travis.yml
index bb006db..c471dc8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,10 +2,15 @@ language: python
env: # These should match the tox env list
- TOXENV=py26
- TOXENV=py27
+ - TOXENV=py34
install:
- - pip install coveralls tox --use-mirrors
+ - pip install coveralls tox
- npm install reflect
script: tox
after_success:
- coveralls
sudo: false
+cache:
+ directories:
+ - $HOME/.cache/pip
+ - $HOME/.pre-commit
diff --git a/refactorlib/cheetah/node.py b/refactorlib/cheetah/node.py
index 755fbfa..48e163f 100644
--- a/refactorlib/cheetah/node.py
+++ b/refactorlib/cheetah/node.py
@@ -1,20 +1,28 @@
"""
cheetah-specific additions to the lxml element node class.
"""
+import six
+
from lxml import etree
from refactorlib.node import RefactorLibNodeBase, one
class CheetahNodeBase(RefactorLibNodeBase):
def find_calls(self, func_name):
+ if isinstance(func_name, bytes):
+ func_name = func_name.decode('UTF-8')
return self.xpath(
'.//Placeholder'
'[./CheetahVarNameChunks/CallArgString]'
- '[./CheetahVarNameChunks/DottedName="%s"]' % func_name
+ '[./CheetahVarNameChunks/DottedName="{0}"]'.format(
+ func_name,
+ )
) + self.xpath(
'.//CheetahVar'
'[./CheetahVarBody/CheetahVarNameChunks/CallArgString]'
- '[./CheetahVarBody/CheetahVarNameChunks/DottedName="%s"]' % func_name
+ '[./CheetahVarBody/CheetahVarNameChunks/DottedName="{0}"]'.format(
+ func_name,
+ )
)
def find_decorators(self, dec_name):
@@ -72,6 +80,8 @@ class CheetahNodeBase(RefactorLibNodeBase):
return comment
def is_in_context(self, directive_string):
+ if isinstance(directive_string, bytes):
+ directive_string = directive_string.decode('UTF-8')
try:
directive_name, var = directive_string.split(None, 1)
except ValueError:
@@ -86,7 +96,7 @@ class CheetahNodeBase(RefactorLibNodeBase):
directive.name == directive_name and
(
directive.var is None and var is None or
- directive.var.totext(with_tail=False) == var
+ directive.var.totext(with_tail=False).decode('UTF-8') == var
)
):
return True
@@ -158,7 +168,7 @@ class CheetahVariable(CheetahNodeBase):
args = self.args
if not args: # no arguments.
- assert args_container.totext().strip('(\n\t )') == '', args_container.totext()
+ assert args_container.totext().strip(b'(\n\t )') == b'', args_container.totext()
self.remove_self()
return
@@ -225,7 +235,7 @@ class CheetahDecorator(CheetahNodeBase):
class CheetahDirective(CheetahNodeBase):
def replace_directive(self, other):
- if isinstance(other, basestring):
+ if isinstance(other, six.string_types):
var = self.makeelement('CheetahVar')
try:
directive, var.text = other.split(None, 1)
@@ -251,7 +261,7 @@ class CheetahDirective(CheetahNodeBase):
@property
def is_multiline_directive(self):
return (
- self.totext().strip().endswith(':') or
+ self.totext().strip().endswith(b':') or
not self.xpath(
'./self::Directive[starts-with(., "#end")] or '
'./SimpleExprDirective or '
diff --git a/refactorlib/cheetah/parse.py b/refactorlib/cheetah/parse.py
index ac908b1..2c86bbc 100644
--- a/refactorlib/cheetah/parse.py
+++ b/refactorlib/cheetah/parse.py
@@ -153,7 +153,7 @@ def detect_encoding(source):
def parse(cheetah_content, encoding=None):
- # yelp_cheetah requires unicode
+ # yelp_cheetah requires text
if type(cheetah_content) is bytes:
cheetah_content = cheetah_content.decode('UTF-8')
diff --git a/refactorlib/cli/xmlstrip.py b/refactorlib/cli/xmlstrip.py
index 376d498..79c4e0b 100644
--- a/refactorlib/cli/xmlstrip.py
+++ b/refactorlib/cli/xmlstrip.py
@@ -7,7 +7,7 @@ This is the inverse operation of `xmlfrom`.
def xmlstrip(filename):
from lxml.etree import XML, tostring
- tree = XML(open(filename).read()).getroottree()
+ tree = XML(open(filename, 'rb').read()).getroottree()
encoding = tree.docinfo.encoding
return tostring(tree, method='text', encoding=encoding)
diff --git a/refactorlib/javascript/parse.py b/refactorlib/javascript/parse.py
index 73038bf..107eb65 100644
--- a/refactorlib/javascript/parse.py
+++ b/refactorlib/javascript/parse.py
@@ -1,3 +1,5 @@
+import six
+
from refactorlib.util import static
@@ -5,8 +7,7 @@ DEBUG = False
def parse(javascript_contents, encoding='ascii'):
- """
- Given some javascript contents, as a unicode string, return the lxml representation.
+ """Given some javascript contents, as a text string, return the lxml representation.
"reflectjs" below refers to the Mozilla Reflect protocol:
* https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
* https://npmjs.org/package/reflect
@@ -36,7 +37,10 @@ def reflectjs_parse(javascript_contents):
from refactorlib.util import Popen, PIPE
from os.path import join
from simplejson import loads
- from simplejson.ordered_dict import OrderedDict
+ try:
+ from collections import OrderedDict
+ except ImportError:
+ from ordereddict import OrderedDict
reflectjs_script = join(TOP, 'javascript/reflectjs.js')
reflectjs = Popen([find_nodejs(), reflectjs_script], stdin=PIPE, stdout=PIPE)
@@ -56,7 +60,6 @@ def reflectjs_to_dictnode(tree):
text or tail, and may have some overlap issues.
"""
from refactorlib.dictnode import DictNode
- from types import NoneType
root_dictnode = DictNode(parent=None)
stack = [(tree, root_dictnode)]
@@ -78,15 +81,17 @@ def reflectjs_to_dictnode(tree):
else:
attrs[val['type']] = val['name']
elif attr == 'value':
- attrs[attr] = unicode(val)
+ attrs[attr] = six.text_type(val)
# We would normally lose this type information, as lxml
# wants everything to be a string.
attrs['type'] = type(val).__name__
- elif isinstance(val, unicode):
+ elif isinstance(val, six.text_type):
attrs[attr] = val
- elif isinstance(val, (bool, NoneType, str)):
+ elif isinstance(val, bytes):
+ attrs[attr] = val.decode('UTF-8')
+ elif isinstance(val, (bool, type(None))):
# TODO: figure out what happens with non-ascii data.
- attrs[attr] = unicode(val)
+ attrs[attr] = six.text_type(val)
else: # Should never happen
assert False
@@ -97,5 +102,5 @@ def reflectjs_to_dictnode(tree):
children=[DictNode(parent=dictnode) for child in children],
attrs=attrs,
))
- stack.extend(reversed(zip(children, dictnode['children'])))
+ stack.extend(reversed(list(zip(children, dictnode['children']))))
return root_dictnode
diff --git a/refactorlib/parse.py b/refactorlib/parse.py
index 8e93315..d641074 100644
--- a/refactorlib/parse.py
+++ b/refactorlib/parse.py
@@ -5,18 +5,15 @@ def parse(filename, filetype=None, encoding=None):
from refactorlib.filetypes import FILETYPES
filetype = FILETYPES.detect_filetype(filename, filetype)
- source = open(filename).read()
+ source = open(filename, 'rb').read()
# If no encoding was explicitly specified, see if we can parse
# it out from the contents of the file.
if encoding is None:
encoding = filetype.encoding_detector(source)
- if encoding:
- source = unicode(source, encoding)
- else:
- # I don't see why encoding=None is different from not specifying the encoding.
- source = unicode(source)
+ encoding = encoding if encoding else 'UTF-8'
+ source = source.decode(encoding)
return filetype.parser(source, encoding)
@@ -36,10 +33,7 @@ def dictnode_to_lxml(tree, node_lookup=None, encoding=None):
if not node_lookup:
from refactorlib.node import node_lookup
- from lxml.etree import XMLParser, fromstring
- lxml_parser_object = XMLParser(encoding=encoding)
- lxml_parser_object.set_element_class_lookup(node_lookup)
- Element = lxml_parser_object.makeelement
+ from lxml.etree import Element, XMLParser
root = None
stack = [(tree, root)]
@@ -50,7 +44,10 @@ def dictnode_to_lxml(tree, node_lookup=None, encoding=None):
if parent is None:
# We use this roundabout method becuase the encoding is always set
# to 'UTF8' if we use parser.makeelement()
- lxmlnode = fromstring('<trash></trash>', parser=lxml_parser_object)
+ parser = XMLParser(encoding=encoding)
+ parser.set_element_class_lookup(node_lookup)
+ parser.feed(b'<trash></trash>')
+ lxmlnode = parser.close()
lxmlnode.tag = node['name']
lxmlnode.attrib.update(node.get('attrs', {}))
root = lxmlnode
diff --git a/refactorlib/python/parse.py b/refactorlib/python/parse.py
index 28a477b..a3cbc27 100644
--- a/refactorlib/python/parse.py
+++ b/refactorlib/python/parse.py
@@ -3,7 +3,7 @@ import re
# regex taken from inducer/pudb's detect_encoding
-pythonEncodingDirectiveRE = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)")
+encoding_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)".encode('UTF-8'))
def detect_encoding(source):
@@ -11,20 +11,19 @@ def detect_encoding(source):
Given some python contents as a byte string, return the name of the encoding, or else None.
"""
# According to the PEP0263, the encoding directive must appear on one of the first two lines of the file
- top_lines = source.split('\n', 2)[:2]
+ top_lines = source.split(b'\n', 2)[:2]
for line in top_lines:
- encodingMatch = pythonEncodingDirectiveRE.search(line)
- if encodingMatch:
- return encodingMatch.group(1)
+ encoding_match = encoding_re.search(line)
+ if encoding_match:
+ return encoding_match.group(1).decode('UTF-8')
# We didn't find anything.
return None
def parse(python_contents, encoding):
- """
- Given some python contents as a unicode string, return the lxml representation.
+ """Given some python contents as a text string, return the lxml representation.
"""
lib2to3_python = lib2to3_parse(python_contents)
dictnode_python = lib2to3_to_dictnode(lib2to3_python)
diff --git a/refactorlib/util.py b/refactorlib/util.py
index 9b9d983..1c5e01d 100644
--- a/refactorlib/util.py
+++ b/refactorlib/util.py
@@ -42,7 +42,7 @@ class Popen(_Popen):
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
"""
- output, _ = self.communicate(stdin)
+ output, _ = self.communicate(stdin.encode('UTF-8'))
retcode = self.poll()
if retcode:
cmd = self.args
diff --git a/setup.py b/setup.py
index 8397031..98690bc 100755
--- a/setup.py
+++ b/setup.py
@@ -19,6 +19,8 @@ setuptools.setup(
install_requires=[
'cached-property',
'lxml>=2.2', # We run with 2.2.4.0
+ 'ordereddict',
+ 'six',
],
extras_require={
'javascript': ['simplejson'],
@@ -48,6 +50,8 @@ setuptools.setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
diff --git a/tox.ini b/tox.ini
index 9bd7b2c..2737f17 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,11 +1,10 @@
[tox]
project = refactorlib
# These should match the travis env list
-envlist = py26,py27
+envlist = py26,py27,py34
skipsdist = true
[testenv]
-install_command = pip install --use-wheel {opts} {packages}
deps = -rrequirements-dev.txt
commands =
coverage erase
| Support python3 | bukzor/RefactorLib | diff --git a/testing/util.py b/testing/util.py
index 0170f44..fa9afce 100644
--- a/testing/util.py
+++ b/testing/util.py
@@ -1,6 +1,8 @@
"""
A home for the 'yellow code' of testing.
"""
+from __future__ import unicode_literals
+
from os.path import join
@@ -16,7 +18,6 @@ def example_dir(func):
def get_examples(func):
-
from os import listdir
from os.path import isfile
@@ -77,13 +78,13 @@ def parametrize(arg_finder):
def assert_same_content(old_file, new_content, extra_suffix=''):
new_file = ''.join((old_file, extra_suffix, FAILURE_SUFFIX))
try:
- open(new_file, 'w').write(new_content)
+ open(new_file, 'wb').write(new_content)
except IOError as e:
if e.errno == 2: # No such file.
from os import makedirs
from os.path import dirname
makedirs(dirname(new_file))
- open(new_file, 'w').write(new_content)
+ open(new_file, 'wb').write(new_content)
else:
raise
@@ -91,15 +92,16 @@ def assert_same_content(old_file, new_content, extra_suffix=''):
def assert_same_file_content(old_file, new_file):
- old_content = open(old_file).readlines()
- new_content = open(new_file).readlines()
+ old_content = open(old_file, 'rb').readlines()
+ new_content = open(new_file, 'rb').readlines()
diffs = diff(old_content, new_content)
if diffs:
diffs = 'Results differ:\n--- %s\n+++ %s\n%s' % (old_file, new_file, diffs)
- # py.test derps on non-utf8 bytes, so I force unicode like so:
- diffs = diffs.decode('UTF-8', 'replace')
+ # py.test derps on non-utf8 bytes, so I force text like so:
+ if isinstance(diffs, bytes):
+ diffs = diffs.decode('UTF-8', 'replace')
raise AssertionError(diffs)
else:
from os import unlink
@@ -112,7 +114,7 @@ def diff(old_content, new_content, n=3):
diffdata = tuple(diff(old_content, new_content))
difflines = set()
for lineno, line in enumerate(diffdata):
- if not line.startswith(' '): # Ignore the similar lines.
+ if not line.startswith(str(' ')): # Ignore the similar lines.
difflines.update(range(lineno - n, lineno + n + 1))
return '\n'.join(
diff --git a/tests/cheetah/add_comment_test.py b/tests/cheetah/add_comment_test.py
index 15ecc6c..33f78c5 100644
--- a/tests/cheetah/add_comment_test.py
+++ b/tests/cheetah/add_comment_test.py
@@ -30,7 +30,7 @@ def test_can_add_comments():
calls[0].add_comment('2 esc')
calls[0].add_comment('3 esc')
- assert '''
+ assert b'''
#def foo()
## 1 esc
## 2 esc
diff --git a/tests/cheetah/directive_test.py b/tests/cheetah/directive_test.py
index 2adf1fe..01071b0 100644
--- a/tests/cheetah/directive_test.py
+++ b/tests/cheetah/directive_test.py
@@ -14,19 +14,19 @@ def test_find_end_directive(example, output):
new_output = []
for directive in lxmlnode.xpath('//Directive'):
new_output.append(
- 'Directive: %s' % tree.getpath(directive),
+ b'Directive: ' + tree.getpath(directive).encode('UTF-8'),
)
if directive.is_multiline_directive:
new_output.append(
- 'End: %s' % tree.getpath(directive.get_end_directive()),
+ b'End: ' + tree.getpath(directive.get_end_directive()).encode('UTF-8'),
)
else:
new_output.append(
- 'Single-line: %s' % directive.totext()
+ b'Single-line: ' + directive.totext()
)
- new_output.append('')
+ new_output.append(b'')
- new_output = '\n'.join(new_output)
+ new_output = b'\n'.join(new_output)
assert_same_content(output, new_output)
@@ -40,7 +40,7 @@ def test_replace_directive(example, output):
if directive.var is None:
directive.replace_directive('#{{{%s}}}' % directive.name)
else:
- directive.replace_directive('#{{{%s}}} [%s]' % (directive.name, directive.var.totext(with_tail=False)))
+ directive.replace_directive('#{{{%s}}} [%s]' % (directive.name, directive.var.totext(with_tail=False).decode('UTF-8')))
new_output = lxmlnode.totext()
assert_same_content(output, new_output)
@@ -67,11 +67,11 @@ def test_get_enclosing_blocks(example, output):
new_output = []
for context, directive in sorted(unique_contexts.items()):
new_output.append(
- 'Directive: %s' % tree.getpath(directive)
+ b'Directive: ' + tree.getpath(directive).encode('UTF-8')
)
for c in context:
- new_output.append(' ' + c)
- new_output.append('')
+ new_output.append(b' ' + c.encode('UTF-8'))
+ new_output.append(b'')
- new_output = '\n'.join(new_output)
+ new_output = b'\n'.join(new_output)
assert_same_content(output, new_output)
diff --git a/tests/cheetah/is_in_context_test.py b/tests/cheetah/is_in_context_test.py
index 456c9ba..0a127ae 100644
--- a/tests/cheetah/is_in_context_test.py
+++ b/tests/cheetah/is_in_context_test.py
@@ -1,3 +1,5 @@
+import six
+
from testing.util import parametrize, get_output, assert_same_content
from . import xfailif_no_cheetah
@@ -11,9 +13,9 @@ def test_is_in_context(example, output):
top_level_directives = lxmlnode.xpath('/cheetah/*/*[1][self::Directive]')
top_level_directives = [
- "#%s %s" % (d.name, d.var.totext(with_tail=False))
- if d.var else
- "#%s" % d.name
+ b'#' + d.name.encode('UTF-8') + b' ' + d.var.totext(with_tail=False)
+ if d.var is not None else
+ b'#' + d.name.encode('UTF-8')
for d in top_level_directives
]
@@ -23,13 +25,14 @@ def test_is_in_context(example, output):
new_output = []
for placeholder in lxmlnode.xpath('//Placeholder'):
new_output.append(
- 'Placeholder: %s' % placeholder.totext(with_tail=False)
+ b'Placeholder: ' + placeholder.totext(with_tail=False)
)
for d in top_level_directives:
new_output.append(
- ' %s %s' % (d, placeholder.is_in_context(d))
+ b' ' + d + b' ' +
+ six.text_type(placeholder.is_in_context(d)).encode('UTF-8')
)
- new_output.append('')
+ new_output.append(b'')
- new_output = '\n'.join(new_output)
+ new_output = b'\n'.join(new_output)
assert_same_content(output, new_output)
diff --git a/tests/cheetah/parse_test.py b/tests/cheetah/parse_test.py
index 418865e..059804a 100644
--- a/tests/cheetah/parse_test.py
+++ b/tests/cheetah/parse_test.py
@@ -6,7 +6,7 @@ from . import xfailif_no_cheetah
@xfailif_no_cheetah
@parametrize(get_examples)
def test_can_make_round_trip(example):
- text = open(example).read()
+ text = open(example, 'rb').read()
example = parse(example)
assert text == example.totext()
diff --git a/tests/cheetah/remove_call_test.py b/tests/cheetah/remove_call_test.py
index 8f36629..1084902 100644
--- a/tests/cheetah/remove_call_test.py
+++ b/tests/cheetah/remove_call_test.py
@@ -12,7 +12,7 @@ def test_can_find_calls():
calls = example.find_calls('foo')
assert len(calls) == 1
- assert calls[0].totext() == '$foo()'
+ assert calls[0].totext() == b'$foo()'
@xfailif_no_cheetah
diff --git a/tests/javascript/parse_test.py b/tests/javascript/parse_test.py
index 2c86c0d..2122e8c 100644
--- a/tests/javascript/parse_test.py
+++ b/tests/javascript/parse_test.py
@@ -27,7 +27,7 @@ else:
def test_can_make_round_trip(example):
text = open(example).read()
example = parse(example)
- assert text == example.totext()
+ assert text == example.totext().decode('UTF-8')
@xfailif_no_js
diff --git a/tests/python/parse_test.py b/tests/python/parse_test.py
index a0158b6..5677255 100644
--- a/tests/python/parse_test.py
+++ b/tests/python/parse_test.py
@@ -4,7 +4,7 @@ from refactorlib.parse import parse
@parametrize(get_examples)
def test_can_make_round_trip(example):
- text = open(example).read()
+ text = open(example, 'rb').read()
example = parse(example)
assert text == example.totext()
@@ -12,7 +12,7 @@ def test_can_make_round_trip(example):
@parametrize(get_examples)
def test_encoding_detection(example):
from refactorlib.python.parse import detect_encoding
- text = open(example).read()
+ text = open(example, 'rb').read()
example = parse(example)
detected_encoding = detect_encoding(text)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 10
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[cheetah,javascript]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
cached-property==2.0.1
cfgv==3.4.0
coverage==7.8.0
dill==0.3.9
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
flake8==7.2.0
identify==2.6.9
iniconfig==2.1.0
isort==6.0.1
lxml==5.3.1
MarkupSafe==3.0.2
mccabe==0.7.0
mock==5.2.0
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
pycodestyle==2.13.0
pyflakes==3.3.1
pylint==3.3.6
pytest==8.3.5
PyYAML==6.0.2
-e git+https://github.com/bukzor/RefactorLib.git@181ff5525a5904eb7cf31653e80723450b7fc45e#egg=refactorlib
simplejson==3.20.1
six==1.17.0
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
virtualenv==20.29.3
yelp_cheetah==0.12.1
| name: RefactorLib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- astroid==3.3.9
- cached-property==2.0.1
- cfgv==3.4.0
- coverage==7.8.0
- dill==0.3.9
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==7.2.0
- identify==2.6.9
- iniconfig==2.1.0
- isort==6.0.1
- lxml==5.3.1
- markupsafe==3.0.2
- mccabe==0.7.0
- mock==5.2.0
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pylint==3.3.6
- pytest==8.3.5
- pyyaml==6.0.2
- simplejson==3.20.1
- six==1.17.0
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
- virtualenv==20.29.3
- yelp-cheetah==0.12.1
prefix: /opt/conda/envs/RefactorLib
| [
"tests/cheetah/directive_test.py::test_find_end_directive[tests/cheetah/directive_data/single-line.tmpl-tests/cheetah/directive_data/test_find_end_directive/single-line.txt]",
"tests/cheetah/directive_test.py::test_find_end_directive[tests/cheetah/directive_data/simple.tmpl-tests/cheetah/directive_data/test_find_end_directive/simple.txt]",
"tests/cheetah/directive_test.py::test_find_end_directive[tests/cheetah/directive_data/nested.tmpl-tests/cheetah/directive_data/test_find_end_directive/nested.txt]",
"tests/cheetah/directive_test.py::test_replace_directive[tests/cheetah/directive_data/single-line.tmpl-tests/cheetah/directive_data/test_replace_directive/single-line.tmpl]",
"tests/cheetah/directive_test.py::test_replace_directive[tests/cheetah/directive_data/simple.tmpl-tests/cheetah/directive_data/test_replace_directive/simple.tmpl]",
"tests/cheetah/directive_test.py::test_replace_directive[tests/cheetah/directive_data/nested.tmpl-tests/cheetah/directive_data/test_replace_directive/nested.tmpl]",
"tests/cheetah/is_in_context_test.py::test_is_in_context[tests/cheetah/is_in_context_data/simple.tmpl-tests/cheetah/is_in_context_data/test_is_in_context/simple.txt]",
"tests/cheetah/is_in_context_test.py::test_is_in_context[tests/cheetah/is_in_context_data/indirect.tmpl-tests/cheetah/is_in_context_data/test_is_in_context/indirect.txt]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/example1.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/continuation.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/all_directives.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/unicode.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/single-line.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/simple_call.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/simple.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/nested_call.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/nested2.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/nested.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/multiline.tmpl]",
"tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/complex_call.tmpl]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/example1.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/example1.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/continuation.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/continuation.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/all_directives.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/all_directives.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/unicode.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/unicode.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/single-line.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/single-line.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/simple_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple_call.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/simple.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/nested_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested_call.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/nested2.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested2.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/nested.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/multiline.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/multiline.xml]",
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/complex_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/complex_call.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/example1.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/example1.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/continuation.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/continuation.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/all_directives.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/all_directives.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/unicode.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/unicode.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/single-line.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/single-line.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/simple_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple_call.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/simple.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/nested_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested_call.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/nested2.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested2.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/nested.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/multiline.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/multiline.xml]",
"tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/complex_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/complex_call.xml]",
"tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/simple.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/simple.tmpl]",
"tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/multiline.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/multiline.tmpl]",
"tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/unicode2.py]",
"tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/unicode.py]",
"tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/example2_utf8.py]",
"tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/example1.py]",
"tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/comment_only.py]",
"tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/unicode2.py]",
"tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/unicode.py]",
"tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/example1.py]",
"tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/unicode2.py-tests/python/parse_data/test_matches_known_good_parsing/unicode2.xml]",
"tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/unicode.py-tests/python/parse_data/test_matches_known_good_parsing/unicode.xml]",
"tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/example2_utf8.py-tests/python/parse_data/test_matches_known_good_parsing/example2_utf8.xml]",
"tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/example1.py-tests/python/parse_data/test_matches_known_good_parsing/example1.xml]",
"tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/comment_only.py-tests/python/parse_data/test_matches_known_good_parsing/comment_only.xml]",
"tests/python/parse_test.py::test_cli_output[tests/python/parse_data/unicode2.py-tests/python/parse_data/test_matches_known_good_parsing/unicode2.xml]",
"tests/python/parse_test.py::test_cli_output[tests/python/parse_data/unicode.py-tests/python/parse_data/test_matches_known_good_parsing/unicode.xml]",
"tests/python/parse_test.py::test_cli_output[tests/python/parse_data/example2_utf8.py-tests/python/parse_data/test_matches_known_good_parsing/example2_utf8.xml]",
"tests/python/parse_test.py::test_cli_output[tests/python/parse_data/example1.py-tests/python/parse_data/test_matches_known_good_parsing/example1.xml]",
"tests/python/parse_test.py::test_cli_output[tests/python/parse_data/comment_only.py-tests/python/parse_data/test_matches_known_good_parsing/comment_only.xml]"
]
| [
"tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/example2_utf8.py]",
"tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/comment_only.py]"
]
| [
"tests/cheetah/add_comment_test.py::test_can_add_comments",
"tests/cheetah/directive_test.py::test_get_enclosing_blocks[tests/cheetah/directive_data/single-line.tmpl-tests/cheetah/directive_data/test_get_enclosing_blocks/single-line.txt]",
"tests/cheetah/directive_test.py::test_get_enclosing_blocks[tests/cheetah/directive_data/simple.tmpl-tests/cheetah/directive_data/test_get_enclosing_blocks/simple.txt]",
"tests/cheetah/directive_test.py::test_get_enclosing_blocks[tests/cheetah/directive_data/nested.tmpl-tests/cheetah/directive_data/test_get_enclosing_blocks/nested.txt]",
"tests/cheetah/remove_call_test.py::test_can_find_calls",
"tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/nested2.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/nested2.tmpl]",
"tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/nested.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/nested.tmpl]"
]
| []
| null | 443 | [
"refactorlib/util.py",
"refactorlib/javascript/parse.py",
"refactorlib/parse.py",
"setup.py",
"refactorlib/cli/xmlstrip.py",
".travis.yml",
"tox.ini",
"refactorlib/cheetah/node.py",
"refactorlib/cheetah/parse.py",
"refactorlib/python/parse.py"
]
| [
"refactorlib/util.py",
"refactorlib/javascript/parse.py",
"refactorlib/parse.py",
"setup.py",
"refactorlib/cli/xmlstrip.py",
".travis.yml",
"tox.ini",
"refactorlib/cheetah/node.py",
"refactorlib/cheetah/parse.py",
"refactorlib/python/parse.py"
]
|
|
getlogbook__logbook-195 | bb0f4fbeec318a140780b1ac8781599474cf2666 | 2016-02-21 11:29:15 | bb0f4fbeec318a140780b1ac8781599474cf2666 | diff --git a/logbook/utils.py b/logbook/utils.py
index 5416c52..7851f48 100644
--- a/logbook/utils.py
+++ b/logbook/utils.py
@@ -3,17 +3,14 @@ import functools
import sys
import threading
-from .base import Logger
+from .base import Logger, DEBUG
from .helpers import string_types
-from logbook import debug as logbook_debug
class _SlowContextNotifier(object):
- def __init__(self, threshold, logger_func, args, kwargs):
- self.logger_func = logger_func
- self.args = args
- self.kwargs = kwargs or {}
+ def __init__(self, threshold, func):
+ self.func = func
self.evt = threading.Event()
self.threshold = threshold
self.thread = threading.Thread(target=self._notifier)
@@ -21,7 +18,7 @@ class _SlowContextNotifier(object):
def _notifier(self):
self.evt.wait(timeout=self.threshold)
if not self.evt.is_set():
- self.logger_func(*self.args, **self.kwargs)
+ self.func()
def __enter__(self):
self.thread.start()
@@ -32,18 +29,36 @@ class _SlowContextNotifier(object):
self.thread.join()
-def logged_if_slow(message, threshold=1, func=logbook_debug, args=None,
- kwargs=None):
- """Logs a message (by default using the global debug logger) if a certain
- context containing a set of operations is too slow
+_slow_logger = Logger('Slow')
- >>> with logged_if_slow('too slow!'):
- ... ...
- .. versionadded:: 0.12
+def logged_if_slow(*args, **kwargs):
+ """Context manager that logs if operations within take longer than
+ `threshold` seconds.
+
+ :param threshold: Number of seconds (or fractions thereof) allwoed before
+ logging occurs. The default is 1 second.
+ :param logger: :class:`~logbook.Logger` to use. The default is a 'slow'
+ logger.
+ :param level: Log level. The default is `DEBUG`.
+ :param func: (Deprecated). Function to call to perform logging.
+
+ The remaining parameters are passed to the
+ :meth:`~logbook.base.LoggerMixin.log` method.
"""
- full_args = (message, ) if args is None else (message, ) + tuple(args)
- return _SlowContextNotifier(threshold, func, full_args, kwargs)
+ threshold = kwargs.pop('threshold', 1)
+ func = kwargs.pop('func', None)
+ if func is None:
+ logger = kwargs.pop('logger', _slow_logger)
+ level = kwargs.pop('level', DEBUG)
+ func = functools.partial(logger.log, level, *args, **kwargs)
+ else:
+ if 'logger' in kwargs or 'level' in kwargs:
+ raise TypeError("If using deprecated func parameter, 'logger' and"
+ " 'level' arguments cannot be passed.")
+ func = functools.partial(func, *args, **kwargs)
+
+ return _SlowContextNotifier(threshold, func)
class _Local(threading.local):
diff --git a/tox.ini b/tox.ini
index b3d3493..07a3f90 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,6 +6,7 @@ skipsdist=True
whitelist_externals=
rm
deps=
+ py{26,27}: mock
pytest
Cython
changedir={toxinidir}
@@ -18,6 +19,7 @@ commands=
[testenv:pypy]
deps=
+ mock
pytest
commands=
{envpython} {toxinidir}/setup.py develop
| logged_if_slow API
I think the logged_if_slow API is a bit awkward, and we should change it for v1.0.
Current API:
```
def logged_if_slow(message, threshold=1, func=logbook_debug, args=None, kwargs=None)
```
Problems:
- It always uses the default logger
- You have to pass a log function; why not a log level?
- `args` and `kwargs` are plain parameters (rather than `*args`, `**kwargs`).
Proposed API:
```
def logged_if_slow(*args, **kwargs):
threshold = kwargs.pop('threshold', 1)
logger = kwargs.pop('logger', _default_logger)
level = kwargs.pop('level', logbook.DEBUG)
# Context manager would call:
logger.log(level, *args, **kwargs)
```
Then the context manager can simply call
```
logger.log(level, *args, **kwargs)
```
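A minimal usage sketch of the proposed signature (the logger name and the timed body are illustrative placeholders):
```python
import logbook
from time import sleep
from logbook.utils import logged_if_slow

logger = logbook.Logger('db')

# Logs 'slow query' at INFO on `logger` only if the block takes longer than 0.5 seconds.
with logged_if_slow('slow query', threshold=0.5, logger=logger, level=logbook.INFO):
    sleep(1)  # stands in for the real work being timed
```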
Also: are there valid reasons to use threading and events to handle the threshold rather than just storing a `time.time()` value and comparing in `__exit__`? I don't know how the threading timeout works, so I'm just curious about the implementation! | getlogbook/logbook | diff --git a/tests/test_utils.py b/tests/test_utils.py
index 3d1443f..f4ca5b8 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -8,23 +8,59 @@ from time import sleep
_THRESHOLD = 0.1
+try:
+ from unittest.mock import Mock, call
+except ImportError:
+ from mock import Mock, call
-def test_logged_if_slow_reached(logger, test_handler):
+
+def test_logged_if_slow_reached(test_handler):
with test_handler.applicationbound():
with logged_if_slow('checking...', threshold=_THRESHOLD):
- sleep(2*_THRESHOLD)
+ sleep(2 * _THRESHOLD)
assert len(test_handler.records) == 1
[record] = test_handler.records
assert record.message == 'checking...'
-def test_logged_if_slow_did_not_reached(logger, test_handler):
+def test_logged_if_slow_did_not_reached(test_handler):
with test_handler.applicationbound():
with logged_if_slow('checking...', threshold=_THRESHOLD):
- sleep(_THRESHOLD/2)
+ sleep(_THRESHOLD / 2)
assert len(test_handler.records) == 0
+def test_logged_if_slow_logger():
+ logger = Mock()
+
+ with logged_if_slow('checking...', threshold=_THRESHOLD, logger=logger):
+ sleep(2 * _THRESHOLD)
+
+ assert logger.log.call_args == call(logbook.DEBUG, 'checking...')
+
+
+def test_logged_if_slow_level(test_handler):
+ with test_handler.applicationbound():
+ with logged_if_slow('checking...', threshold=_THRESHOLD,
+ level=logbook.WARNING):
+ sleep(2 * _THRESHOLD)
+
+ assert test_handler.records[0].level == logbook.WARNING
+
+
+def test_logged_if_slow_deprecated(logger, test_handler):
+ with test_handler.applicationbound():
+ with logged_if_slow('checking...', threshold=_THRESHOLD,
+ func=logbook.error):
+ sleep(2 * _THRESHOLD)
+
+ assert test_handler.records[0].level == logbook.ERROR
+ assert test_handler.records[0].message == 'checking...'
+
+ with pytest.raises(TypeError):
+ logged_if_slow('checking...', logger=logger, func=logger.error)
+
+
def test_deprecated_func_called(capture):
assert deprecated_func(1, 2) == 3
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libzmq3-dev"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
Cython==3.0.12
exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/getlogbook/logbook.git@bb0f4fbeec318a140780b1ac8781599474cf2666#egg=Logbook
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
| name: logbook
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- cython==3.0.12
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/logbook
| [
"tests/test_utils.py::test_logged_if_slow_logger",
"tests/test_utils.py::test_logged_if_slow_level"
]
| []
| [
"tests/test_utils.py::test_logged_if_slow_reached",
"tests/test_utils.py::test_logged_if_slow_did_not_reached",
"tests/test_utils.py::test_logged_if_slow_deprecated",
"tests/test_utils.py::test_deprecated_func_called",
"tests/test_utils.py::test_deprecation_message",
"tests/test_utils.py::test_deprecation_with_message",
"tests/test_utils.py::test_no_deprecations",
"tests/test_utils.py::test_class_deprecation[_no_decorator]",
"tests/test_utils.py::test_class_deprecation[classmethod]",
"tests/test_utils.py::test_deprecations_different_sources",
"tests/test_utils.py::test_deprecations_same_sources",
"tests/test_utils.py::test_deprecation_message_different_sources",
"tests/test_utils.py::test_deprecation_message_same_sources",
"tests/test_utils.py::test_deprecation_message_full_warning",
"tests/test_utils.py::test_name_doc",
"tests/test_utils.py::test_doc_update",
"tests/test_utils.py::test_deprecatd_docstring"
]
| []
| BSD License | 446 | [
"tox.ini",
"logbook/utils.py"
]
| [
"tox.ini",
"logbook/utils.py"
]
|
|
typesafehub__conductr-cli-110 | 5f065ac8f67877d0c393f26f789b22c270cb24d9 | 2016-02-22 03:34:24 | 76f795642d4d2220be0eddf75bcf8e933a7b6821 | markusjura: The current code would fail if the `value` would contain a `=` sign, e.g.
```
secret = ah5k=sdh
```
We should split the line into key and value only on the first `=` sign.
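A minimal sketch of that suggestion (hypothetical helper; the merged fix below ends up using a regex instead):
```python
# Hypothetical helper illustrating "split only on the first '='".
def parse_line(line):
    key, _, value = line.partition('=')
    return key.strip(), value.strip()

assert parse_line('secret = ah5k=sdh') == ('secret', 'ah5k=sdh')
```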
huntc: Good catch! I shall fix tomorrow! | diff --git a/conductr_cli/resolvers/bintray_resolver.py b/conductr_cli/resolvers/bintray_resolver.py
index cfb0ce9..2186cb6 100644
--- a/conductr_cli/resolvers/bintray_resolver.py
+++ b/conductr_cli/resolvers/bintray_resolver.py
@@ -5,13 +5,14 @@ from requests.exceptions import HTTPError
import json
import logging
import os
+import re
import requests
BINTRAY_API_BASE_URL = 'https://api.bintray.com'
BINTRAY_DOWNLOAD_BASE_URL = 'https://dl.bintray.com'
-BINTRAY_DOWNLOAD_REALM = 'Bintray'
BINTRAY_CREDENTIAL_FILE_PATH = '{}/.bintray/.credentials'.format(os.path.expanduser('~'))
+BINTRAY_PROPERTIES_RE = re.compile('^(\S+)\s*=\s*([\S]+)$')
def resolve_bundle(cache_dir, uri):
@@ -23,8 +24,7 @@ def resolve_bundle(cache_dir, uri):
bundle_download_url = bintray_download_url(bintray_username, bintray_password, org, repo, package_name,
compatibility_version, digest)
if bundle_download_url:
- auth = (BINTRAY_DOWNLOAD_REALM, bintray_username, bintray_password) if bintray_username else None
- return uri_resolver.resolve_bundle(cache_dir, bundle_download_url, auth)
+ return uri_resolver.resolve_bundle(cache_dir, bundle_download_url)
else:
return False, None, None
except MalformedBundleUriError:
@@ -61,8 +61,13 @@ def load_bintray_credentials():
lines = [line.replace('\n', '') for line in cred_file.readlines()]
data = dict()
for line in lines:
- key, value = line.replace(' = ', '=').split('=')
- data[key] = value
+ match = BINTRAY_PROPERTIES_RE.match(line)
+ if match is not None:
+ try:
+ key, value = match.group(1, 2)
+ data[key] = value
+ except IndexError:
+ pass
username = None if 'user' not in data else data['user']
password = None if 'password' not in data else data['password']
diff --git a/conductr_cli/resolvers/uri_resolver.py b/conductr_cli/resolvers/uri_resolver.py
index 8207d26..45a12df 100644
--- a/conductr_cli/resolvers/uri_resolver.py
+++ b/conductr_cli/resolvers/uri_resolver.py
@@ -6,10 +6,9 @@ from conductr_cli import screen_utils
import os
import logging
import shutil
-import urllib
-def resolve_bundle(cache_dir, uri, auth=None):
+def resolve_bundle(cache_dir, uri):
log = logging.getLogger(__name__)
if not os.path.exists(cache_dir):
@@ -24,7 +23,7 @@ def resolve_bundle(cache_dir, uri, auth=None):
if os.path.exists(tmp_download_path):
os.remove(tmp_download_path)
- download_bundle(log, bundle_url, tmp_download_path, auth)
+ download_bundle(log, bundle_url, tmp_download_path)
shutil.move(tmp_download_path, cached_file)
return True, bundle_name, cached_file
@@ -63,22 +62,12 @@ def cache_path(cache_dir, uri):
return '{}/{}'.format(cache_dir, basename)
-def download_bundle(log, bundle_url, tmp_download_path, auth):
+def download_bundle(log, bundle_url, tmp_download_path):
log.info('Retrieving {}'.format(bundle_url))
parsed = urlparse(bundle_url, scheme='file')
is_http_download = parsed.scheme == 'http' or parsed.scheme == 'https'
- if is_http_download and auth:
- realm, username, password = auth
- authinfo = urllib.request.HTTPBasicAuthHandler()
- authinfo.add_password(realm=realm,
- uri=bundle_url,
- user=username,
- passwd=password)
- opener = urllib.request.build_opener(authinfo)
- urllib.request.install_opener(opener)
-
if log.is_progress_enabled() and is_http_download:
urlretrieve(bundle_url, tmp_download_path, reporthook=show_progress(log))
else:
| bintray credential parsing
Apparently Bintray credential files can come in formats that break our parsing of them. Our parser will have to become more sophisticated. From @henrikengstrom:
> I haven’t changed the credentials file lately and it contained more than `user` and `password`, e.g. `realm` and `host`.
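For illustration, a sketch in the spirit of the merged fix in bintray_resolver.py: lines that do not look like `key = single_token_value` (such as a spaced-out `realm` entry) are simply skipped. The sample file contents and values here are placeholders.
```python
import re

# Same shape of pattern as the merged fix uses.
LINE_RE = re.compile(r'^(\S+)\s*=\s*(\S+)$')

sample = [
    'realm = Bintray API Realm',   # skipped: value contains spaces
    'host = api.bintray.com',      # placeholder host entry
    'user = someuser',             # placeholder credentials
    'password = sec=ret',
]

creds = {}
for line in sample:
    match = LINE_RE.match(line)
    if match:
        creds[match.group(1)] = match.group(2)

print(creds.get('user'), creds.get('password'))  # someuser sec=ret
```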
| typesafehub/conductr-cli | diff --git a/conductr_cli/resolvers/test/test_bintray_resolver.py b/conductr_cli/resolvers/test/test_bintray_resolver.py
index 2c8a77b..1d7b723 100644
--- a/conductr_cli/resolvers/test/test_bintray_resolver.py
+++ b/conductr_cli/resolvers/test/test_bintray_resolver.py
@@ -32,8 +32,7 @@ class TestResolveBundle(TestCase):
parse_mock.assert_called_with('bundle-name:v1')
bintray_download_url_mock.assert_called_with('username', 'password', 'typesafe', 'bundle', 'bundle-name', 'v1',
'digest')
- resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip',
- ('Bintray', 'username', 'password'))
+ resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip')
def test_bintray_version_not_found(self):
load_bintray_credentials_mock = MagicMock(return_value=('username', 'password'))
@@ -390,7 +389,8 @@ class TestLoadBintrayCredentials(TestCase):
def test_success(self):
bintray_credential_file = strip_margin(
"""|user = user1
- |password = secret
+ |password = sec=ret
+ |# Some comment
|""")
exists_mock = MagicMock(return_value=True)
@@ -400,7 +400,7 @@ class TestLoadBintrayCredentials(TestCase):
patch('builtins.open', open_mock):
username, password = bintray_resolver.load_bintray_credentials()
self.assertEqual('user1', username)
- self.assertEqual('secret', password)
+ self.assertEqual('sec=ret', password)
exists_mock.assert_called_with('{}/.bintray/.credentials'.format(os.path.expanduser('~')))
open_mock.assert_called_with('{}/.bintray/.credentials'.format(os.path.expanduser('~')), 'r')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.24 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==3.6.1
arrow==1.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
-e git+https://github.com/typesafehub/conductr-cli.git@5f065ac8f67877d0c393f26f789b22c270cb24d9#egg=conductr_cli
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
execnet==2.1.1
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pyhocon==0.2.1
pyparsing==2.0.3
pytest @ file:///croot/pytest_1738938843180/work
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
requests==2.32.3
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
urllib3==2.3.0
| name: conductr-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==3.6.1
- arrow==1.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- execnet==2.1.1
- idna==3.10
- pyhocon==0.2.1
- pyparsing==2.0.3
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- requests==2.32.3
- six==1.17.0
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- urllib3==2.3.0
prefix: /opt/conda/envs/conductr-cli
| [
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_success"
]
| []
| [
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_http_error",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_malformed_bundle_uri",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_http_error",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_malformed_bundle_uri",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_multiple_versions_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_version_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_failure_latest_version_malformed",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_no_version",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_credential_file_not_having_username_password",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_missing_credential_file",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json_no_credentials"
]
| []
| Apache License 2.0 | 447 | [
"conductr_cli/resolvers/uri_resolver.py",
"conductr_cli/resolvers/bintray_resolver.py"
]
| [
"conductr_cli/resolvers/uri_resolver.py",
"conductr_cli/resolvers/bintray_resolver.py"
]
|
typesafehub__conductr-cli-111 | 76f795642d4d2220be0eddf75bcf8e933a7b6821 | 2016-02-23 01:04:09 | 76f795642d4d2220be0eddf75bcf8e933a7b6821 | diff --git a/conductr_cli/resolvers/bintray_resolver.py b/conductr_cli/resolvers/bintray_resolver.py
index 8f892b1..cfb0ce9 100644
--- a/conductr_cli/resolvers/bintray_resolver.py
+++ b/conductr_cli/resolvers/bintray_resolver.py
@@ -10,6 +10,7 @@ import requests
BINTRAY_API_BASE_URL = 'https://api.bintray.com'
BINTRAY_DOWNLOAD_BASE_URL = 'https://dl.bintray.com'
+BINTRAY_DOWNLOAD_REALM = 'Bintray'
BINTRAY_CREDENTIAL_FILE_PATH = '{}/.bintray/.credentials'.format(os.path.expanduser('~'))
@@ -22,7 +23,8 @@ def resolve_bundle(cache_dir, uri):
bundle_download_url = bintray_download_url(bintray_username, bintray_password, org, repo, package_name,
compatibility_version, digest)
if bundle_download_url:
- return uri_resolver.resolve_bundle(cache_dir, bundle_download_url)
+ auth = (BINTRAY_DOWNLOAD_REALM, bintray_username, bintray_password) if bintray_username else None
+ return uri_resolver.resolve_bundle(cache_dir, bundle_download_url, auth)
else:
return False, None, None
except MalformedBundleUriError:
diff --git a/conductr_cli/resolvers/uri_resolver.py b/conductr_cli/resolvers/uri_resolver.py
index 45a12df..8207d26 100644
--- a/conductr_cli/resolvers/uri_resolver.py
+++ b/conductr_cli/resolvers/uri_resolver.py
@@ -6,9 +6,10 @@ from conductr_cli import screen_utils
import os
import logging
import shutil
+import urllib
-def resolve_bundle(cache_dir, uri):
+def resolve_bundle(cache_dir, uri, auth=None):
log = logging.getLogger(__name__)
if not os.path.exists(cache_dir):
@@ -23,7 +24,7 @@ def resolve_bundle(cache_dir, uri):
if os.path.exists(tmp_download_path):
os.remove(tmp_download_path)
- download_bundle(log, bundle_url, tmp_download_path)
+ download_bundle(log, bundle_url, tmp_download_path, auth)
shutil.move(tmp_download_path, cached_file)
return True, bundle_name, cached_file
@@ -62,12 +63,22 @@ def cache_path(cache_dir, uri):
return '{}/{}'.format(cache_dir, basename)
-def download_bundle(log, bundle_url, tmp_download_path):
+def download_bundle(log, bundle_url, tmp_download_path, auth):
log.info('Retrieving {}'.format(bundle_url))
parsed = urlparse(bundle_url, scheme='file')
is_http_download = parsed.scheme == 'http' or parsed.scheme == 'https'
+ if is_http_download and auth:
+ realm, username, password = auth
+ authinfo = urllib.request.HTTPBasicAuthHandler()
+ authinfo.add_password(realm=realm,
+ uri=bundle_url,
+ user=username,
+ passwd=password)
+ opener = urllib.request.build_opener(authinfo)
+ urllib.request.install_opener(opener)
+
if log.is_progress_enabled() and is_http_download:
urlretrieve(bundle_url, tmp_download_path, reporthook=show_progress(log))
else:
| Unable to access non-default org and repo with bintray resolver
Note how the URL duplicates the org and file name, even when you don't specify an org.
`conduct load typesafe/internal-bundle/typesafe-website`
```
Retrieving https://dl.bintray.com/typesafe/internal-bundle/typesafe/typesafe-website/v1-075dbb07a7c6271164c2a429b06f5908bc3d416d18c5813d3d4d718aa6470f2e/typesafe-website-v1-075dbb07a7c6271164c2a429b06f5908bc3d416d18c5813d3d4d718aa6470f2e.zip
Error: Bundle not found: Unable to resolve bundle using typesafe/internal-bundle/typesafe-website
```
` conduct load internal-bundle/typesafe-website`
```
Resolving bundle typesafe/internal-bundle/typesafe-website
Retrieving https://dl.bintray.com/typesafe/internal-bundle/typesafe/typesafe-website/v1-d31c136feef7a7c0a43a0bdf4a2179491e40161ee3a7a37335bcda1c13c5612f/typesafe-website-v1-d31c136feef7a7c0a43a0bdf4a2179491e40161ee3a7a37335bcda1c13c5612f.zip
Error: Bundle not found: Unable to resolve bundle using internal-bundle/typesafe-website
``` | typesafehub/conductr-cli | diff --git a/conductr_cli/resolvers/test/test_bintray_resolver.py b/conductr_cli/resolvers/test/test_bintray_resolver.py
index cf68b72..2c8a77b 100644
--- a/conductr_cli/resolvers/test/test_bintray_resolver.py
+++ b/conductr_cli/resolvers/test/test_bintray_resolver.py
@@ -32,7 +32,8 @@ class TestResolveBundle(TestCase):
parse_mock.assert_called_with('bundle-name:v1')
bintray_download_url_mock.assert_called_with('username', 'password', 'typesafe', 'bundle', 'bundle-name', 'v1',
'digest')
- resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip')
+ resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip',
+ ('Bintray', 'username', 'password'))
def test_bintray_version_not_found(self):
load_bintray_credentials_mock = MagicMock(return_value=('username', 'password'))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.24 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==3.6.1
arrow==1.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
-e git+https://github.com/typesafehub/conductr-cli.git@76f795642d4d2220be0eddf75bcf8e933a7b6821#egg=conductr_cli
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==7.2.0
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pycodestyle==2.13.0
pyflakes==3.3.1
pyhocon==0.2.1
pyparsing==2.0.3
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
requests==2.32.3
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
types-python-dateutil==2.9.0.20241206
urllib3==2.3.0
| name: conductr-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==3.6.1
- arrow==1.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- flake8==7.2.0
- idna==3.10
- mccabe==0.7.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pyhocon==0.2.1
- pyparsing==2.0.3
- python-dateutil==2.9.0.post0
- requests==2.32.3
- six==1.17.0
- types-python-dateutil==2.9.0.20241206
- urllib3==2.3.0
prefix: /opt/conda/envs/conductr-cli
| [
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_found"
]
| []
| [
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_http_error",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_malformed_bundle_uri",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_http_error",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_malformed_bundle_uri",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_multiple_versions_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_version_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_failure_latest_version_malformed",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names_not_found",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_no_version",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_credential_file_not_having_username_password",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_missing_credential_file",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_success",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json",
"conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json_no_credentials"
]
| []
| Apache License 2.0 | 448 | [
"conductr_cli/resolvers/uri_resolver.py",
"conductr_cli/resolvers/bintray_resolver.py"
]
| [
"conductr_cli/resolvers/uri_resolver.py",
"conductr_cli/resolvers/bintray_resolver.py"
]
|
|
wireservice__agate-509 | e90440bbee9f027459140fdfdb0478a8078338f9 | 2016-02-23 02:49:36 | e90440bbee9f027459140fdfdb0478a8078338f9 | diff --git a/agate/utils.py b/agate/utils.py
index e18e5f0..834e262 100644
--- a/agate/utils.py
+++ b/agate/utils.py
@@ -109,12 +109,12 @@ class Quantiles(Sequence):
raise ValueError('Value is greater than maximum quantile value.')
if value == self._quantiles[-1]:
- return len(self._quantiles) - 1
+ return Decimal(len(self._quantiles) - 1)
while value >= self._quantiles[i + 1]:
i += 1
- return i
+ return Decimal(i)
def median(data_sorted):
| Percentiles should be Decimals
For consistency | wireservice/agate | diff --git a/tests/test_computations.py b/tests/test_computations.py
index bfc3a9b..c9c76b3 100644
--- a/tests/test_computations.py
+++ b/tests/test_computations.py
@@ -316,6 +316,8 @@ class TestTableComputation(unittest.TestCase):
self.assertSequenceEqual(new_table.rows[500], (501, 50))
self.assertSequenceEqual(new_table.rows[998], (999, 99))
self.assertSequenceEqual(new_table.rows[999], (1000, 100))
+ self.assertIsInstance(new_table.columns['percentiles'][0], Decimal)
+ self.assertIsInstance(new_table.columns['percentiles'][-1], Decimal)
def test_percentile_rank_invalid_types(self):
with self.assertRaises(DataTypeError):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"parsedatetime>=2.0",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-py3.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/wireservice/agate.git@e90440bbee9f027459140fdfdb0478a8078338f9#egg=agate
alabaster==0.7.16
babel==2.17.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isodate==0.7.2
Jinja2==3.1.6
MarkupSafe==3.0.2
nose==1.3.7
packaging==24.2
parsedatetime==2.6
platformdirs==4.3.7
pluggy==1.5.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
pytimeparse==1.1.8
pytz==2025.2
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: agate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isodate==0.7.2
- jinja2==3.1.6
- markupsafe==3.0.2
- nose==1.3.7
- packaging==24.2
- parsedatetime==2.6
- platformdirs==4.3.7
- pluggy==1.5.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytimeparse==1.1.8
- pytz==2025.2
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/agate
| [
"tests/test_computations.py::TestTableComputation::test_percentile_rank"
]
| []
| [
"tests/test_computations.py::TestTableComputation::test_change",
"tests/test_computations.py::TestTableComputation::test_change_mixed_types",
"tests/test_computations.py::TestTableComputation::test_change_nulls",
"tests/test_computations.py::TestTableComputation::test_changed_invalid_types",
"tests/test_computations.py::TestTableComputation::test_formula",
"tests/test_computations.py::TestTableComputation::test_formula_invalid",
"tests/test_computations.py::TestTableComputation::test_formula_no_validate",
"tests/test_computations.py::TestTableComputation::test_percent",
"tests/test_computations.py::TestTableComputation::test_percent_change",
"tests/test_computations.py::TestTableComputation::test_percent_change_invalid_columns",
"tests/test_computations.py::TestTableComputation::test_percent_nulls",
"tests/test_computations.py::TestTableComputation::test_percent_total_override",
"tests/test_computations.py::TestTableComputation::test_percent_zeros",
"tests/test_computations.py::TestTableComputation::test_percentile_rank_invalid_types",
"tests/test_computations.py::TestTableComputation::test_rank_number",
"tests/test_computations.py::TestTableComputation::test_rank_number_key",
"tests/test_computations.py::TestTableComputation::test_rank_number_reverse",
"tests/test_computations.py::TestTableComputation::test_rank_number_reverse_key",
"tests/test_computations.py::TestTableComputation::test_rank_text",
"tests/test_computations.py::TestDateAndTimeComputations::test_change_dates",
"tests/test_computations.py::TestDateAndTimeComputations::test_change_datetimes",
"tests/test_computations.py::TestDateAndTimeComputations::test_change_timedeltas"
]
| []
| MIT License | 449 | [
"agate/utils.py"
]
| [
"agate/utils.py"
]
|
|
jmespath__jmespath.py-102 | a6baa176714c72a68a2b367810b91162b1125f41 | 2016-02-23 07:25:34 | 71f44854a35c5abcdb8fbd84e25d185d0ca53f92 | diff --git a/README.rst b/README.rst
index e7262d4..486783b 100644
--- a/README.rst
+++ b/README.rst
@@ -96,6 +96,85 @@ of your dict keys. To do this you can use either of these options:
... jmespath.Options(dict_cls=collections.OrderedDict))
+Custom Functions
+~~~~~~~~~~~~~~~~
+
+The JMESPath language has numerous
+`built-in functions
+<http://jmespath.org/specification.html#built-in-functions>`__, but it is
+also possible to add your own custom functions. Keep in mind that
+custom function support in jmespath.py is experimental and the API may
+change based on feedback.
+
+**If you have a custom function that you've found useful, consider submitting
+it to jmespath.site and propose that it be added to the JMESPath language.**
+You can submit proposals
+`here <https://github.com/jmespath/jmespath.site/issues>`__.
+
+To create custom functions:
+
+* Create a subclass of ``jmespath.functions.Functions``.
+* Create a method with the name ``_func_<your function name>``.
+* Apply the ``jmespath.functions.signature`` decorator that indicates
+ the expected types of the function arguments.
+* Provide an instance of your subclass in a ``jmespath.Options`` object.
+
+Below are a few examples:
+
+.. code:: python
+
+ import jmespath
+ from jmespath import functions
+
+ # 1. Create a subclass of functions.Functions.
+ # The function.Functions base class has logic
+ # that introspects all of its methods and automatically
+ # registers your custom functions in its function table.
+ class CustomFunctions(functions.Functions):
+
+ # 2 and 3. Create a function that starts with _func_
+ # and decorate it with @signature which indicates its
+ # expected types.
+ # In this example, we're creating a jmespath function
+ # called "unique_letters" that accepts a single argument
+ # with an expected type "string".
+ @functions.signature({'types': ['string']})
+ def _func_unique_letters(self, s):
+ # Given a string s, return a sorted
+ # string of unique letters: 'ccbbadd' -> 'abcd'
+ return ''.join(sorted(set(s)))
+
+ # Here's another example. This is creating
+ # a jmespath function called "my_add" that expects
+ # two arguments, both of which should be of type number.
+ @functions.signature({'types': ['number']}, {'types': ['number']})
+ def _func_my_add(self, x, y):
+ return x + y
+
+ # 4. Provide an instance of your subclass in a Options object.
+ options = jmespath.Options(custom_functions=CustomFunctions())
+
+ # Provide this value to jmespath.search:
+ # This will print 3
+ print(
+ jmespath.search(
+ 'my_add(`1`, `2`)', {}, options=options)
+ )
+
+ # This will print "abcd"
+ print(
+ jmespath.search(
+ 'foo.bar | unique_letters(@)',
+ {'foo': {'bar': 'ccbbadd'}},
+ options=options)
+ )
+
+Again, if you come up with useful functions that you think make
+sense in the JMESPath language (and make sense to implement in all
+JMESPath libraries, not just python), please let us know at
+`jmespath.site <https://github.com/jmespath/jmespath.site/issues>`__.
+
+
Specification
=============
diff --git a/jmespath/compat.py b/jmespath/compat.py
index 7b70adb..2ed0fe7 100644
--- a/jmespath/compat.py
+++ b/jmespath/compat.py
@@ -3,6 +3,15 @@ import inspect
PY2 = sys.version_info[0] == 2
+
+def with_metaclass(meta, *bases):
+ # Taken from flask/six.
+ class metaclass(meta):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
if PY2:
text_type = unicode
string_type = basestring
diff --git a/jmespath/functions.py b/jmespath/functions.py
index e306f7b..a7c7bd9 100644
--- a/jmespath/functions.py
+++ b/jmespath/functions.py
@@ -1,10 +1,9 @@
import math
import json
-import weakref
from jmespath import exceptions
from jmespath.compat import string_type as STRING_TYPE
-from jmespath.compat import get_methods
+from jmespath.compat import get_methods, with_metaclass
# python types -> jmespath types
@@ -35,48 +34,39 @@ REVERSE_TYPES_MAP = {
}
-def populate_function_table(cls):
- func_table = cls.FUNCTION_TABLE
- for name, method in get_methods(cls):
- signature = getattr(method, 'signature', None)
- if signature is not None:
- func_table[name[6:]] = {"function": method,
- "signature": signature}
- return cls
-
-
-def builtin_function(*arguments):
- def _record_arity(func):
+def signature(*arguments):
+ def _record_signature(func):
func.signature = arguments
return func
- return _record_arity
+ return _record_signature
-@populate_function_table
-class RuntimeFunctions(object):
- # The built in functions are automatically populated in the FUNCTION_TABLE
- # using the @builtin_function decorator on methods defined in this class.
+class FunctionRegistry(type):
+ def __init__(cls, name, bases, attrs):
+ cls._populate_function_table()
+ super(FunctionRegistry, cls).__init__(name, bases, attrs)
- FUNCTION_TABLE = {
- }
+ def _populate_function_table(cls):
+ function_table = getattr(cls, 'FUNCTION_TABLE', {})
+ # Any method with a @signature decorator that also
+ # starts with "_func_" is registered as a function.
+ # _func_max_by -> max_by function.
+ for name, method in get_methods(cls):
+ if not name.startswith('_func_'):
+ continue
+ signature = getattr(method, 'signature', None)
+ if signature is not None:
+ function_table[name[6:]] = {
+ 'function': method,
+ 'signature': signature,
+ }
+ cls.FUNCTION_TABLE = function_table
- def __init__(self):
- self._interpreter = None
- @property
- def interpreter(self):
- if self._interpreter is None:
- return None
- else:
- return self._interpreter()
+class Functions(with_metaclass(FunctionRegistry, object)):
- @interpreter.setter
- def interpreter(self, value):
- # A weakref is used because we have
- # a cyclic reference and we want to allow
- # for the memory to be properly freed when
- # the objects are no longer needed.
- self._interpreter = weakref.ref(value)
+ FUNCTION_TABLE = {
+ }
def call_function(self, function_name, resolved_args):
try:
@@ -170,28 +160,28 @@ class RuntimeFunctions(object):
raise exceptions.JMESPathTypeError(
function_name, element, actual_typename, types)
- @builtin_function({'types': ['number']})
+ @signature({'types': ['number']})
def _func_abs(self, arg):
return abs(arg)
- @builtin_function({'types': ['array-number']})
+ @signature({'types': ['array-number']})
def _func_avg(self, arg):
return sum(arg) / float(len(arg))
- @builtin_function({'types': [], 'variadic': True})
+ @signature({'types': [], 'variadic': True})
def _func_not_null(self, *arguments):
for argument in arguments:
if argument is not None:
return argument
- @builtin_function({'types': []})
+ @signature({'types': []})
def _func_to_array(self, arg):
if isinstance(arg, list):
return arg
else:
return [arg]
- @builtin_function({'types': []})
+ @signature({'types': []})
def _func_to_string(self, arg):
if isinstance(arg, STRING_TYPE):
return arg
@@ -199,7 +189,7 @@ class RuntimeFunctions(object):
return json.dumps(arg, separators=(',', ':'),
default=str)
- @builtin_function({'types': []})
+ @signature({'types': []})
def _func_to_number(self, arg):
if isinstance(arg, (list, dict, bool)):
return None
@@ -216,88 +206,88 @@ class RuntimeFunctions(object):
except ValueError:
return None
- @builtin_function({'types': ['array', 'string']}, {'types': []})
+ @signature({'types': ['array', 'string']}, {'types': []})
def _func_contains(self, subject, search):
return search in subject
- @builtin_function({'types': ['string', 'array', 'object']})
+ @signature({'types': ['string', 'array', 'object']})
def _func_length(self, arg):
return len(arg)
- @builtin_function({'types': ['string']}, {'types': ['string']})
+ @signature({'types': ['string']}, {'types': ['string']})
def _func_ends_with(self, search, suffix):
return search.endswith(suffix)
- @builtin_function({'types': ['string']}, {'types': ['string']})
+ @signature({'types': ['string']}, {'types': ['string']})
def _func_starts_with(self, search, suffix):
return search.startswith(suffix)
- @builtin_function({'types': ['array', 'string']})
+ @signature({'types': ['array', 'string']})
def _func_reverse(self, arg):
if isinstance(arg, STRING_TYPE):
return arg[::-1]
else:
return list(reversed(arg))
- @builtin_function({"types": ['number']})
+ @signature({"types": ['number']})
def _func_ceil(self, arg):
return math.ceil(arg)
- @builtin_function({"types": ['number']})
+ @signature({"types": ['number']})
def _func_floor(self, arg):
return math.floor(arg)
- @builtin_function({"types": ['string']}, {"types": ['array-string']})
+ @signature({"types": ['string']}, {"types": ['array-string']})
def _func_join(self, separator, array):
return separator.join(array)
- @builtin_function({'types': ['expref']}, {'types': ['array']})
+ @signature({'types': ['expref']}, {'types': ['array']})
def _func_map(self, expref, arg):
result = []
for element in arg:
- result.append(self.interpreter.visit(expref.expression, element))
+ result.append(expref.visit(expref.expression, element))
return result
- @builtin_function({"types": ['array-number', 'array-string']})
+ @signature({"types": ['array-number', 'array-string']})
def _func_max(self, arg):
if arg:
return max(arg)
else:
return None
- @builtin_function({"types": ["object"], "variadic": True})
+ @signature({"types": ["object"], "variadic": True})
def _func_merge(self, *arguments):
merged = {}
for arg in arguments:
merged.update(arg)
return merged
- @builtin_function({"types": ['array-number', 'array-string']})
+ @signature({"types": ['array-number', 'array-string']})
def _func_min(self, arg):
if arg:
return min(arg)
else:
return None
- @builtin_function({"types": ['array-string', 'array-number']})
+ @signature({"types": ['array-string', 'array-number']})
def _func_sort(self, arg):
return list(sorted(arg))
- @builtin_function({"types": ['array-number']})
+ @signature({"types": ['array-number']})
def _func_sum(self, arg):
return sum(arg)
- @builtin_function({"types": ['object']})
+ @signature({"types": ['object']})
def _func_keys(self, arg):
# To be consistent with .values()
# should we also return the indices of a list?
return list(arg.keys())
- @builtin_function({"types": ['object']})
+ @signature({"types": ['object']})
def _func_values(self, arg):
return list(arg.values())
- @builtin_function({'types': []})
+ @signature({'types': []})
def _func_type(self, arg):
if isinstance(arg, STRING_TYPE):
return "string"
@@ -312,7 +302,7 @@ class RuntimeFunctions(object):
elif arg is None:
return "null"
- @builtin_function({'types': ['array']}, {'types': ['expref']})
+ @signature({'types': ['array']}, {'types': ['expref']})
def _func_sort_by(self, array, expref):
if not array:
return array
@@ -323,34 +313,32 @@ class RuntimeFunctions(object):
# that validates that type, which requires that remaining array
# elements resolve to the same type as the first element.
required_type = self._convert_to_jmespath_type(
- type(self.interpreter.visit(expref.expression, array[0])).__name__)
+ type(expref.visit(expref.expression, array[0])).__name__)
if required_type not in ['number', 'string']:
raise exceptions.JMESPathTypeError(
'sort_by', array[0], required_type, ['string', 'number'])
- keyfunc = self._create_key_func(expref.expression,
+ keyfunc = self._create_key_func(expref,
[required_type],
'sort_by')
return list(sorted(array, key=keyfunc))
- @builtin_function({'types': ['array']}, {'types': ['expref']})
+ @signature({'types': ['array']}, {'types': ['expref']})
def _func_min_by(self, array, expref):
- keyfunc = self._create_key_func(expref.expression,
+ keyfunc = self._create_key_func(expref,
['number', 'string'],
'min_by')
return min(array, key=keyfunc)
- @builtin_function({'types': ['array']}, {'types': ['expref']})
+ @signature({'types': ['array']}, {'types': ['expref']})
def _func_max_by(self, array, expref):
- keyfunc = self._create_key_func(expref.expression,
+ keyfunc = self._create_key_func(expref,
['number', 'string'],
'min_by')
return max(array, key=keyfunc)
- def _create_key_func(self, expr_node, allowed_types, function_name):
- interpreter = self.interpreter
-
+ def _create_key_func(self, expref, allowed_types, function_name):
def keyfunc(x):
- result = interpreter.visit(expr_node, x)
+ result = expref.visit(expref.expression, x)
actual_typename = type(result).__name__
jmespath_type = self._convert_to_jmespath_type(actual_typename)
# allowed_types is in term of jmespath types, not python types.
diff --git a/jmespath/visitor.py b/jmespath/visitor.py
index bbb5a8e..8a94a7e 100644
--- a/jmespath/visitor.py
+++ b/jmespath/visitor.py
@@ -35,7 +35,7 @@ def _is_special_integer_case(x, y):
class Options(object):
"""Options to control how a JMESPath function is evaluated."""
- def __init__(self, dict_cls):
+ def __init__(self, dict_cls=None, custom_functions=None):
#: The class to use when creating a dict. The interpreter
# may create dictionaries during the evalution of a JMESPath
# expression. For example, a multi-select hash will
@@ -45,11 +45,16 @@ class Options(object):
# want to set a collections.OrderedDict so that you can
# have predictible key ordering.
self.dict_cls = dict_cls
+ self.custom_functions = custom_functions
class _Expression(object):
- def __init__(self, expression):
+ def __init__(self, expression, interpreter):
self.expression = expression
+ self.interpreter = interpreter
+
+ def visit(self, node, *args, **kwargs):
+ return self.interpreter.visit(node, *args, **kwargs)
class Visitor(object):
@@ -83,15 +88,16 @@ class TreeInterpreter(Visitor):
def __init__(self, options=None):
super(TreeInterpreter, self).__init__()
- self._options = options
self._dict_cls = self.MAP_TYPE
- if options is not None and options.dict_cls is not None:
+ if options is None:
+ options = Options()
+ self._options = options
+ if options.dict_cls is not None:
self._dict_cls = self._options.dict_cls
- self._functions = functions.RuntimeFunctions()
- # Note that .interpreter is a property that uses
- # a weakref so that the cyclic reference can be
- # properly freed.
- self._functions.interpreter = self
+ if options.custom_functions is not None:
+ self._functions = self._options.custom_functions
+ else:
+ self._functions = functions.Functions()
def default_visit(self, node, *args, **kwargs):
raise NotImplementedError(node['type'])
@@ -119,7 +125,7 @@ class TreeInterpreter(Visitor):
return value
def visit_expref(self, node, value):
- return _Expression(node['children'][0])
+ return _Expression(node['children'][0], self)
def visit_function_expression(self, node, value):
resolved_args = []
| Custom functions
What are your thoughts about adding a method to register custom functions directly into the `RuntimeFunctions` class in `functions.py`?
JMESPath is almost good enough to use as a general-purpose domain-specific language for transforming objects. You can sneak literals into the multi-select hash. You can filter for values and transform them to booleans using the `<=` and `==` operators. There's some support for making sure values are numbers.
However, I don't see any way to do something like "if value is x, return `y`", where you show a literal if a condition matches. There's no way to convert conditions to an arbitrary literal - if a value in a multi-select hash is going to be a literal, it has to be the same value no matter what.
I can see a possible workaround if custom functions were supported in JMESPath. E.g., if I implemented an "if" function, I could do something like:
    search("if(bar==`1`, `hello`, `world`)", {'bar': '1'})
This would return the literal `hello` if the `bar` key is 1; otherwise it returns `world`. The only issue is that with the current Python implementation it's going to be hacky to do this: you have to override multiple classes in `functions.py`, and override the `TreeInterpreter` and `ParsedResult` classes as well.
I think that if custom functions were desired, it would be much more elegant if there were a method to register them directly into `RuntimeFunctions` in `functions.py`, rather than either forcing a fork or overriding a litany of classes.
What do you think?
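For what it's worth, with the custom-function support introduced in the patch above, a conditional helper along these lines becomes possible. The sketch below is illustrative only: the `Functions` base class, the `signature` decorator, and the `Options(custom_functions=...)` hook come from the patch, while the function name `pick` and the sample data are made up here; empty `types` lists are used so the helper accepts arguments of any JMESPath type.
```python
import jmespath
from jmespath import functions

class ConditionalFunctions(functions.Functions):
    # pick(condition, when_true, when_false) returns the second argument
    # when the first is truthy, and the third otherwise.
    @functions.signature({'types': []}, {'types': []}, {'types': []})
    def _func_pick(self, condition, when_true, when_false):
        return when_true if condition else when_false

options = jmespath.Options(custom_functions=ConditionalFunctions())

# Prints "hello" because the "bar" key is truthy in the input document.
print(jmespath.search('pick(bar, `"hello"`, `"world"`)',
                      {'bar': True}, options=options))
```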
| jmespath/jmespath.py | diff --git a/tests/test_search.py b/tests/test_search.py
index 56a0a75..71ab3dc 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -1,6 +1,7 @@
from tests import unittest, OrderedDict
import jmespath
+import jmespath.functions
class TestSearchOptions(unittest.TestCase):
@@ -10,3 +11,28 @@ class TestSearchOptions(unittest.TestCase):
{'c': 'c', 'b': 'b', 'a': 'a', 'd': 'd'},
options=jmespath.Options(dict_cls=OrderedDict))
self.assertEqual(result, ['a', 'b', 'c'])
+
+ def test_can_provide_custom_functions(self):
+ class CustomFunctions(jmespath.functions.Functions):
+ @jmespath.functions.signature(
+ {'types': ['number']},
+ {'types': ['number']})
+ def _func_custom_add(self, x, y):
+ return x + y
+
+ @jmespath.functions.signature(
+ {'types': ['number']},
+ {'types': ['number']})
+ def _func_my_subtract(self, x, y):
+ return x - y
+
+
+ options = jmespath.Options(custom_functions=CustomFunctions())
+ self.assertEqual(
+ jmespath.search('custom_add(`1`, `2`)', {}, options=options),
+ 3
+ )
+ self.assertEqual(
+ jmespath.search('my_subtract(`10`, `3`)', {}, options=options),
+ 7
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 4
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
distlib==0.3.9
filelock==3.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
-e git+https://github.com/jmespath/jmespath.py.git@a6baa176714c72a68a2b367810b91162b1125f41#egg=jmespath
nose==1.2.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
tox==1.4.2
typing_extensions==4.1.1
virtualenv==20.17.1
zipp==3.6.0
| name: jmespath.py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- distlib==0.3.9
- filelock==3.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- nose==1.2.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- tox==1.4.2
- typing-extensions==4.1.1
- virtualenv==20.17.1
- wheel==0.24.0
- zipp==3.6.0
prefix: /opt/conda/envs/jmespath.py
| [
"tests/test_search.py::TestSearchOptions::test_can_provide_custom_functions"
]
| []
| [
"tests/test_search.py::TestSearchOptions::test_can_provide_dict_cls"
]
| []
| MIT License | 450 | [
"README.rst",
"jmespath/compat.py",
"jmespath/functions.py",
"jmespath/visitor.py"
]
| [
"README.rst",
"jmespath/compat.py",
"jmespath/functions.py",
"jmespath/visitor.py"
]
|
|
rbarrois__python-semanticversion-38 | d10ab4cb1d1ca8090132f205017dfc90473c91cc | 2016-02-23 22:25:50 | d10ab4cb1d1ca8090132f205017dfc90473c91cc | diff --git a/CREDITS b/CREDITS
index 53fdef1..ca9a781 100644
--- a/CREDITS
+++ b/CREDITS
@@ -23,6 +23,7 @@ The project has received contributions from (in alphabetical order):
* Michael Hrivnak <[email protected]> (https://github.com/mhrivnak)
* William Minchin <[email protected]> (https://github.com/minchinweb)
* Dave Hall <[email protected]> (https://github.com/skwashd)
+* Martin Ek <[email protected]> (https://github.com/ekmartin)
Contributor license agreement
diff --git a/semantic_version/base.py b/semantic_version/base.py
index 1504642..83a9c25 100644
--- a/semantic_version/base.py
+++ b/semantic_version/base.py
@@ -405,6 +405,7 @@ class SpecItem(object):
KIND_NEQ = '!='
KIND_CARET = '^'
KIND_TILDE = '~'
+ KIND_COMPATIBLE = '~='
# Map a kind alias to its full version
KIND_ALIASES = {
@@ -412,7 +413,7 @@ class SpecItem(object):
KIND_EMPTY: KIND_EQUAL,
}
- re_spec = re.compile(r'^(<|<=||=|==|>=|>|!=|\^|~)(\d.*)$')
+ re_spec = re.compile(r'^(<|<=||=|==|>=|>|!=|\^|~|~=)(\d.*)$')
def __init__(self, requirement_string):
kind, spec = self.parse(requirement_string)
@@ -468,6 +469,12 @@ class SpecItem(object):
return self.spec <= version < upper
elif self.kind == self.KIND_TILDE:
return self.spec <= version < self.spec.next_minor()
+ elif self.kind == self.KIND_COMPATIBLE:
+ if self.spec.patch:
+ upper = self.spec.next_minor()
+ else:
+ upper = self.spec.next_major()
+ return self.spec <= version < upper
else: # pragma: no cover
raise ValueError('Unexpected match kind: %r' % self.kind)
| Support compatible release clauses (~=)
Hi!
Here's an example:
```python
Spec('~=1.5.6') # results in a ValueError
```
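For reference, PEP 440's compatible-release clause pins the release one level above the last segment given; with support added (as in the patch above), `Spec` would behave roughly as in the sketch below, which reuses the versions exercised by the accompanying tests:
```python
from semantic_version import Spec, Version

# ~=1.4.5 behaves like ">=1.4.5,<1.5.0": the minor series is pinned.
assert Spec('~=1.4.5').match(Version('1.4.10'))
assert not Spec('~=1.4.5').match(Version('1.5.0'))

# ~=1.4 behaves like ">=1.4.0,<2.0.0": only the major series is pinned.
assert Spec('~=1.4').match(Version('1.6.10'))
assert not Spec('~=1.4').match(Version('2.0.0'))
```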
https://www.python.org/dev/peps/pep-0440/#compatible-release | rbarrois/python-semanticversion | diff --git a/tests/test_base.py b/tests/test_base.py
index 24bf86e..0675b24 100755
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -514,6 +514,14 @@ class SpecItemTestCase(unittest.TestCase):
['0.0.2', '0.0.2-alpha', '0.0.2+abb'],
['0.1.0', '0.0.3', '1.0.0'],
),
+ '~=1.4.5': (
+ ['1.4.5', '1.4.10-alpha', '1.4.10'],
+ ['1.3.6', '1.4.4', '1.5.0'],
+ ),
+ '~=1.4': (
+ ['1.4.0', '1.6.10-alpha', '1.6.10'],
+ ['1.3.0', '2.0.0'],
+ ),
}
def test_matches(self):
diff --git a/tests/test_match.py b/tests/test_match.py
index 49464f8..4d1a96f 100755
--- a/tests/test_match.py
+++ b/tests/test_match.py
@@ -31,6 +31,7 @@ class MatchTestCase(unittest.TestCase):
'!=0.1.2-rc1.3-14.15+build.2012-01-01.11h34',
'^0.1.2',
'~0.1.2',
+ '~=0.1.2',
]
matches = {
@@ -113,6 +114,16 @@ class MatchTestCase(unittest.TestCase):
'0.1.2+build4.5',
'0.1.3-rc1.3',
],
+ '~=1.4.5': (
+ '1.4.5',
+ '1.4.10-alpha',
+ '1.4.10',
+ ),
+ '~=1.4': [
+ '1.4.0',
+ '1.6.10-alpha',
+ '1.6.10',
+ ],
}
def test_invalid(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 2.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
-e git+https://github.com/rbarrois/python-semanticversion.git@d10ab4cb1d1ca8090132f205017dfc90473c91cc#egg=semantic_version
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-semanticversion
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-semanticversion
| [
"tests/test_base.py::SpecItemTestCase::test_matches",
"tests/test_match.py::MatchTestCase::test_match",
"tests/test_match.py::MatchTestCase::test_simple"
]
| []
| [
"tests/test_base.py::ComparisonTestCase::test_identifier_cmp",
"tests/test_base.py::ComparisonTestCase::test_identifier_list_cmp",
"tests/test_base.py::TopLevelTestCase::test_compare",
"tests/test_base.py::TopLevelTestCase::test_match",
"tests/test_base.py::TopLevelTestCase::test_validate_invalid",
"tests/test_base.py::TopLevelTestCase::test_validate_valid",
"tests/test_base.py::VersionTestCase::test_bump_clean_versions",
"tests/test_base.py::VersionTestCase::test_bump_prerelease_versions",
"tests/test_base.py::VersionTestCase::test_compare_partial_to_self",
"tests/test_base.py::VersionTestCase::test_compare_to_self",
"tests/test_base.py::VersionTestCase::test_hash",
"tests/test_base.py::VersionTestCase::test_invalid_comparisons",
"tests/test_base.py::VersionTestCase::test_parsing",
"tests/test_base.py::VersionTestCase::test_parsing_partials",
"tests/test_base.py::VersionTestCase::test_str",
"tests/test_base.py::VersionTestCase::test_str_partials",
"tests/test_base.py::SpecItemTestCase::test_components",
"tests/test_base.py::SpecItemTestCase::test_equality",
"tests/test_base.py::SpecItemTestCase::test_hash",
"tests/test_base.py::SpecItemTestCase::test_invalids",
"tests/test_base.py::SpecItemTestCase::test_to_string",
"tests/test_base.py::CoerceTestCase::test_coerce",
"tests/test_base.py::CoerceTestCase::test_invalid",
"tests/test_base.py::SpecTestCase::test_contains",
"tests/test_base.py::SpecTestCase::test_equality",
"tests/test_base.py::SpecTestCase::test_filter_compatible",
"tests/test_base.py::SpecTestCase::test_filter_empty",
"tests/test_base.py::SpecTestCase::test_filter_incompatible",
"tests/test_base.py::SpecTestCase::test_hash",
"tests/test_base.py::SpecTestCase::test_matches",
"tests/test_base.py::SpecTestCase::test_parsing",
"tests/test_base.py::SpecTestCase::test_parsing_split",
"tests/test_base.py::SpecTestCase::test_select_compatible",
"tests/test_base.py::SpecTestCase::test_select_empty",
"tests/test_base.py::SpecTestCase::test_select_incompatible",
"tests/test_match.py::MatchTestCase::test_build_check",
"tests/test_match.py::MatchTestCase::test_contains",
"tests/test_match.py::MatchTestCase::test_invalid",
"tests/test_match.py::MatchTestCase::test_prerelease_check"
]
| []
| BSD 2-Clause "Simplified" License | 451 | [
"CREDITS",
"semantic_version/base.py"
]
| [
"CREDITS",
"semantic_version/base.py"
]
|
|
scieloorg__xylose-96 | df12890d7e4d8d986f33844513b9d4f68a148fda | 2016-02-25 18:06:10 | df12890d7e4d8d986f33844513b9d4f68a148fda | fabiobatalha: Can you write a test case for that?
swarzesherz: Updated with a test case
fabiobatalha: Sorry for not commenting earlier.
I think it would also be nice to include in Xylose's output the ISO version of the country, when it exists and is valid.
swarzesherz: Updated, I added affdict['country_iso_3166'] = aff['p'] | diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index 0b42e3e..c71e5b2 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -1309,6 +1309,7 @@ class Article(object):
continue
normalized[aff['index']]['normalized'] = True
normalized[aff['index']]['country'] = aff.get('country', '')
+ normalized[aff['index']]['country_iso_3166'] = aff.get('country_iso_3166', '')
normalized[aff['index']]['institution'] = aff.get('institution', '')
normalized[aff['index']]['state'] = aff.get('state', '')
@@ -1335,8 +1336,7 @@ class Article(object):
if 'p' in aff and aff['p'] in choices.ISO_3166:
affdict['country'] = choices.ISO_3166[aff['p']]
- if aff['p'] in choices.ISO_3166:
- affdict['country_iso_3166'] = aff['p']
+ affdict['country_iso_3166'] = aff['p']
if 's' in aff:
affdict['state'] = aff['s']
@@ -1369,6 +1369,9 @@ class Article(object):
affdict['state'] = html_decode(aff['s'])
if 'p' in aff:
affdict['country'] = html_decode(aff['p'])
+ if 'p' in aff and 'q' in aff and aff['p'] in choices.ISO_3166:
+ affdict['country'] = choices.ISO_3166[aff['p']]
+ affdict['country_iso_3166'] = aff['p']
if 'e' in aff:
affdict['email'] = html_decode(aff['e'])
if 'd' in aff:
| Incorrect country assignment in non-normalized affiliations (v70)
In versions of PC-Programs prior to https://github.com/scieloorg/PC-Programs/commit/5e494a031cabb9d718970a6201f3ee6c9847b942, the ```p``` field was assigned as follows:
```
a['p'] = item.country if item.i_country is None else item.i_country
a['q'] = item.country if item.i_country is not None else None
```
As a result, even when the field is not normalized it was assigned the value corresponding to the ISO_3166 code, which causes problems in applications such as articles_meta, where we find ISO_3166 codes mixed with country names:
http://articlemeta.scielo.org/api/v1/article/?code=S1665-70632015000300102&format=xmlwos
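To make the expected behaviour concrete (a sketch mirroring the A04 record used in the test below; the values are only an example), a non-normalized `v70` entry that carries the ISO 3166 code in `p` and the country name in `q` should expose both pieces of data separately:
```python
# A non-normalized v70 affiliation as written by older PC-Programs versions:
# "p" holds the ISO 3166 code, "q" holds the country name.
raw_affiliation = {
    '_': 'Secretaría de Salud de Yucatán',
    'i': 'A04',
    'p': 'MX',       # ISO 3166 code
    'q': 'Mexico',   # country name
    's': 'Yucatán',
}

# What Article.affiliations is expected to yield for this entry after the fix:
expected = {
    'index': 'A04',
    'institution': 'Secretaría de Salud de Yucatán',
    'state': 'Yucatán',
    'country': 'Mexico',           # resolved via choices.ISO_3166['MX']
    'country_iso_3166': 'MX',
}
```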

| scieloorg/xylose | diff --git a/tests/test_document.py b/tests/test_document.py
index 758d7b8..6e6b1d6 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -1751,6 +1751,13 @@ class ArticleTests(unittest.TestCase):
u"p": u"US",
u"s": u"São Paulo",
u"_": u"University of Florida Not Normalized"
+ },
+ {
+ u"i": u"A04",
+ u"q": u"Mexico",
+ u"p": u"MX",
+ u"s": u"Yucatán",
+ u"_": u"Secretaría de Salud de Yucatán"
}
]
@@ -1758,13 +1765,15 @@ class ArticleTests(unittest.TestCase):
result_index = u''.join([i['index'] for i in sorted(amc, key=lambda k: k['index'])])
result_country = u''.join([i['country'] for i in sorted(amc, key=lambda k: k['index'])])
+ result_country_iso = u''.join([i['country_iso_3166'] for i in sorted(amc, key=lambda k: k['index']) if 'country_iso_3166' in i])
result_status = u''.join([str(i['normalized']) for i in sorted(amc, key=lambda k: k['index'])])
result_state = u''.join([i['state'] for i in sorted(amc, key=lambda k: k['index'])])
- self.assertEqual(result_index, u'A01A02A03')
- self.assertEqual(result_country, u'BrazilBrazilUS')
- self.assertEqual(result_status, u'TrueTrueFalse')
- self.assertEqual(result_state, u'São PauloSão Paulo')
+ self.assertEqual(result_index, u'A01A02A03A04')
+ self.assertEqual(result_country, u'BrazilBrazilUSMexico')
+ self.assertEqual(result_country_iso, u'BRBRMX')
+ self.assertEqual(result_status, u'TrueTrueFalseFalse')
+ self.assertEqual(result_state, u'São PauloSão PauloYucatán')
def test_without_normalized_affiliations(self):
article = self.article
@@ -1992,6 +2001,41 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.affiliations, expected)
+ def test_affiliation_with_country_iso_3166(self):
+
+ article = self.article
+
+ del(article.data['article']['v70'])
+
+ article.data['article']['v70'] = [
+ {
+ u"1": u"Escuela Nacional de Enfermería y Obstetricia",
+ u"2": u"División de Estudios de Posgrado e Investigación",
+ u"q": u"Mexico",
+ u"c": u"México",
+ u"i": u"A01",
+ u"l": u"a",
+ u"p": u"MX",
+ u"s": u"D.F.",
+ u"_": u"Universidad Nacional Autónoma de México"
+ }
+ ]
+
+ expected = [
+ {
+ 'index': u'A01',
+ 'city': u'México',
+ 'state': u'D.F.',
+ 'country': u'Mexico',
+ 'country_iso_3166': u'MX',
+ 'orgdiv1': u'Escuela Nacional de Enfermería y Obstetricia',
+ 'orgdiv2': u'División de Estudios de Posgrado e Investigación',
+ 'institution': u'Universidad Nacional Autónoma de México'
+ }
+ ]
+
+ self.assertEqual(article.affiliations, expected)
+
def test_without_scielo_domain(self):
article = self.article
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/scieloorg/xylose.git@df12890d7e4d8d986f33844513b9d4f68a148fda#egg=xylose
| name: xylose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/xylose
| [
"tests/test_document.py::ArticleTests::test_affiliation_with_country_iso_3166",
"tests/test_document.py::ArticleTests::test_mixed_affiliations_1"
]
| []
| [
"tests/test_document.py::ToolsTests::test_get_date_wrong_day",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_year",
"tests/test_document.py::ToolsTests::test_get_date_year_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day_31",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined",
"tests/test_document.py::ToolsTests::test_get_language_without_iso_format",
"tests/test_document.py::IssueTests::test_collection_acronym",
"tests/test_document.py::IssueTests::test_is_ahead",
"tests/test_document.py::IssueTests::test_is_ahead_1",
"tests/test_document.py::IssueTests::test_issue",
"tests/test_document.py::IssueTests::test_issue_label_field_v4",
"tests/test_document.py::IssueTests::test_issue_label_without_field_v4",
"tests/test_document.py::IssueTests::test_issue_url",
"tests/test_document.py::IssueTests::test_order",
"tests/test_document.py::IssueTests::test_supplement_number",
"tests/test_document.py::IssueTests::test_supplement_volume",
"tests/test_document.py::IssueTests::test_volume",
"tests/test_document.py::IssueTests::test_without_issue",
"tests/test_document.py::IssueTests::test_without_suplement_number",
"tests/test_document.py::IssueTests::test_without_supplement_volume",
"tests/test_document.py::IssueTests::test_without_volume",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_print",
"tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print",
"tests/test_document.py::JournalTests::test_collection_acronym",
"tests/test_document.py::JournalTests::test_creation_date",
"tests/test_document.py::JournalTests::test_current_status",
"tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_current_status_some_changes",
"tests/test_document.py::JournalTests::test_current_without_v51",
"tests/test_document.py::JournalTests::test_journal",
"tests/test_document.py::JournalTests::test_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_journal_acronym",
"tests/test_document.py::JournalTests::test_journal_fulltitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_title",
"tests/test_document.py::JournalTests::test_journal_subtitle",
"tests/test_document.py::JournalTests::test_journal_title",
"tests/test_document.py::JournalTests::test_journal_title_nlm",
"tests/test_document.py::JournalTests::test_journal_url",
"tests/test_document.py::JournalTests::test_journal_without_subtitle",
"tests/test_document.py::JournalTests::test_languages",
"tests/test_document.py::JournalTests::test_languages_without_v350",
"tests/test_document.py::JournalTests::test_load_issn_with_v435",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35",
"tests/test_document.py::JournalTests::test_periodicity",
"tests/test_document.py::JournalTests::test_periodicity_in_months",
"tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices",
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices",
"tests/test_document.py::JournalTests::test_permission_id",
"tests/test_document.py::JournalTests::test_permission_t0",
"tests/test_document.py::JournalTests::test_permission_t1",
"tests/test_document.py::JournalTests::test_permission_t2",
"tests/test_document.py::JournalTests::test_permission_t3",
"tests/test_document.py::JournalTests::test_permission_t4",
"tests/test_document.py::JournalTests::test_permission_text",
"tests/test_document.py::JournalTests::test_permission_url",
"tests/test_document.py::JournalTests::test_permission_without_v540",
"tests/test_document.py::JournalTests::test_permission_without_v540_t",
"tests/test_document.py::JournalTests::test_publisher_loc",
"tests/test_document.py::JournalTests::test_publisher_name",
"tests/test_document.py::JournalTests::test_scielo_issn",
"tests/test_document.py::JournalTests::test_status",
"tests/test_document.py::JournalTests::test_status_lots_of_changes",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason",
"tests/test_document.py::JournalTests::test_status_some_changes",
"tests/test_document.py::JournalTests::test_status_without_v51",
"tests/test_document.py::JournalTests::test_subject_areas",
"tests/test_document.py::JournalTests::test_update_date",
"tests/test_document.py::JournalTests::test_without_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_without_journal_acronym",
"tests/test_document.py::JournalTests::test_without_journal_title",
"tests/test_document.py::JournalTests::test_without_journal_title_nlm",
"tests/test_document.py::JournalTests::test_without_journal_url",
"tests/test_document.py::JournalTests::test_without_periodicity",
"tests/test_document.py::JournalTests::test_without_periodicity_in_months",
"tests/test_document.py::JournalTests::test_without_publisher_loc",
"tests/test_document.py::JournalTests::test_without_publisher_name",
"tests/test_document.py::JournalTests::test_without_scielo_domain",
"tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::JournalTests::test_without_subject_areas",
"tests/test_document.py::JournalTests::test_without_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_without_wos_subject_areas",
"tests/test_document.py::JournalTests::test_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_acceptance_date",
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliations",
"tests/test_document.py::ArticleTests::test_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_article",
"tests/test_document.py::ArticleTests::test_author_with_two_affiliations",
"tests/test_document.py::ArticleTests::test_author_with_two_role",
"tests/test_document.py::ArticleTests::test_author_without_affiliations",
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names",
"tests/test_document.py::ArticleTests::test_authors",
"tests/test_document.py::ArticleTests::test_collection_acronym",
"tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection",
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992",
"tests/test_document.py::ArticleTests::test_collection_name_brazil",
"tests/test_document.py::ArticleTests::test_collection_name_undefined",
"tests/test_document.py::ArticleTests::test_corporative_authors",
"tests/test_document.py::ArticleTests::test_creation_date",
"tests/test_document.py::ArticleTests::test_creation_date_1",
"tests/test_document.py::ArticleTests::test_creation_date_2",
"tests/test_document.py::ArticleTests::test_data_model_version_html",
"tests/test_document.py::ArticleTests::test_data_model_version_html_1",
"tests/test_document.py::ArticleTests::test_data_model_version_xml",
"tests/test_document.py::ArticleTests::test_document_type",
"tests/test_document.py::ArticleTests::test_doi",
"tests/test_document.py::ArticleTests::test_doi_clean_1",
"tests/test_document.py::ArticleTests::test_doi_clean_2",
"tests/test_document.py::ArticleTests::test_doi_v237",
"tests/test_document.py::ArticleTests::test_e_location",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_file_code",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2",
"tests/test_document.py::ArticleTests::test_first_author",
"tests/test_document.py::ArticleTests::test_first_author_without_author",
"tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts",
"tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts",
"tests/test_document.py::ArticleTests::test_html_url",
"tests/test_document.py::ArticleTests::test_invalid_document_type",
"tests/test_document.py::ArticleTests::test_issue_url",
"tests/test_document.py::ArticleTests::test_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_journal_acronym",
"tests/test_document.py::ArticleTests::test_journal_title",
"tests/test_document.py::ArticleTests::test_keywords",
"tests/test_document.py::ArticleTests::test_keywords_iso639_2",
"tests/test_document.py::ArticleTests::test_keywords_with_undefined_language",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_l",
"tests/test_document.py::ArticleTests::test_languages_field_fulltexts",
"tests/test_document.py::ArticleTests::test_languages_field_v40",
"tests/test_document.py::ArticleTests::test_last_page",
"tests/test_document.py::ArticleTests::test_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p",
"tests/test_document.py::ArticleTests::test_order",
"tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined",
"tests/test_document.py::ArticleTests::test_original_html_field_body",
"tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_original",
"tests/test_document.py::ArticleTests::test_original_section_field_v49",
"tests/test_document.py::ArticleTests::test_original_title_subfield_t",
"tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_title_without_language_defined",
"tests/test_document.py::ArticleTests::test_pdf_url",
"tests/test_document.py::ArticleTests::test_processing_date",
"tests/test_document.py::ArticleTests::test_processing_date_1",
"tests/test_document.py::ArticleTests::test_project_name",
"tests/test_document.py::ArticleTests::test_project_sponsors",
"tests/test_document.py::ArticleTests::test_publication_contract",
"tests/test_document.py::ArticleTests::test_publication_date",
"tests/test_document.py::ArticleTests::test_publisher_id",
"tests/test_document.py::ArticleTests::test_publisher_loc",
"tests/test_document.py::ArticleTests::test_publisher_name",
"tests/test_document.py::ArticleTests::test_receive_date",
"tests/test_document.py::ArticleTests::test_review_date",
"tests/test_document.py::ArticleTests::test_secion_code_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_without_field_v49",
"tests/test_document.py::ArticleTests::test_section_field_v49",
"tests/test_document.py::ArticleTests::test_section_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_without_field_v49",
"tests/test_document.py::ArticleTests::test_start_page",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_subject_areas",
"tests/test_document.py::ArticleTests::test_thesis_degree",
"tests/test_document.py::ArticleTests::test_thesis_organization",
"tests/test_document.py::ArticleTests::test_thesis_organization_and_division",
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name",
"tests/test_document.py::ArticleTests::test_translated_abstracts",
"tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83",
"tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body",
"tests/test_document.py::ArticleTests::test_translated_section_field_v49",
"tests/test_document.py::ArticleTests::test_translated_titles",
"tests/test_document.py::ArticleTests::test_translated_titles_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_titles_without_v12",
"tests/test_document.py::ArticleTests::test_update_date",
"tests/test_document.py::ArticleTests::test_update_date_1",
"tests/test_document.py::ArticleTests::test_update_date_2",
"tests/test_document.py::ArticleTests::test_update_date_3",
"tests/test_document.py::ArticleTests::test_whitwout_acceptance_date",
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_whitwout_receive_date",
"tests/test_document.py::ArticleTests::test_whitwout_review_date",
"tests/test_document.py::ArticleTests::test_without_affiliations",
"tests/test_document.py::ArticleTests::test_without_authors",
"tests/test_document.py::ArticleTests::test_without_citations",
"tests/test_document.py::ArticleTests::test_without_collection_acronym",
"tests/test_document.py::ArticleTests::test_without_corporative_authors",
"tests/test_document.py::ArticleTests::test_without_document_type",
"tests/test_document.py::ArticleTests::test_without_doi",
"tests/test_document.py::ArticleTests::test_without_e_location",
"tests/test_document.py::ArticleTests::test_without_html_url",
"tests/test_document.py::ArticleTests::test_without_issue_url",
"tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_without_journal_acronym",
"tests/test_document.py::ArticleTests::test_without_journal_title",
"tests/test_document.py::ArticleTests::test_without_keywords",
"tests/test_document.py::ArticleTests::test_without_last_page",
"tests/test_document.py::ArticleTests::test_without_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_without_order",
"tests/test_document.py::ArticleTests::test_without_original_abstract",
"tests/test_document.py::ArticleTests::test_without_original_title",
"tests/test_document.py::ArticleTests::test_without_pages",
"tests/test_document.py::ArticleTests::test_without_pdf_url",
"tests/test_document.py::ArticleTests::test_without_processing_date",
"tests/test_document.py::ArticleTests::test_without_project_name",
"tests/test_document.py::ArticleTests::test_without_project_sponsor",
"tests/test_document.py::ArticleTests::test_without_publication_contract",
"tests/test_document.py::ArticleTests::test_without_publication_date",
"tests/test_document.py::ArticleTests::test_without_publisher_id",
"tests/test_document.py::ArticleTests::test_without_publisher_loc",
"tests/test_document.py::ArticleTests::test_without_publisher_name",
"tests/test_document.py::ArticleTests::test_without_scielo_domain",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::ArticleTests::test_without_start_page",
"tests/test_document.py::ArticleTests::test_without_subject_areas",
"tests/test_document.py::ArticleTests::test_without_thesis_degree",
"tests/test_document.py::ArticleTests::test_without_thesis_organization",
"tests/test_document.py::ArticleTests::test_without_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_wos_subject_areas",
"tests/test_document.py::CitationTest::test_a_link_access_date",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation",
"tests/test_document.py::CitationTest::test_article_title",
"tests/test_document.py::CitationTest::test_article_without_title",
"tests/test_document.py::CitationTest::test_authors_article",
"tests/test_document.py::CitationTest::test_authors_book",
"tests/test_document.py::CitationTest::test_authors_link",
"tests/test_document.py::CitationTest::test_authors_thesis",
"tests/test_document.py::CitationTest::test_book_chapter_title",
"tests/test_document.py::CitationTest::test_book_edition",
"tests/test_document.py::CitationTest::test_book_volume",
"tests/test_document.py::CitationTest::test_book_without_chapter_title",
"tests/test_document.py::CitationTest::test_citation_sample_congress",
"tests/test_document.py::CitationTest::test_citation_sample_link",
"tests/test_document.py::CitationTest::test_citation_sample_link_without_comment",
"tests/test_document.py::CitationTest::test_conference_edition",
"tests/test_document.py::CitationTest::test_conference_name",
"tests/test_document.py::CitationTest::test_conference_sponsor",
"tests/test_document.py::CitationTest::test_conference_without_name",
"tests/test_document.py::CitationTest::test_conference_without_sponsor",
"tests/test_document.py::CitationTest::test_date",
"tests/test_document.py::CitationTest::test_doi",
"tests/test_document.py::CitationTest::test_editor",
"tests/test_document.py::CitationTest::test_elocation_14",
"tests/test_document.py::CitationTest::test_elocation_514",
"tests/test_document.py::CitationTest::test_end_page_14",
"tests/test_document.py::CitationTest::test_end_page_514",
"tests/test_document.py::CitationTest::test_end_page_withdout_data",
"tests/test_document.py::CitationTest::test_first_author_article",
"tests/test_document.py::CitationTest::test_first_author_book",
"tests/test_document.py::CitationTest::test_first_author_link",
"tests/test_document.py::CitationTest::test_first_author_thesis",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_index_number",
"tests/test_document.py::CitationTest::test_institutions_all_fields",
"tests/test_document.py::CitationTest::test_institutions_v11",
"tests/test_document.py::CitationTest::test_institutions_v17",
"tests/test_document.py::CitationTest::test_institutions_v29",
"tests/test_document.py::CitationTest::test_institutions_v50",
"tests/test_document.py::CitationTest::test_institutions_v58",
"tests/test_document.py::CitationTest::test_invalid_edition",
"tests/test_document.py::CitationTest::test_isbn",
"tests/test_document.py::CitationTest::test_isbn_but_not_a_book",
"tests/test_document.py::CitationTest::test_issn",
"tests/test_document.py::CitationTest::test_issn_but_not_an_article",
"tests/test_document.py::CitationTest::test_issue_part",
"tests/test_document.py::CitationTest::test_issue_title",
"tests/test_document.py::CitationTest::test_journal_issue",
"tests/test_document.py::CitationTest::test_journal_volume",
"tests/test_document.py::CitationTest::test_link",
"tests/test_document.py::CitationTest::test_link_title",
"tests/test_document.py::CitationTest::test_link_without_title",
"tests/test_document.py::CitationTest::test_monographic_authors",
"tests/test_document.py::CitationTest::test_monographic_first_author",
"tests/test_document.py::CitationTest::test_pages_14",
"tests/test_document.py::CitationTest::test_pages_514",
"tests/test_document.py::CitationTest::test_pages_withdout_data",
"tests/test_document.py::CitationTest::test_publication_type_article",
"tests/test_document.py::CitationTest::test_publication_type_book",
"tests/test_document.py::CitationTest::test_publication_type_conference",
"tests/test_document.py::CitationTest::test_publication_type_link",
"tests/test_document.py::CitationTest::test_publication_type_thesis",
"tests/test_document.py::CitationTest::test_publication_type_undefined",
"tests/test_document.py::CitationTest::test_publisher",
"tests/test_document.py::CitationTest::test_publisher_address",
"tests/test_document.py::CitationTest::test_publisher_address_without_e",
"tests/test_document.py::CitationTest::test_series_book",
"tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation",
"tests/test_document.py::CitationTest::test_series_conference",
"tests/test_document.py::CitationTest::test_series_journal",
"tests/test_document.py::CitationTest::test_source_book_title",
"tests/test_document.py::CitationTest::test_source_journal",
"tests/test_document.py::CitationTest::test_source_journal_without_journal_title",
"tests/test_document.py::CitationTest::test_sponsor",
"tests/test_document.py::CitationTest::test_start_page_14",
"tests/test_document.py::CitationTest::test_start_page_514",
"tests/test_document.py::CitationTest::test_start_page_withdout_data",
"tests/test_document.py::CitationTest::test_thesis_institution",
"tests/test_document.py::CitationTest::test_thesis_title",
"tests/test_document.py::CitationTest::test_thesis_without_title",
"tests/test_document.py::CitationTest::test_title_when_article_citation",
"tests/test_document.py::CitationTest::test_title_when_conference_citation",
"tests/test_document.py::CitationTest::test_title_when_link_citation",
"tests/test_document.py::CitationTest::test_title_when_thesis_citation",
"tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book",
"tests/test_document.py::CitationTest::test_without_analytic_institution",
"tests/test_document.py::CitationTest::test_without_authors",
"tests/test_document.py::CitationTest::test_without_date",
"tests/test_document.py::CitationTest::test_without_doi",
"tests/test_document.py::CitationTest::test_without_edition",
"tests/test_document.py::CitationTest::test_without_editor",
"tests/test_document.py::CitationTest::test_without_first_author",
"tests/test_document.py::CitationTest::test_without_index_number",
"tests/test_document.py::CitationTest::test_without_institutions",
"tests/test_document.py::CitationTest::test_without_issue",
"tests/test_document.py::CitationTest::test_without_issue_part",
"tests/test_document.py::CitationTest::test_without_issue_title",
"tests/test_document.py::CitationTest::test_without_link",
"tests/test_document.py::CitationTest::test_without_monographic_authors",
"tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_without_publisher",
"tests/test_document.py::CitationTest::test_without_publisher_address",
"tests/test_document.py::CitationTest::test_without_series",
"tests/test_document.py::CitationTest::test_without_sponsor",
"tests/test_document.py::CitationTest::test_without_thesis_institution",
"tests/test_document.py::CitationTest::test_without_volume"
]
| []
| BSD 2-Clause "Simplified" License | 452 | [
"xylose/scielodocument.py"
]
| [
"xylose/scielodocument.py"
]
|
kako-nawao__ffconv-25 | b93056b5cb13eb62cbc8c3aa0e21c45e908d153f | 2016-02-26 13:29:15 | b93056b5cb13eb62cbc8c3aa0e21c45e908d153f | diff --git a/DESCRIPTION.rst b/DESCRIPTION.rst
index d1b41af..2649378 100644
--- a/DESCRIPTION.rst
+++ b/DESCRIPTION.rst
@@ -1,4 +1,4 @@
ffconv
-=======================
+======
Process media files to match profiles using ffmpeg.
\ No newline at end of file
diff --git a/ffconv/stream_processors.py b/ffconv/stream_processors.py
index e81feac..145207b 100644
--- a/ffconv/stream_processors.py
+++ b/ffconv/stream_processors.py
@@ -108,9 +108,19 @@ class VideoProcessor(StreamProcessor):
# Set input reference frames value
self.refs = int(stream['refs'])
- # Set target values for frames, profile, level, preset and quality
- height = int(stream.get('height', 720))
- self.max_refs = profile[self.media_type]['max_refs'].get(height, 5)
+ # Assert height is included in stream
+ if 'height' not in stream:
+ raise KeyError("Height not specified in video stream.")
+
+ # Get height and set target for ref frames (default is 4)
+ self.max_refs = 4
+ height = int(stream['height'])
+ for h, f in sorted(profile[self.media_type]['max_refs'].items()):
+ if height <= h:
+ self.max_refs = f
+ break
+
+ # Set target values for profile, level, preset and quality
self.target_profile = profile[self.media_type]['profile']
self.target_level = profile[self.media_type]['level']
self.target_preset = profile[self.media_type]['preset']
diff --git a/setup.py b/setup.py
index 20c088d..e4ec3cc 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ setup(
name='ffconv',
# https://packaging.python.org/en/latest/single_source_version.html
- version='0.0.3',
+ version='0.1.0',
description='Process media files with ffmpeg',
long_description=long_description,
| Height-to-Reference Frames matching error
Sometimes the video height does not match the standard values exactly (720, 1080, etc.), and in those cases the stream processor decides incorrectly: a video stream with a height of 704 is limited to 4 reference frames when it should be allowed 8.
When deciding the maximum number of ref frames, we should *compare heights* against the profile thresholds instead of matching them exactly (a minimal sketch of this lookup follows this record). | kako-nawao/ffconv | diff --git a/tests/test_files.py b/tests/test_files.py
index e332188..e4f8612 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -108,7 +108,7 @@ class FileProcessorTest(TestCase):
self.assertEqual(res, [])
# Process 1 video only
- streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 1}]
+ streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 1, 'height': 720}]
res = processor.process_streams(streams)
self.assertEqual(len(res), 1)
self.assertEqual(VideoProcessor.process.call_count, 1)
@@ -117,7 +117,7 @@ class FileProcessorTest(TestCase):
VideoProcessor.process.reset_mock()
# Process 1 video, 2 audio, 2 subs
- streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4},
+ streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4, 'height': 720},
{'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 2},
{'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 6},
{'codec_type': 'subtitle', 'codec_name': 'srt', 'index': 0},
@@ -143,7 +143,7 @@ class FileProcessorTest(TestCase):
self.assertEqual(res, [])
# Process 1 video, 2 audio, 2 subs
- streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4},
+ streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4, 'height': 480},
{'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 2},
{'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 6},
{'codec_type': 'subtitle', 'codec_name': 'srt', 'index': 0},
@@ -220,7 +220,7 @@ class FileProcessorTest(TestCase):
@patch('ffconv.stream_processors.execute_cmd', MagicMock())
@patch('ffconv.file_processor.execute_cmd', MagicMock())
@patch('ffconv.file_processor.FileProcessor.probe', MagicMock(return_value=[
- {'index': 0, 'codec_type': 'video', 'codec_name': 'h264', 'refs': 4},
+ {'index': 0, 'codec_type': 'video', 'codec_name': 'h264', 'refs': 4, 'height': 720},
{'index': 1, 'codec_type': 'audio', 'codec_name': 'aac', 'channels': 6, 'tags': {'LANGUAGE': 'eng'}},
{'index': 2, 'codec_type': 'subtitle', 'codec_name': 'ass', 'tags': {'LANGUAGE': 'spa'}},
{'index': 3, 'codec_type': 'subtitle', 'codec_name': 'srt', 'tags': {'LANGUAGE': 'por'}},
diff --git a/tests/test_streams.py b/tests/test_streams.py
index a24c1bb..04584d8 100644
--- a/tests/test_streams.py
+++ b/tests/test_streams.py
@@ -64,6 +64,26 @@ class VideoProcessorTest(TestCase):
processor.convert.reset_mock()
processor.clean_up.reset_mock()
+ # Attempt to process 704:8, nothing to do
+ stream = {'index': 7, 'codec_type': 'video', 'codec_name': 'h264',
+ 'refs': 8, 'height': 704}
+ processor = VideoProcessor(input, stream, profile)
+ res = processor.process()
+ self.assertEqual(res, {'input': 'some-film.mkv', 'index': 7})
+ self.assertFalse(processor.convert.called)
+ self.assertFalse(processor.clean_up.called)
+
+ # Attempt process for 2160:8 refs, needs to convert (default ref is 4)
+ stream = {'index': 7, 'codec_type': 'video', 'codec_name': 'h264',
+ 'refs': 8, 'height': 2160}
+ processor = VideoProcessor(input, stream, profile)
+ res = processor.process()
+ self.assertEqual(res, {'input': 'video-7.mp4', 'index': 0})
+ self.assertTrue(processor.convert.called)
+ self.assertTrue(processor.clean_up.called)
+ processor.convert.reset_mock()
+ processor.clean_up.reset_mock()
+
# Attempt to process xvid, turn to h264
stream = {'index': 7, 'codec_type': 'video', 'codec_name': 'xvid',
'refs': 1, 'height': 720}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
coverage==7.8.0
dill==0.3.9
exceptiongroup==1.2.2
-e git+https://github.com/kako-nawao/ffconv.git@b93056b5cb13eb62cbc8c3aa0e21c45e908d153f#egg=ffconv
iniconfig==2.1.0
isort==6.0.1
mccabe==0.7.0
nose2==0.15.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pylint==3.3.6
pytest==8.3.5
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
| name: ffconv
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==3.3.9
- coverage==7.8.0
- dill==0.3.9
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- nose2==0.15.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pylint==3.3.6
- pytest==8.3.5
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
prefix: /opt/conda/envs/ffconv
| [
"tests/test_streams.py::VideoProcessorTest::test_process"
]
| []
| [
"tests/test_files.py::ExecuteCommandTest::test_errors",
"tests/test_files.py::FileProcessorTest::test_clean_up",
"tests/test_files.py::FileProcessorTest::test_init",
"tests/test_files.py::FileProcessorTest::test_merge",
"tests/test_files.py::FileProcessorTest::test_probe",
"tests/test_files.py::FileProcessorTest::test_process",
"tests/test_files.py::FileProcessorTest::test_process_streams_error",
"tests/test_files.py::FileProcessorTest::test_process_streams_success",
"tests/test_files.py::FileProcessorTest::test_replace_original",
"tests/test_streams.py::VideoProcessorTest::test_convert",
"tests/test_streams.py::VideoProcessorTest::test_init",
"tests/test_streams.py::AudioProcessorTest::test_convert",
"tests/test_streams.py::AudioProcessorTest::test_init",
"tests/test_streams.py::AudioProcessorTest::test_process",
"tests/test_streams.py::SubtitleProcessorTest::test_clean_up",
"tests/test_streams.py::SubtitleProcessorTest::test_convert",
"tests/test_streams.py::SubtitleProcessorTest::test_init",
"tests/test_streams.py::SubtitleProcessorTest::test_process"
]
| []
| null | 453 | [
"setup.py",
"ffconv/stream_processors.py",
"DESCRIPTION.rst"
]
| [
"setup.py",
"ffconv/stream_processors.py",
"DESCRIPTION.rst"
]
|
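The ffconv record above replaces an exact height lookup with a threshold comparison when choosing the maximum number of reference frames; the patch keeps the thresholds under profile[media_type]['max_refs']. Below is a minimal standalone sketch of that lookup. The mapping values and the function name are illustrative only (they are not ffconv's real profile data), chosen so that a 704-pixel stream falls under the 720 threshold as the issue describes:

```python
# Illustrative values only: maps height thresholds to ref-frame limits.
# ffconv keeps a similar mapping under profile[media_type]['max_refs'];
# the numbers here are made up for the example (704 -> 8, as in the issue).
MAX_REFS_BY_HEIGHT = {480: 16, 720: 8, 1080: 4}

def pick_max_refs(height, thresholds=MAX_REFS_BY_HEIGHT, default=4):
    """Return the limit for the smallest threshold that is >= height."""
    for h, refs in sorted(thresholds.items()):
        if height <= h:
            return refs
    # Heights above every threshold (e.g. 2160) fall back to the default.
    return default

if __name__ == '__main__':
    print(pick_max_refs(704))   # 8: 704 is compared against 720, not matched exactly
    print(pick_max_refs(2160))  # 4: nothing matches, so the default applies
```

The actual patch in the record implements this loop inline in VideoProcessor, with a default of 4 ref frames and a KeyError when the stream reports no height.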
|
nose-devs__nose2-278 | f4ab61b95a67aecf61abb161e4cf138bf8da6ce8 | 2016-02-28 09:32:12 | b5eeeba68f2bad3e199af8903cdd0540b95f06cf | diff --git a/nose2/plugins/junitxml.py b/nose2/plugins/junitxml.py
index e61a08b..05cfee6 100644
--- a/nose2/plugins/junitxml.py
+++ b/nose2/plugins/junitxml.py
@@ -164,11 +164,10 @@ class JUnitXmlReporter(events.Plugin):
skipped.set('message', 'expected test failure')
skipped.text = msg
- system_err = ET.SubElement(testcase, 'system-err')
- system_err.text = string_cleanup(
+ system_out = ET.SubElement(testcase, 'system-out')
+ system_out.text = string_cleanup(
'\n'.join(event.metadata.get('logs', '')),
- self.keep_restricted
- )
+ self.keep_restricted)
def _check(self):
if not os.path.exists(os.path.dirname(self.path)):
| junit-xml always adds logs to system_err
This is surprising because logs are typically not written to stderr.
In nosetests, successful tests did not include logs, but now they are always reported under system_err.
Would it be possible to output them in system_out instead?
In the testOutcome handler, just replace system_err with this code (a standalone sketch also follows this record):
```python
system_out = ET.SubElement(testcase, 'system-out')
system_out.text = string_cleanup(
'\n'.join(event.metadata.get('logs', '')),
self.keep_restricted
)
``` | nose-devs/nose2 | diff --git a/nose2/tests/unit/test_junitxml.py b/nose2/tests/unit/test_junitxml.py
index 706daa2..0bb96f0 100644
--- a/nose2/tests/unit/test_junitxml.py
+++ b/nose2/tests/unit/test_junitxml.py
@@ -145,7 +145,6 @@ class TestJunitXmlPlugin(TestCase):
test(self.result)
case = self.plugin.tree.find('testcase')
error = case.find('error')
- ending = six.u(' \uFFFD\uFFFD')
assert error is not None
self.assertRegex(error.text, self.EXPECTED_RE_SAFE)
@@ -277,22 +276,22 @@ class TestJunitXmlPlugin(TestCase):
self.assertEqual(inital_dir,
os.path.dirname(os.path.realpath(self.plugin.path)))
- def test_xml_contains_empty_system_err_without_logcapture(self):
+ def test_xml_contains_empty_system_out_without_logcapture(self):
test = self.case('test_with_log')
test(self.result)
case = self.plugin.tree.find('testcase')
- system_err = case.find('system-err')
- assert system_err is not None
- assert not system_err.text
+ system_out = case.find('system-out')
+ assert system_out is not None
+ assert not system_out.text
- def test_xml_contains_log_message_in_system_err_with_logcapture(self):
+ def test_xml_contains_log_message_in_system_out_with_logcapture(self):
self.logcapture_plugin = logcapture.LogCapture(session=self.session)
self.logcapture_plugin.register()
test = self.case('test_with_log')
test(self.result)
case = self.plugin.tree.find('testcase')
- system_err = case.find('system-err')
- assert system_err is not None
- assert 'log message' in system_err.text
- assert 'INFO' in system_err.text
+ system_out = case.find('system-out')
+ assert system_out is not None
+ assert 'log message' in system_out.text
+ assert 'INFO' in system_out.text
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose2",
"cov-core>=1.12",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
cov-core==1.15.0
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/nose-devs/nose2.git@f4ab61b95a67aecf61abb161e4cf138bf8da6ce8#egg=nose2
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: nose2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- cov-core==1.15.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/nose2
| [
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_empty_system_out_without_logcapture",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_log_message_in_system_out_with_logcapture"
]
| []
| [
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b_keep",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_keep",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_includes_traceback",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_failure_includes_traceback",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_full_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_full_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_name_correct",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_skip_includes_skipped",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_success_added_to_xml",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_writes_xml_file_at_end",
"nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_file_path_is_not_affected_by_chdir_in_test"
]
| []
| BSD | 454 | [
"nose2/plugins/junitxml.py"
]
| [
"nose2/plugins/junitxml.py"
]
|
|
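The nose2 record above moves captured log output from a <system-err> element to <system-out> in the JUnit XML report. The sketch below shows the same idea in isolation with xml.etree.ElementTree; build_testcase and its parameters are illustrative helpers for this example, not nose2's plugin API:

```python
# Illustrative only: builds a JUnit-style <testcase> with captured logs in
# <system-out>, mirroring the change in the nose2 record above.
import xml.etree.ElementTree as ET

def build_testcase(name, classname, time, log_lines):
    testcase = ET.Element('testcase', {
        'name': name,
        'classname': classname,
        'time': '%.6f' % time,
    })
    # Captured log records belong in <system-out>; <system-err> is meant
    # for actual stderr output.
    system_out = ET.SubElement(testcase, 'system-out')
    system_out.text = '\n'.join(log_lines)
    return testcase

if __name__ == '__main__':
    case = build_testcase('test_with_log', 'pkg.TestSomething', 0.002,
                          ['INFO: log message'])
    print(ET.tostring(case).decode())
```

The plugin change in the record does the same thing inside nose2's testOutcome handler, passing the joined log lines through its string_cleanup helper before assigning them to the element text.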
scieloorg__xylose-108 | 9d72bccf95503133b6fe7ef55ec88f9cf9b50a71 | 2016-02-29 21:07:48 | 743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5 | diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index 8f9703d..8676c4a 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -866,6 +866,24 @@ class Journal(object):
return missions
+ @property
+ def publisher_country(self):
+ """
+ This method retrieves the publisher country of journal.
+ This method return a tuple: ('US', u'United States'), otherwise
+ return None.
+ """
+ if 'v310' not in self.data:
+ return None
+
+ country_code = self.data.get('v310', [{'_': None}])[0]['_']
+ country_name = choices.ISO_3166.get(country_code, None)
+
+ if not country_code or not country_name:
+ return None
+
+ return (country_code, country_name)
+
@property
def copyright(self):
"""
| Add the ``publisher_country`` field to the Journal class
So that we can register the publisher_country during the Site's initial processing, we need this attribute to be available in Xylose. | scieloorg/xylose | diff --git a/tests/test_document.py b/tests/test_document.py
index 282c34f..30996d5 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -1085,6 +1085,26 @@ class JournalTests(unittest.TestCase):
self.assertIsNone(journal.mission)
+ def test_journal_publisher_country(self):
+ journal = self.journal
+
+ expected = ('BR', 'Brazil')
+
+ self.assertEqual(journal.publisher_country, expected)
+
+ def test_journal_publisher_country_without_country(self):
+ journal = self.journal
+
+ del(journal.data['v310'])
+
+ self.assertIsNone(journal.publisher_country)
+
+ def test_journal_publisher_country_not_findable_code(self):
+ self.fulldoc['title']['v310'] = [{"_": "BRX"}]
+ journal = Journal(self.fulldoc['title'])
+
+ self.assertIsNone(journal.publisher_country)
+
def test_journal_copyright(self):
journal = self.journal
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
mocker==1.1.1
nose==1.0.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
pytest-mock==3.14.0
tomli==2.2.1
-e git+https://github.com/scieloorg/xylose.git@9d72bccf95503133b6fe7ef55ec88f9cf9b50a71#egg=xylose
| name: xylose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- mocker==1.1.1
- nose==1.0.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- tomli==2.2.1
prefix: /opt/conda/envs/xylose
| [
"tests/test_document.py::JournalTests::test_journal_publisher_country",
"tests/test_document.py::JournalTests::test_journal_publisher_country_not_findable_code",
"tests/test_document.py::JournalTests::test_journal_publisher_country_without_country"
]
| []
| [
"tests/test_document.py::ToolsTests::test_get_date_wrong_day",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_year",
"tests/test_document.py::ToolsTests::test_get_date_year_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day_31",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined",
"tests/test_document.py::ToolsTests::test_get_language_without_iso_format",
"tests/test_document.py::IssueTests::test_collection_acronym",
"tests/test_document.py::IssueTests::test_is_ahead",
"tests/test_document.py::IssueTests::test_is_ahead_1",
"tests/test_document.py::IssueTests::test_issue",
"tests/test_document.py::IssueTests::test_issue_label",
"tests/test_document.py::IssueTests::test_issue_url",
"tests/test_document.py::IssueTests::test_order",
"tests/test_document.py::IssueTests::test_processing_date",
"tests/test_document.py::IssueTests::test_processing_date_1",
"tests/test_document.py::IssueTests::test_publication_date",
"tests/test_document.py::IssueTests::test_supplement_number",
"tests/test_document.py::IssueTests::test_supplement_volume",
"tests/test_document.py::IssueTests::test_type_regular",
"tests/test_document.py::IssueTests::test_type_supplement_1",
"tests/test_document.py::IssueTests::test_type_supplement_2",
"tests/test_document.py::IssueTests::test_volume",
"tests/test_document.py::IssueTests::test_without_issue",
"tests/test_document.py::IssueTests::test_without_processing_date",
"tests/test_document.py::IssueTests::test_without_publication_date",
"tests/test_document.py::IssueTests::test_without_suplement_number",
"tests/test_document.py::IssueTests::test_without_supplement_volume",
"tests/test_document.py::IssueTests::test_without_volume",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_print",
"tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print",
"tests/test_document.py::JournalTests::test_cnn_code",
"tests/test_document.py::JournalTests::test_collection_acronym",
"tests/test_document.py::JournalTests::test_creation_date",
"tests/test_document.py::JournalTests::test_ctrl_vocabulary",
"tests/test_document.py::JournalTests::test_ctrl_vocabulary_out_of_choices",
"tests/test_document.py::JournalTests::test_current_status",
"tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_current_status_some_changes",
"tests/test_document.py::JournalTests::test_current_without_v51",
"tests/test_document.py::JournalTests::test_first_number",
"tests/test_document.py::JournalTests::test_first_number_1",
"tests/test_document.py::JournalTests::test_first_volume",
"tests/test_document.py::JournalTests::test_first_volume_1",
"tests/test_document.py::JournalTests::test_first_year",
"tests/test_document.py::JournalTests::test_first_year_1",
"tests/test_document.py::JournalTests::test_journal",
"tests/test_document.py::JournalTests::test_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_journal_acronym",
"tests/test_document.py::JournalTests::test_journal_copyright",
"tests/test_document.py::JournalTests::test_journal_copyright_without_copyright",
"tests/test_document.py::JournalTests::test_journal_fulltitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_title",
"tests/test_document.py::JournalTests::test_journal_mission",
"tests/test_document.py::JournalTests::test_journal_mission_without_language_key",
"tests/test_document.py::JournalTests::test_journal_mission_without_mission",
"tests/test_document.py::JournalTests::test_journal_mission_without_mission_text",
"tests/test_document.py::JournalTests::test_journal_mission_without_mission_text_and_language",
"tests/test_document.py::JournalTests::test_journal_subtitle",
"tests/test_document.py::JournalTests::test_journal_title",
"tests/test_document.py::JournalTests::test_journal_title_nlm",
"tests/test_document.py::JournalTests::test_journal_url",
"tests/test_document.py::JournalTests::test_journal_without_subtitle",
"tests/test_document.py::JournalTests::test_languages",
"tests/test_document.py::JournalTests::test_languages_without_v350",
"tests/test_document.py::JournalTests::test_last_cnn_code_1",
"tests/test_document.py::JournalTests::test_last_number",
"tests/test_document.py::JournalTests::test_last_number_1",
"tests/test_document.py::JournalTests::test_last_volume",
"tests/test_document.py::JournalTests::test_last_volume_1",
"tests/test_document.py::JournalTests::test_last_year",
"tests/test_document.py::JournalTests::test_last_year_1",
"tests/test_document.py::JournalTests::test_load_issn_with_v435",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35",
"tests/test_document.py::JournalTests::test_periodicity",
"tests/test_document.py::JournalTests::test_periodicity_in_months",
"tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices",
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices",
"tests/test_document.py::JournalTests::test_permission_id",
"tests/test_document.py::JournalTests::test_permission_t0",
"tests/test_document.py::JournalTests::test_permission_t1",
"tests/test_document.py::JournalTests::test_permission_t2",
"tests/test_document.py::JournalTests::test_permission_t3",
"tests/test_document.py::JournalTests::test_permission_t4",
"tests/test_document.py::JournalTests::test_permission_text",
"tests/test_document.py::JournalTests::test_permission_url",
"tests/test_document.py::JournalTests::test_permission_without_v540",
"tests/test_document.py::JournalTests::test_permission_without_v540_t",
"tests/test_document.py::JournalTests::test_plevel",
"tests/test_document.py::JournalTests::test_plevel_out_of_choices",
"tests/test_document.py::JournalTests::test_publisher_loc",
"tests/test_document.py::JournalTests::test_publisher_name",
"tests/test_document.py::JournalTests::test_scielo_issn",
"tests/test_document.py::JournalTests::test_secs_code",
"tests/test_document.py::JournalTests::test_standard",
"tests/test_document.py::JournalTests::test_standard_out_of_choices",
"tests/test_document.py::JournalTests::test_status",
"tests/test_document.py::JournalTests::test_status_lots_of_changes",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason",
"tests/test_document.py::JournalTests::test_status_some_changes",
"tests/test_document.py::JournalTests::test_status_without_v51",
"tests/test_document.py::JournalTests::test_subject_areas",
"tests/test_document.py::JournalTests::test_subject_descriptors",
"tests/test_document.py::JournalTests::test_subject_index_coverage",
"tests/test_document.py::JournalTests::test_submission_url",
"tests/test_document.py::JournalTests::test_update_date",
"tests/test_document.py::JournalTests::test_without_ctrl_vocabulary",
"tests/test_document.py::JournalTests::test_without_index_coverage",
"tests/test_document.py::JournalTests::test_without_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_without_journal_acronym",
"tests/test_document.py::JournalTests::test_without_journal_title",
"tests/test_document.py::JournalTests::test_without_journal_title_nlm",
"tests/test_document.py::JournalTests::test_without_journal_url",
"tests/test_document.py::JournalTests::test_without_periodicity",
"tests/test_document.py::JournalTests::test_without_periodicity_in_months",
"tests/test_document.py::JournalTests::test_without_plevel",
"tests/test_document.py::JournalTests::test_without_publisher_loc",
"tests/test_document.py::JournalTests::test_without_publisher_name",
"tests/test_document.py::JournalTests::test_without_scielo_domain",
"tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::JournalTests::test_without_secs_code",
"tests/test_document.py::JournalTests::test_without_standard",
"tests/test_document.py::JournalTests::test_without_subject_areas",
"tests/test_document.py::JournalTests::test_without_subject_descriptors",
"tests/test_document.py::JournalTests::test_without_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_without_wos_subject_areas",
"tests/test_document.py::JournalTests::test_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_acceptance_date",
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliation_with_country_iso_3166",
"tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliations",
"tests/test_document.py::ArticleTests::test_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_article",
"tests/test_document.py::ArticleTests::test_author_with_two_affiliations",
"tests/test_document.py::ArticleTests::test_author_with_two_role",
"tests/test_document.py::ArticleTests::test_author_without_affiliations",
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names",
"tests/test_document.py::ArticleTests::test_authors",
"tests/test_document.py::ArticleTests::test_collection_acronym",
"tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection",
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992",
"tests/test_document.py::ArticleTests::test_collection_name_brazil",
"tests/test_document.py::ArticleTests::test_collection_name_undefined",
"tests/test_document.py::ArticleTests::test_corporative_authors",
"tests/test_document.py::ArticleTests::test_creation_date",
"tests/test_document.py::ArticleTests::test_creation_date_1",
"tests/test_document.py::ArticleTests::test_creation_date_2",
"tests/test_document.py::ArticleTests::test_data_model_version_html",
"tests/test_document.py::ArticleTests::test_data_model_version_html_1",
"tests/test_document.py::ArticleTests::test_data_model_version_xml",
"tests/test_document.py::ArticleTests::test_document_type",
"tests/test_document.py::ArticleTests::test_doi",
"tests/test_document.py::ArticleTests::test_doi_clean_1",
"tests/test_document.py::ArticleTests::test_doi_clean_2",
"tests/test_document.py::ArticleTests::test_doi_v237",
"tests/test_document.py::ArticleTests::test_e_location",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_file_code",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2",
"tests/test_document.py::ArticleTests::test_first_author",
"tests/test_document.py::ArticleTests::test_first_author_without_author",
"tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts",
"tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts",
"tests/test_document.py::ArticleTests::test_html_url",
"tests/test_document.py::ArticleTests::test_invalid_document_type",
"tests/test_document.py::ArticleTests::test_issue_url",
"tests/test_document.py::ArticleTests::test_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_journal_acronym",
"tests/test_document.py::ArticleTests::test_journal_title",
"tests/test_document.py::ArticleTests::test_keywords",
"tests/test_document.py::ArticleTests::test_keywords_iso639_2",
"tests/test_document.py::ArticleTests::test_keywords_with_undefined_language",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_l",
"tests/test_document.py::ArticleTests::test_languages_field_fulltexts",
"tests/test_document.py::ArticleTests::test_languages_field_v40",
"tests/test_document.py::ArticleTests::test_last_page",
"tests/test_document.py::ArticleTests::test_mixed_affiliations_1",
"tests/test_document.py::ArticleTests::test_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p",
"tests/test_document.py::ArticleTests::test_order",
"tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined",
"tests/test_document.py::ArticleTests::test_original_html_field_body",
"tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_original",
"tests/test_document.py::ArticleTests::test_original_section_field_v49",
"tests/test_document.py::ArticleTests::test_original_title_subfield_t",
"tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_title_without_language_defined",
"tests/test_document.py::ArticleTests::test_pdf_url",
"tests/test_document.py::ArticleTests::test_processing_date",
"tests/test_document.py::ArticleTests::test_processing_date_1",
"tests/test_document.py::ArticleTests::test_project_name",
"tests/test_document.py::ArticleTests::test_project_sponsors",
"tests/test_document.py::ArticleTests::test_publication_contract",
"tests/test_document.py::ArticleTests::test_publication_date",
"tests/test_document.py::ArticleTests::test_publisher_id",
"tests/test_document.py::ArticleTests::test_publisher_loc",
"tests/test_document.py::ArticleTests::test_publisher_name",
"tests/test_document.py::ArticleTests::test_receive_date",
"tests/test_document.py::ArticleTests::test_review_date",
"tests/test_document.py::ArticleTests::test_secion_code_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_without_field_v49",
"tests/test_document.py::ArticleTests::test_section_field_v49",
"tests/test_document.py::ArticleTests::test_section_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_without_field_v49",
"tests/test_document.py::ArticleTests::test_start_page",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_subject_areas",
"tests/test_document.py::ArticleTests::test_thesis_degree",
"tests/test_document.py::ArticleTests::test_thesis_organization",
"tests/test_document.py::ArticleTests::test_thesis_organization_and_division",
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name",
"tests/test_document.py::ArticleTests::test_translated_abstracts",
"tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83",
"tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body",
"tests/test_document.py::ArticleTests::test_translated_section_field_v49",
"tests/test_document.py::ArticleTests::test_translated_titles",
"tests/test_document.py::ArticleTests::test_translated_titles_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_titles_without_v12",
"tests/test_document.py::ArticleTests::test_update_date",
"tests/test_document.py::ArticleTests::test_update_date_1",
"tests/test_document.py::ArticleTests::test_update_date_2",
"tests/test_document.py::ArticleTests::test_update_date_3",
"tests/test_document.py::ArticleTests::test_whitwout_acceptance_date",
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_whitwout_receive_date",
"tests/test_document.py::ArticleTests::test_whitwout_review_date",
"tests/test_document.py::ArticleTests::test_without_affiliations",
"tests/test_document.py::ArticleTests::test_without_authors",
"tests/test_document.py::ArticleTests::test_without_citations",
"tests/test_document.py::ArticleTests::test_without_collection_acronym",
"tests/test_document.py::ArticleTests::test_without_corporative_authors",
"tests/test_document.py::ArticleTests::test_without_document_type",
"tests/test_document.py::ArticleTests::test_without_doi",
"tests/test_document.py::ArticleTests::test_without_e_location",
"tests/test_document.py::ArticleTests::test_without_html_url",
"tests/test_document.py::ArticleTests::test_without_issue_url",
"tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_without_journal_acronym",
"tests/test_document.py::ArticleTests::test_without_journal_title",
"tests/test_document.py::ArticleTests::test_without_keywords",
"tests/test_document.py::ArticleTests::test_without_last_page",
"tests/test_document.py::ArticleTests::test_without_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_without_order",
"tests/test_document.py::ArticleTests::test_without_original_abstract",
"tests/test_document.py::ArticleTests::test_without_original_title",
"tests/test_document.py::ArticleTests::test_without_pages",
"tests/test_document.py::ArticleTests::test_without_pdf_url",
"tests/test_document.py::ArticleTests::test_without_processing_date",
"tests/test_document.py::ArticleTests::test_without_project_name",
"tests/test_document.py::ArticleTests::test_without_project_sponsor",
"tests/test_document.py::ArticleTests::test_without_publication_contract",
"tests/test_document.py::ArticleTests::test_without_publication_date",
"tests/test_document.py::ArticleTests::test_without_publisher_id",
"tests/test_document.py::ArticleTests::test_without_publisher_loc",
"tests/test_document.py::ArticleTests::test_without_publisher_name",
"tests/test_document.py::ArticleTests::test_without_scielo_domain",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::ArticleTests::test_without_start_page",
"tests/test_document.py::ArticleTests::test_without_subject_areas",
"tests/test_document.py::ArticleTests::test_without_thesis_degree",
"tests/test_document.py::ArticleTests::test_without_thesis_organization",
"tests/test_document.py::ArticleTests::test_without_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_wos_subject_areas",
"tests/test_document.py::CitationTest::test_a_link_access_date",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation",
"tests/test_document.py::CitationTest::test_article_title",
"tests/test_document.py::CitationTest::test_article_without_title",
"tests/test_document.py::CitationTest::test_authors_article",
"tests/test_document.py::CitationTest::test_authors_book",
"tests/test_document.py::CitationTest::test_authors_link",
"tests/test_document.py::CitationTest::test_authors_thesis",
"tests/test_document.py::CitationTest::test_book_chapter_title",
"tests/test_document.py::CitationTest::test_book_edition",
"tests/test_document.py::CitationTest::test_book_volume",
"tests/test_document.py::CitationTest::test_book_without_chapter_title",
"tests/test_document.py::CitationTest::test_citation_sample_congress",
"tests/test_document.py::CitationTest::test_citation_sample_link",
"tests/test_document.py::CitationTest::test_citation_sample_link_without_comment",
"tests/test_document.py::CitationTest::test_conference_edition",
"tests/test_document.py::CitationTest::test_conference_name",
"tests/test_document.py::CitationTest::test_conference_sponsor",
"tests/test_document.py::CitationTest::test_conference_without_name",
"tests/test_document.py::CitationTest::test_conference_without_sponsor",
"tests/test_document.py::CitationTest::test_date",
"tests/test_document.py::CitationTest::test_doi",
"tests/test_document.py::CitationTest::test_editor",
"tests/test_document.py::CitationTest::test_elocation_14",
"tests/test_document.py::CitationTest::test_elocation_514",
"tests/test_document.py::CitationTest::test_end_page_14",
"tests/test_document.py::CitationTest::test_end_page_514",
"tests/test_document.py::CitationTest::test_end_page_withdout_data",
"tests/test_document.py::CitationTest::test_first_author_article",
"tests/test_document.py::CitationTest::test_first_author_book",
"tests/test_document.py::CitationTest::test_first_author_link",
"tests/test_document.py::CitationTest::test_first_author_thesis",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_index_number",
"tests/test_document.py::CitationTest::test_institutions_all_fields",
"tests/test_document.py::CitationTest::test_institutions_v11",
"tests/test_document.py::CitationTest::test_institutions_v17",
"tests/test_document.py::CitationTest::test_institutions_v29",
"tests/test_document.py::CitationTest::test_institutions_v50",
"tests/test_document.py::CitationTest::test_institutions_v58",
"tests/test_document.py::CitationTest::test_invalid_edition",
"tests/test_document.py::CitationTest::test_isbn",
"tests/test_document.py::CitationTest::test_isbn_but_not_a_book",
"tests/test_document.py::CitationTest::test_issn",
"tests/test_document.py::CitationTest::test_issn_but_not_an_article",
"tests/test_document.py::CitationTest::test_issue_part",
"tests/test_document.py::CitationTest::test_issue_title",
"tests/test_document.py::CitationTest::test_journal_issue",
"tests/test_document.py::CitationTest::test_journal_volume",
"tests/test_document.py::CitationTest::test_link",
"tests/test_document.py::CitationTest::test_link_title",
"tests/test_document.py::CitationTest::test_link_without_title",
"tests/test_document.py::CitationTest::test_monographic_authors",
"tests/test_document.py::CitationTest::test_monographic_first_author",
"tests/test_document.py::CitationTest::test_pages_14",
"tests/test_document.py::CitationTest::test_pages_514",
"tests/test_document.py::CitationTest::test_pages_withdout_data",
"tests/test_document.py::CitationTest::test_publication_type_article",
"tests/test_document.py::CitationTest::test_publication_type_book",
"tests/test_document.py::CitationTest::test_publication_type_conference",
"tests/test_document.py::CitationTest::test_publication_type_link",
"tests/test_document.py::CitationTest::test_publication_type_thesis",
"tests/test_document.py::CitationTest::test_publication_type_undefined",
"tests/test_document.py::CitationTest::test_publisher",
"tests/test_document.py::CitationTest::test_publisher_address",
"tests/test_document.py::CitationTest::test_publisher_address_without_e",
"tests/test_document.py::CitationTest::test_series_book",
"tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation",
"tests/test_document.py::CitationTest::test_series_conference",
"tests/test_document.py::CitationTest::test_series_journal",
"tests/test_document.py::CitationTest::test_source_book_title",
"tests/test_document.py::CitationTest::test_source_journal",
"tests/test_document.py::CitationTest::test_source_journal_without_journal_title",
"tests/test_document.py::CitationTest::test_sponsor",
"tests/test_document.py::CitationTest::test_start_page_14",
"tests/test_document.py::CitationTest::test_start_page_514",
"tests/test_document.py::CitationTest::test_start_page_withdout_data",
"tests/test_document.py::CitationTest::test_thesis_institution",
"tests/test_document.py::CitationTest::test_thesis_title",
"tests/test_document.py::CitationTest::test_thesis_without_title",
"tests/test_document.py::CitationTest::test_title_when_article_citation",
"tests/test_document.py::CitationTest::test_title_when_conference_citation",
"tests/test_document.py::CitationTest::test_title_when_link_citation",
"tests/test_document.py::CitationTest::test_title_when_thesis_citation",
"tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book",
"tests/test_document.py::CitationTest::test_without_analytic_institution",
"tests/test_document.py::CitationTest::test_without_authors",
"tests/test_document.py::CitationTest::test_without_date",
"tests/test_document.py::CitationTest::test_without_doi",
"tests/test_document.py::CitationTest::test_without_edition",
"tests/test_document.py::CitationTest::test_without_editor",
"tests/test_document.py::CitationTest::test_without_first_author",
"tests/test_document.py::CitationTest::test_without_index_number",
"tests/test_document.py::CitationTest::test_without_institutions",
"tests/test_document.py::CitationTest::test_without_issue",
"tests/test_document.py::CitationTest::test_without_issue_part",
"tests/test_document.py::CitationTest::test_without_issue_title",
"tests/test_document.py::CitationTest::test_without_link",
"tests/test_document.py::CitationTest::test_without_monographic_authors",
"tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_without_publisher",
"tests/test_document.py::CitationTest::test_without_publisher_address",
"tests/test_document.py::CitationTest::test_without_series",
"tests/test_document.py::CitationTest::test_without_sponsor",
"tests/test_document.py::CitationTest::test_without_thesis_institution",
"tests/test_document.py::CitationTest::test_without_volume"
]
| []
| BSD 2-Clause "Simplified" License | 455 | [
"xylose/scielodocument.py"
]
| [
"xylose/scielodocument.py"
]
|
|
scieloorg__xylose-118 | 743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5 | 2016-03-01 17:38:49 | 743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5 | diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index 0a6fdb0..b177a8d 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -567,18 +567,6 @@ class Journal(object):
if len(langs) > 0:
return langs
- @property
- def abstract_languages(self):
- """
- This method retrieves a list of possible languages that the journal
- publishes the abstracts.
- This method deals with the legacy fields (v360).
- """
- if 'v360' in self.data:
- langs = [i['_'] for i in self.data['v360'] if i['_'] in choices.ISO639_1_to_2.keys()]
- if len(langs) > 0:
- return langs
-
@property
def collection_acronym(self):
"""
@@ -717,20 +705,49 @@ class Journal(object):
This method retrieves the publisher name of the given article,
if it exists.
This method deals with the legacy fields (480).
+
+ This method return a list:
+
+ ["Associa\u00e7\u00e3o Brasileira de Limnologia",
+ "Sociedade Botânica do Brasil"]
"""
+ if 'v480' not in self.data:
+ return None
- return self.data.get('v480', [{'_': None}])[0]['_']
+ return [publisher['_'] for publisher in self.data.get('v480') if '_' in publisher and publisher['_'] != ""]
@property
def publisher_loc(self):
"""
- This method retrieves the publisher localization of the given article,
+ This method retrieves the publisher localization of the given journal,
if it exists.
This method deals with the legacy fields (490).
"""
+ warnings.warn("deprecated, use journal.publisher_city", DeprecationWarning)
+
return self.data.get('v490', [{'_': None}])[0]['_']
+ @property
+ def publisher_city(self):
+ """
+ This method retrieves the publisher localization of the given journal,
+ if it exists.
+ This method deals with the legacy fields (490).
+ """
+
+ return self.data.get('v490', [{'_': None}])[0]['_']
+
+ @property
+ def publisher_state(self):
+ """
+ This method retrieves the publisher state of the given journal,
+ if it exists.
+ This method deals with the legacy fields (320).
+ """
+
+ return self.data.get('v320', [{'_': None}])[0]['_']
+
@property
def title(self):
"""
@@ -741,6 +758,24 @@ class Journal(object):
return self.data.get('v100', [{'_': None}])[0]['_']
+ @property
+ def publisher_country(self):
+ """
+ This method retrieves the publisher country of journal.
+ This method return a tuple: ('US', u'United States'), otherwise
+ return None.
+ """
+ if 'v310' not in self.data:
+ return None
+
+ country_code = self.data.get('v310', [{'_': None}])[0]['_']
+ country_name = choices.ISO_3166.get(country_code, None)
+
+ if not country_code or not country_name:
+ return None
+
+ return (country_code, country_name)
+
@property
def subtitle(self):
"""
@@ -927,24 +962,6 @@ class Journal(object):
return missions
- @property
- def publisher_country(self):
- """
- This method retrieves the publisher country of journal.
- This method return a tuple: ('US', u'United States'), otherwise
- return None.
- """
- if 'v310' not in self.data:
- return None
-
- country_code = self.data.get('v310', [{'_': None}])[0]['_']
- country_name = choices.ISO_3166.get(country_code, None)
-
- if not country_code or not country_name:
- return None
-
- return (country_code, country_name)
-
@property
def copyrighter(self):
"""
| Add the ``publisher_state`` field to the Journal class | scieloorg/xylose | diff --git a/tests/test_document.py b/tests/test_document.py
index 83a5588..e9cb6fb 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -435,18 +435,6 @@ class JournalTests(unittest.TestCase):
self.assertEqual(journal.languages, None)
- def test_abstract_languages(self):
- journal = Journal(self.fulldoc['title'])
-
- self.assertEqual(sorted(journal.abstract_languages), [u'en', u'pt'])
-
- def test_abstract_languages_without_v350(self):
- del(self.fulldoc['title']['v360'])
-
- journal = Journal(self.fulldoc['title'])
-
- self.assertEqual(journal.abstract_languages, None)
-
def test_current_without_v51(self):
del(self.fulldoc['title']['v51'])
@@ -1021,7 +1009,7 @@ class JournalTests(unittest.TestCase):
def test_publisher_name(self):
journal = self.journal
- self.assertEqual(journal.publisher_name, u'Associação Brasileira de Limnologia')
+ self.assertEqual(journal.publisher_name, [u'Associação Brasileira de Limnologia'])
def test_without_publisher_name(self):
journal = self.journal
@@ -1040,6 +1028,28 @@ class JournalTests(unittest.TestCase):
del(journal.data['v490'])
self.assertEqual(journal.publisher_loc, None)
+ def test_publisher_city(self):
+ journal = self.journal
+
+ self.assertEqual(journal.publisher_city, u'Rio Claro')
+
+ def test_without_publisher_city(self):
+ journal = self.journal
+
+ del(journal.data['v490'])
+ self.assertEqual(journal.publisher_city, None)
+
+ def test_publisher_state(self):
+ journal = self.journal
+
+ self.assertEqual(journal.publisher_state, u'SP')
+
+ def test_without_publisher_state(self):
+ journal = self.journal
+
+ del(journal.data['v320'])
+ self.assertEqual(journal.publisher_state, None)
+
def test_journal_title(self):
journal = self.journal
@@ -1536,50 +1546,6 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.original_language(iso_format=None), u'en')
- def test_publisher_name(self):
- article = self.article
-
- self.assertEqual(article.journal.publisher_name, u'Associação Brasileira de Limnologia')
-
- def test_without_publisher_name(self):
- article = self.article
-
- del(article.data['title']['v480'])
- self.assertEqual(article.journal.publisher_name, None)
-
- def test_publisher_loc(self):
- article = self.article
-
- self.assertEqual(article.journal.publisher_loc, u'Rio Claro')
-
- def test_without_publisher_loc(self):
- article = self.article
-
- del(article.data['title']['v490'])
- self.assertEqual(article.journal.publisher_loc, None)
-
- def test_journal_title(self):
- article = self.article
-
- self.assertEqual(article.journal.title, u'Acta Limnologica Brasiliensia')
-
- def test_without_journal_title(self):
- article = self.article
-
- del(article.data['title']['v100'])
- self.assertEqual(article.journal.title, None)
-
- def test_journal_acronym(self):
- article = self.article
-
- self.assertEqual(article.journal.acronym, u'alb')
-
- def test_without_journal_acronym(self):
- article = self.article
-
- del(article.data['title']['v68'])
- self.assertEqual(article.journal.acronym, None)
-
def test_publication_date(self):
article = self.article
@@ -1620,7 +1586,6 @@ class ArticleTests(unittest.TestCase):
article.data['article']['v93'] = [{u'_': u'20120419'}]
self.assertEqual(article.creation_date, '2012-04-19')
-
def test_creation_date_1(self):
article = self.article
@@ -1769,7 +1734,6 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.elocation, None)
-
def test_start_page_loaded_through_xml(self):
article = self.article
@@ -1877,7 +1841,6 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.doi, u'10.1590/S2179-975X2012005000004')
-
def test_doi_clean_1(self):
article = self.article
@@ -1885,7 +1848,6 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.doi, u'10.1590/S2179-975X2012005000004')
-
def test_doi_clean_2(self):
article = self.article
@@ -2030,7 +1992,6 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.original_abstract(iso_format=None), None)
-
def test_without_corporative_authors(self):
article = self.article
@@ -2689,7 +2650,6 @@ class ArticleTests(unittest.TestCase):
self.assertEqual(article.translated_titles(iso_format=None), expected)
-
def test_translated_abstracts_without_v83(self):
article = self.article
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements-test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/scieloorg/xylose.git@743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5#egg=xylose
| name: xylose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/xylose
| [
"tests/test_document.py::JournalTests::test_publisher_city",
"tests/test_document.py::JournalTests::test_publisher_name",
"tests/test_document.py::JournalTests::test_publisher_state",
"tests/test_document.py::JournalTests::test_without_publisher_city",
"tests/test_document.py::JournalTests::test_without_publisher_state"
]
| []
| [
"tests/test_document.py::ToolsTests::test_get_date_wrong_day",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int",
"tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int",
"tests/test_document.py::ToolsTests::test_get_date_year",
"tests/test_document.py::ToolsTests::test_get_date_year_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day",
"tests/test_document.py::ToolsTests::test_get_date_year_month_day_31",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined",
"tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined",
"tests/test_document.py::ToolsTests::test_get_language_without_iso_format",
"tests/test_document.py::IssueTests::test_collection_acronym",
"tests/test_document.py::IssueTests::test_is_ahead",
"tests/test_document.py::IssueTests::test_is_ahead_1",
"tests/test_document.py::IssueTests::test_issue",
"tests/test_document.py::IssueTests::test_issue_label",
"tests/test_document.py::IssueTests::test_issue_url",
"tests/test_document.py::IssueTests::test_order",
"tests/test_document.py::IssueTests::test_processing_date",
"tests/test_document.py::IssueTests::test_processing_date_1",
"tests/test_document.py::IssueTests::test_publication_date",
"tests/test_document.py::IssueTests::test_supplement_number",
"tests/test_document.py::IssueTests::test_supplement_volume",
"tests/test_document.py::IssueTests::test_type_regular",
"tests/test_document.py::IssueTests::test_type_supplement_1",
"tests/test_document.py::IssueTests::test_type_supplement_2",
"tests/test_document.py::IssueTests::test_volume",
"tests/test_document.py::IssueTests::test_without_issue",
"tests/test_document.py::IssueTests::test_without_processing_date",
"tests/test_document.py::IssueTests::test_without_publication_date",
"tests/test_document.py::IssueTests::test_without_suplement_number",
"tests/test_document.py::IssueTests::test_without_supplement_volume",
"tests/test_document.py::IssueTests::test_without_volume",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic",
"tests/test_document.py::JournalTests::test_any_issn_priority_print",
"tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print",
"tests/test_document.py::JournalTests::test_cnn_code",
"tests/test_document.py::JournalTests::test_collection_acronym",
"tests/test_document.py::JournalTests::test_creation_date",
"tests/test_document.py::JournalTests::test_ctrl_vocabulary",
"tests/test_document.py::JournalTests::test_ctrl_vocabulary_out_of_choices",
"tests/test_document.py::JournalTests::test_current_status",
"tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_current_status_some_changes",
"tests/test_document.py::JournalTests::test_current_without_v51",
"tests/test_document.py::JournalTests::test_editor_address",
"tests/test_document.py::JournalTests::test_editor_address_without_data",
"tests/test_document.py::JournalTests::test_editor_email",
"tests/test_document.py::JournalTests::test_editor_email_without_data",
"tests/test_document.py::JournalTests::test_first_number",
"tests/test_document.py::JournalTests::test_first_number_1",
"tests/test_document.py::JournalTests::test_first_volume",
"tests/test_document.py::JournalTests::test_first_volume_1",
"tests/test_document.py::JournalTests::test_first_year",
"tests/test_document.py::JournalTests::test_first_year_1",
"tests/test_document.py::JournalTests::test_in_ahci",
"tests/test_document.py::JournalTests::test_in_scie",
"tests/test_document.py::JournalTests::test_in_ssci",
"tests/test_document.py::JournalTests::test_journal",
"tests/test_document.py::JournalTests::test_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_journal_acronym",
"tests/test_document.py::JournalTests::test_journal_copyrighter",
"tests/test_document.py::JournalTests::test_journal_copyrighter_without_copyright",
"tests/test_document.py::JournalTests::test_journal_fulltitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle",
"tests/test_document.py::JournalTests::test_journal_fulltitle_without_title",
"tests/test_document.py::JournalTests::test_journal_mission",
"tests/test_document.py::JournalTests::test_journal_mission_without_language_key",
"tests/test_document.py::JournalTests::test_journal_mission_without_mission",
"tests/test_document.py::JournalTests::test_journal_mission_without_mission_text",
"tests/test_document.py::JournalTests::test_journal_mission_without_mission_text_and_language",
"tests/test_document.py::JournalTests::test_journal_other_title_without_other_titles",
"tests/test_document.py::JournalTests::test_journal_other_titles",
"tests/test_document.py::JournalTests::test_journal_publisher_country",
"tests/test_document.py::JournalTests::test_journal_publisher_country_not_findable_code",
"tests/test_document.py::JournalTests::test_journal_publisher_country_without_country",
"tests/test_document.py::JournalTests::test_journal_sponsors",
"tests/test_document.py::JournalTests::test_journal_sponsors_with_empty_items",
"tests/test_document.py::JournalTests::test_journal_sponsors_without_sponsors",
"tests/test_document.py::JournalTests::test_journal_subtitle",
"tests/test_document.py::JournalTests::test_journal_title",
"tests/test_document.py::JournalTests::test_journal_title_nlm",
"tests/test_document.py::JournalTests::test_journal_url",
"tests/test_document.py::JournalTests::test_journal_without_subtitle",
"tests/test_document.py::JournalTests::test_languages",
"tests/test_document.py::JournalTests::test_languages_without_v350",
"tests/test_document.py::JournalTests::test_last_cnn_code_1",
"tests/test_document.py::JournalTests::test_last_number",
"tests/test_document.py::JournalTests::test_last_number_1",
"tests/test_document.py::JournalTests::test_last_volume",
"tests/test_document.py::JournalTests::test_last_volume_1",
"tests/test_document.py::JournalTests::test_last_year",
"tests/test_document.py::JournalTests::test_last_year_1",
"tests/test_document.py::JournalTests::test_load_issn_with_v435",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT",
"tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35",
"tests/test_document.py::JournalTests::test_periodicity",
"tests/test_document.py::JournalTests::test_periodicity_in_months",
"tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices",
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices",
"tests/test_document.py::JournalTests::test_permission_id",
"tests/test_document.py::JournalTests::test_permission_t0",
"tests/test_document.py::JournalTests::test_permission_t1",
"tests/test_document.py::JournalTests::test_permission_t2",
"tests/test_document.py::JournalTests::test_permission_t3",
"tests/test_document.py::JournalTests::test_permission_t4",
"tests/test_document.py::JournalTests::test_permission_text",
"tests/test_document.py::JournalTests::test_permission_url",
"tests/test_document.py::JournalTests::test_permission_without_v540",
"tests/test_document.py::JournalTests::test_permission_without_v540_t",
"tests/test_document.py::JournalTests::test_plevel",
"tests/test_document.py::JournalTests::test_plevel_out_of_choices",
"tests/test_document.py::JournalTests::test_publisher_loc",
"tests/test_document.py::JournalTests::test_scielo_issn",
"tests/test_document.py::JournalTests::test_secs_code",
"tests/test_document.py::JournalTests::test_standard",
"tests/test_document.py::JournalTests::test_standard_out_of_choices",
"tests/test_document.py::JournalTests::test_status",
"tests/test_document.py::JournalTests::test_status_lots_of_changes",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1",
"tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason",
"tests/test_document.py::JournalTests::test_status_some_changes",
"tests/test_document.py::JournalTests::test_status_without_v51",
"tests/test_document.py::JournalTests::test_subject_areas",
"tests/test_document.py::JournalTests::test_subject_descriptors",
"tests/test_document.py::JournalTests::test_subject_index_coverage",
"tests/test_document.py::JournalTests::test_submission_url",
"tests/test_document.py::JournalTests::test_update_date",
"tests/test_document.py::JournalTests::test_without_ctrl_vocabulary",
"tests/test_document.py::JournalTests::test_without_index_coverage",
"tests/test_document.py::JournalTests::test_without_journal_abbreviated_title",
"tests/test_document.py::JournalTests::test_without_journal_acronym",
"tests/test_document.py::JournalTests::test_without_journal_title",
"tests/test_document.py::JournalTests::test_without_journal_title_nlm",
"tests/test_document.py::JournalTests::test_without_journal_url",
"tests/test_document.py::JournalTests::test_without_periodicity",
"tests/test_document.py::JournalTests::test_without_periodicity_in_months",
"tests/test_document.py::JournalTests::test_without_plevel",
"tests/test_document.py::JournalTests::test_without_publisher_loc",
"tests/test_document.py::JournalTests::test_without_publisher_name",
"tests/test_document.py::JournalTests::test_without_scielo_domain",
"tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::JournalTests::test_without_secs_code",
"tests/test_document.py::JournalTests::test_without_standard",
"tests/test_document.py::JournalTests::test_without_subject_areas",
"tests/test_document.py::JournalTests::test_without_subject_descriptors",
"tests/test_document.py::JournalTests::test_without_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_without_wos_subject_areas",
"tests/test_document.py::JournalTests::test_wos_citation_indexes",
"tests/test_document.py::JournalTests::test_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_acceptance_date",
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliation_with_country_iso_3166",
"tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name",
"tests/test_document.py::ArticleTests::test_affiliations",
"tests/test_document.py::ArticleTests::test_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_article",
"tests/test_document.py::ArticleTests::test_author_with_two_affiliations",
"tests/test_document.py::ArticleTests::test_author_with_two_role",
"tests/test_document.py::ArticleTests::test_author_without_affiliations",
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names",
"tests/test_document.py::ArticleTests::test_authors",
"tests/test_document.py::ArticleTests::test_collection_acronym",
"tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection",
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992",
"tests/test_document.py::ArticleTests::test_collection_name_brazil",
"tests/test_document.py::ArticleTests::test_collection_name_undefined",
"tests/test_document.py::ArticleTests::test_corporative_authors",
"tests/test_document.py::ArticleTests::test_creation_date",
"tests/test_document.py::ArticleTests::test_creation_date_1",
"tests/test_document.py::ArticleTests::test_creation_date_2",
"tests/test_document.py::ArticleTests::test_data_model_version_html",
"tests/test_document.py::ArticleTests::test_data_model_version_html_1",
"tests/test_document.py::ArticleTests::test_data_model_version_xml",
"tests/test_document.py::ArticleTests::test_document_type",
"tests/test_document.py::ArticleTests::test_doi",
"tests/test_document.py::ArticleTests::test_doi_clean_1",
"tests/test_document.py::ArticleTests::test_doi_clean_2",
"tests/test_document.py::ArticleTests::test_doi_v237",
"tests/test_document.py::ArticleTests::test_e_location",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_file_code",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1",
"tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2",
"tests/test_document.py::ArticleTests::test_first_author",
"tests/test_document.py::ArticleTests::test_first_author_without_author",
"tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts",
"tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts",
"tests/test_document.py::ArticleTests::test_html_url",
"tests/test_document.py::ArticleTests::test_invalid_document_type",
"tests/test_document.py::ArticleTests::test_issue_url",
"tests/test_document.py::ArticleTests::test_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_keywords",
"tests/test_document.py::ArticleTests::test_keywords_iso639_2",
"tests/test_document.py::ArticleTests::test_keywords_with_undefined_language",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k",
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_l",
"tests/test_document.py::ArticleTests::test_languages_field_fulltexts",
"tests/test_document.py::ArticleTests::test_languages_field_v40",
"tests/test_document.py::ArticleTests::test_last_page",
"tests/test_document.py::ArticleTests::test_mixed_affiliations_1",
"tests/test_document.py::ArticleTests::test_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE",
"tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p",
"tests/test_document.py::ArticleTests::test_order",
"tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined",
"tests/test_document.py::ArticleTests::test_original_html_field_body",
"tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_iso639_2",
"tests/test_document.py::ArticleTests::test_original_language_original",
"tests/test_document.py::ArticleTests::test_original_section_field_v49",
"tests/test_document.py::ArticleTests::test_original_title_subfield_t",
"tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined",
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language",
"tests/test_document.py::ArticleTests::test_original_title_without_language_defined",
"tests/test_document.py::ArticleTests::test_pdf_url",
"tests/test_document.py::ArticleTests::test_processing_date",
"tests/test_document.py::ArticleTests::test_processing_date_1",
"tests/test_document.py::ArticleTests::test_project_name",
"tests/test_document.py::ArticleTests::test_project_sponsors",
"tests/test_document.py::ArticleTests::test_publication_contract",
"tests/test_document.py::ArticleTests::test_publication_date",
"tests/test_document.py::ArticleTests::test_publisher_id",
"tests/test_document.py::ArticleTests::test_receive_date",
"tests/test_document.py::ArticleTests::test_review_date",
"tests/test_document.py::ArticleTests::test_secion_code_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_code_without_field_v49",
"tests/test_document.py::ArticleTests::test_section_field_v49",
"tests/test_document.py::ArticleTests::test_section_nd_field_v49",
"tests/test_document.py::ArticleTests::test_section_without_field_v49",
"tests/test_document.py::ArticleTests::test_start_page",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1",
"tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2",
"tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml",
"tests/test_document.py::ArticleTests::test_subject_areas",
"tests/test_document.py::ArticleTests::test_thesis_degree",
"tests/test_document.py::ArticleTests::test_thesis_organization",
"tests/test_document.py::ArticleTests::test_thesis_organization_and_division",
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name",
"tests/test_document.py::ArticleTests::test_translated_abstracts",
"tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83",
"tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body",
"tests/test_document.py::ArticleTests::test_translated_section_field_v49",
"tests/test_document.py::ArticleTests::test_translated_titles",
"tests/test_document.py::ArticleTests::test_translated_titles_iso639_2",
"tests/test_document.py::ArticleTests::test_translated_titles_without_v12",
"tests/test_document.py::ArticleTests::test_update_date",
"tests/test_document.py::ArticleTests::test_update_date_1",
"tests/test_document.py::ArticleTests::test_update_date_2",
"tests/test_document.py::ArticleTests::test_update_date_3",
"tests/test_document.py::ArticleTests::test_whitwout_acceptance_date",
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date",
"tests/test_document.py::ArticleTests::test_whitwout_receive_date",
"tests/test_document.py::ArticleTests::test_whitwout_review_date",
"tests/test_document.py::ArticleTests::test_without_affiliations",
"tests/test_document.py::ArticleTests::test_without_authors",
"tests/test_document.py::ArticleTests::test_without_citations",
"tests/test_document.py::ArticleTests::test_without_collection_acronym",
"tests/test_document.py::ArticleTests::test_without_corporative_authors",
"tests/test_document.py::ArticleTests::test_without_document_type",
"tests/test_document.py::ArticleTests::test_without_doi",
"tests/test_document.py::ArticleTests::test_without_e_location",
"tests/test_document.py::ArticleTests::test_without_html_url",
"tests/test_document.py::ArticleTests::test_without_issue_url",
"tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title",
"tests/test_document.py::ArticleTests::test_without_keywords",
"tests/test_document.py::ArticleTests::test_without_last_page",
"tests/test_document.py::ArticleTests::test_without_normalized_affiliations",
"tests/test_document.py::ArticleTests::test_without_order",
"tests/test_document.py::ArticleTests::test_without_original_abstract",
"tests/test_document.py::ArticleTests::test_without_original_title",
"tests/test_document.py::ArticleTests::test_without_pages",
"tests/test_document.py::ArticleTests::test_without_pdf_url",
"tests/test_document.py::ArticleTests::test_without_processing_date",
"tests/test_document.py::ArticleTests::test_without_project_name",
"tests/test_document.py::ArticleTests::test_without_project_sponsor",
"tests/test_document.py::ArticleTests::test_without_publication_contract",
"tests/test_document.py::ArticleTests::test_without_publication_date",
"tests/test_document.py::ArticleTests::test_without_publisher_id",
"tests/test_document.py::ArticleTests::test_without_scielo_domain",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690",
"tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690",
"tests/test_document.py::ArticleTests::test_without_start_page",
"tests/test_document.py::ArticleTests::test_without_subject_areas",
"tests/test_document.py::ArticleTests::test_without_thesis_degree",
"tests/test_document.py::ArticleTests::test_without_thesis_organization",
"tests/test_document.py::ArticleTests::test_without_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas",
"tests/test_document.py::ArticleTests::test_wos_citation_indexes",
"tests/test_document.py::ArticleTests::test_wos_subject_areas",
"tests/test_document.py::CitationTest::test_a_link_access_date",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation",
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation",
"tests/test_document.py::CitationTest::test_article_title",
"tests/test_document.py::CitationTest::test_article_without_title",
"tests/test_document.py::CitationTest::test_authors_article",
"tests/test_document.py::CitationTest::test_authors_book",
"tests/test_document.py::CitationTest::test_authors_link",
"tests/test_document.py::CitationTest::test_authors_thesis",
"tests/test_document.py::CitationTest::test_book_chapter_title",
"tests/test_document.py::CitationTest::test_book_edition",
"tests/test_document.py::CitationTest::test_book_volume",
"tests/test_document.py::CitationTest::test_book_without_chapter_title",
"tests/test_document.py::CitationTest::test_citation_sample_congress",
"tests/test_document.py::CitationTest::test_citation_sample_link",
"tests/test_document.py::CitationTest::test_citation_sample_link_without_comment",
"tests/test_document.py::CitationTest::test_conference_edition",
"tests/test_document.py::CitationTest::test_conference_name",
"tests/test_document.py::CitationTest::test_conference_sponsor",
"tests/test_document.py::CitationTest::test_conference_without_name",
"tests/test_document.py::CitationTest::test_conference_without_sponsor",
"tests/test_document.py::CitationTest::test_date",
"tests/test_document.py::CitationTest::test_doi",
"tests/test_document.py::CitationTest::test_editor",
"tests/test_document.py::CitationTest::test_elocation_14",
"tests/test_document.py::CitationTest::test_elocation_514",
"tests/test_document.py::CitationTest::test_end_page_14",
"tests/test_document.py::CitationTest::test_end_page_514",
"tests/test_document.py::CitationTest::test_end_page_withdout_data",
"tests/test_document.py::CitationTest::test_first_author_article",
"tests/test_document.py::CitationTest::test_first_author_book",
"tests/test_document.py::CitationTest::test_first_author_link",
"tests/test_document.py::CitationTest::test_first_author_thesis",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors",
"tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_index_number",
"tests/test_document.py::CitationTest::test_institutions_all_fields",
"tests/test_document.py::CitationTest::test_institutions_v11",
"tests/test_document.py::CitationTest::test_institutions_v17",
"tests/test_document.py::CitationTest::test_institutions_v29",
"tests/test_document.py::CitationTest::test_institutions_v50",
"tests/test_document.py::CitationTest::test_institutions_v58",
"tests/test_document.py::CitationTest::test_invalid_edition",
"tests/test_document.py::CitationTest::test_isbn",
"tests/test_document.py::CitationTest::test_isbn_but_not_a_book",
"tests/test_document.py::CitationTest::test_issn",
"tests/test_document.py::CitationTest::test_issn_but_not_an_article",
"tests/test_document.py::CitationTest::test_issue_part",
"tests/test_document.py::CitationTest::test_issue_title",
"tests/test_document.py::CitationTest::test_journal_issue",
"tests/test_document.py::CitationTest::test_journal_volume",
"tests/test_document.py::CitationTest::test_link",
"tests/test_document.py::CitationTest::test_link_title",
"tests/test_document.py::CitationTest::test_link_without_title",
"tests/test_document.py::CitationTest::test_monographic_authors",
"tests/test_document.py::CitationTest::test_monographic_first_author",
"tests/test_document.py::CitationTest::test_pages_14",
"tests/test_document.py::CitationTest::test_pages_514",
"tests/test_document.py::CitationTest::test_pages_withdout_data",
"tests/test_document.py::CitationTest::test_publication_type_article",
"tests/test_document.py::CitationTest::test_publication_type_book",
"tests/test_document.py::CitationTest::test_publication_type_conference",
"tests/test_document.py::CitationTest::test_publication_type_link",
"tests/test_document.py::CitationTest::test_publication_type_thesis",
"tests/test_document.py::CitationTest::test_publication_type_undefined",
"tests/test_document.py::CitationTest::test_publisher",
"tests/test_document.py::CitationTest::test_publisher_address",
"tests/test_document.py::CitationTest::test_publisher_address_without_e",
"tests/test_document.py::CitationTest::test_series_book",
"tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation",
"tests/test_document.py::CitationTest::test_series_conference",
"tests/test_document.py::CitationTest::test_series_journal",
"tests/test_document.py::CitationTest::test_source_book_title",
"tests/test_document.py::CitationTest::test_source_journal",
"tests/test_document.py::CitationTest::test_source_journal_without_journal_title",
"tests/test_document.py::CitationTest::test_sponsor",
"tests/test_document.py::CitationTest::test_start_page_14",
"tests/test_document.py::CitationTest::test_start_page_514",
"tests/test_document.py::CitationTest::test_start_page_withdout_data",
"tests/test_document.py::CitationTest::test_thesis_institution",
"tests/test_document.py::CitationTest::test_thesis_title",
"tests/test_document.py::CitationTest::test_thesis_without_title",
"tests/test_document.py::CitationTest::test_title_when_article_citation",
"tests/test_document.py::CitationTest::test_title_when_conference_citation",
"tests/test_document.py::CitationTest::test_title_when_link_citation",
"tests/test_document.py::CitationTest::test_title_when_thesis_citation",
"tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book",
"tests/test_document.py::CitationTest::test_without_analytic_institution",
"tests/test_document.py::CitationTest::test_without_authors",
"tests/test_document.py::CitationTest::test_without_date",
"tests/test_document.py::CitationTest::test_without_doi",
"tests/test_document.py::CitationTest::test_without_edition",
"tests/test_document.py::CitationTest::test_without_editor",
"tests/test_document.py::CitationTest::test_without_first_author",
"tests/test_document.py::CitationTest::test_without_index_number",
"tests/test_document.py::CitationTest::test_without_institutions",
"tests/test_document.py::CitationTest::test_without_issue",
"tests/test_document.py::CitationTest::test_without_issue_part",
"tests/test_document.py::CitationTest::test_without_issue_title",
"tests/test_document.py::CitationTest::test_without_link",
"tests/test_document.py::CitationTest::test_without_monographic_authors",
"tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation",
"tests/test_document.py::CitationTest::test_without_publisher",
"tests/test_document.py::CitationTest::test_without_publisher_address",
"tests/test_document.py::CitationTest::test_without_series",
"tests/test_document.py::CitationTest::test_without_sponsor",
"tests/test_document.py::CitationTest::test_without_thesis_institution",
"tests/test_document.py::CitationTest::test_without_volume"
]
| []
| BSD 2-Clause "Simplified" License | 456 | [
"xylose/scielodocument.py"
]
| [
"xylose/scielodocument.py"
]
|
|
DataDog__datadogpy-118 | c8bc9d6cce1caebea0be16366f2cd0c3efb47571 | 2016-03-01 18:46:54 | ef81785f880925467b9eeccf5ebd5b226a05d32f | yannmh: @JohnLZeller can you take a pass on it ?
JohnLZeller: :+1: looks good once conflicts are resolved. | diff --git a/datadog/dogstatsd/base.py b/datadog/dogstatsd/base.py
index 7e0e11d..2f7725c 100644
--- a/datadog/dogstatsd/base.py
+++ b/datadog/dogstatsd/base.py
@@ -22,7 +22,7 @@ log = logging.getLogger('dogstatsd')
class DogStatsd(object):
OK, WARNING, CRITICAL, UNKNOWN = (0, 1, 2, 3)
- def __init__(self, host='localhost', port=8125, max_buffer_size=50,
+ def __init__(self, host='localhost', port=8125, max_buffer_size=50, namespace=None,
constant_tags=None, use_ms=False):
"""
Initialize a DogStatsd object.
@@ -39,7 +39,10 @@ class DogStatsd(object):
if sending metrics in batch
:type max_buffer_size: integer
- :param constant_tags: Tags to attach to every metric reported by this client
+ :param namepace: Namespace to prefix all metric names
+ :type namepace: string
+
+ :param constant_tags: Tags to attach to all metrics
:type constant_tags: list of strings
:param use_ms: Report timed values in milliseconds instead of seconds (default False)
@@ -58,6 +61,7 @@ class DogStatsd(object):
if constant_tags is None:
constant_tags = []
self.constant_tags = constant_tags + env_tags
+ self.namespace = namespace
self.use_ms = use_ms
def __enter__(self):
@@ -222,24 +226,37 @@ class DogStatsd(object):
self._report(metric, 's', value, tags, sample_rate)
def _report(self, metric, metric_type, value, tags, sample_rate):
+ """
+ Create a metric packet and send it.
+
+ More information about the packets' format: http://docs.datadoghq.com/guides/dogstatsd/
+ """
if sample_rate != 1 and random() > sample_rate:
return
- payload = [metric, ":", value, "|", metric_type]
- if sample_rate != 1:
- payload.extend(["|@", sample_rate])
+ payload = []
- # Append all client level tags to every metric
+ # Resolve the full tag list
if self.constant_tags:
if tags:
tags = tags + self.constant_tags
else:
tags = self.constant_tags
+ # Create/format the metric packet
+ if self.namespace:
+ payload.extend([self.namespace, "."])
+ payload.extend([metric, ":", value, "|", metric_type])
+
+ if sample_rate != 1:
+ payload.extend(["|@", sample_rate])
+
if tags:
payload.extend(["|#", ",".join(tags)])
encoded = "".join(imap(str, payload))
+
+ # Send it
self._send(encoded)
def _send_to_server(self, packet):
diff --git a/datadog/threadstats/base.py b/datadog/threadstats/base.py
index 89bbfc9..40aed00 100644
--- a/datadog/threadstats/base.py
+++ b/datadog/threadstats/base.py
@@ -23,7 +23,7 @@ log = logging.getLogger('dd.datadogpy')
class ThreadStats(object):
- def __init__(self, constant_tags=None):
+ def __init__(self, namespace="", constant_tags=None):
"""
Initialize a dogstats object.
@@ -33,13 +33,16 @@ class ThreadStats(object):
:envvar DATADOG_TAGS: Tags to attach to every metric reported by ThreadStats client
:type constant_tags: list of strings
"""
- # Don't collect until start is called.
- self._disabled = True
+ # Parameters
+ self.namespace = namespace
env_tags = [tag for tag in os.environ.get('DATADOG_TAGS', '').split(',') if tag]
if constant_tags is None:
constant_tags = []
self.constant_tags = constant_tags + env_tags
+ # State
+ self._disabled = True
+
def start(self, flush_interval=10, roll_up_interval=10, device=None,
flush_in_thread=True, flush_in_greenlet=False, disabled=False):
"""
@@ -307,23 +310,31 @@ class ThreadStats(object):
self._is_flush_in_progress = False
def _get_aggregate_metrics(self, flush_time=None):
+ """
+ Get, format and return the rolled up metrics from the aggregator.
+ """
# Get rolled up metrics
rolled_up_metrics = self._metric_aggregator.flush(flush_time)
# FIXME: emit a dictionary from the aggregator
metrics = []
for timestamp, value, name, tags, host in rolled_up_metrics:
- # Append all client level tags to every metric
metric_tags = tags
+ metric_name = name
+ # Append all client level tags to every metric
if self.constant_tags:
if tags:
metric_tags = tags + self.constant_tags
else:
metric_tags = self.constant_tags
+ # Resolve the metric name
+ if self.namespace:
+ metric_name = self.namespace + "." + name
+
metric = {
- 'metric': name,
+ 'metric': metric_name,
'points': [[timestamp, value]],
'type': MetricType.Gauge,
'host': host,
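
Since the `DogStatsd` constructor above now accepts `namespace`, the prefix requested in the issue below can be set once per client; the reworked `_report()` prepends it to every metric name before the type marker. A small sketch of the resulting wire format, consistent with the `test_metric_namespace` case in the test patch further down (packets are fire-and-forget UDP, so no agent is needed to run this):

```python
from datadog.dogstatsd.base import DogStatsd

# Host and port are the client defaults; only namespace is new here.
statsd = DogStatsd(host='localhost', port=8125, namespace='foo')

statsd.gauge('gauge', 123.4)
# UDP packet sent: "foo.gauge:123.4|g"

statsd.gauge('gauge', 123.4, tags=['env:staging'])
# UDP packet sent: "foo.gauge:123.4|g|#env:staging"
```
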
| Prefix support
Please, add prefix support like Java and C#:
https://github.com/DataDog/dogstatsd-csharp-client/blob/master/src/StatsdClient/StatsdConfig.cs#L8
https://github.com/indeedeng/java-dogstatsd-client/blob/master/src/main/java/com/timgroup/statsd/NonBlockingStatsDClient.java#L120 | DataDog/datadogpy | diff --git a/tests/unit/dogstatsd/test_statsd.py b/tests/unit/dogstatsd/test_statsd.py
index dacf09d..8473b87 100644
--- a/tests/unit/dogstatsd/test_statsd.py
+++ b/tests/unit/dogstatsd/test_statsd.py
@@ -149,6 +149,14 @@ class TestDogStatsd(object):
u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#key1:val1,key2:val2|m:{2}'
.format(self.statsd.WARNING, now, u"♬ †øU \\n†øU ¥ºu|m\: T0µ ♪"), self.recv())
+ def test_metric_namespace(self):
+ """
+ Namespace prefixes all metric names.
+ """
+ self.statsd.namespace = "foo"
+ self.statsd.gauge('gauge', 123.4)
+ t.assert_equal('foo.gauge:123.4|g', self.recv())
+
# Test Client level contant tags
def test_gauge_constant_tags(self):
self.statsd.constant_tags=['bar:baz', 'foo']
diff --git a/tests/unit/threadstats/test_threadstats.py b/tests/unit/threadstats/test_threadstats.py
index eaa0658..6f2e90b 100644
--- a/tests/unit/threadstats/test_threadstats.py
+++ b/tests/unit/threadstats/test_threadstats.py
@@ -2,18 +2,19 @@
Tests for the ThreadStats class, using HTTP mode
"""
-import os
+# stdlib
import logging
+import os
import random
import time
-import threading
+import unittest
+# 3p
+from mock import patch
import nose.tools as nt
-from nose.plugins.skip import SkipTest
+# datadog
from datadog import ThreadStats
-from datadog.api.exceptions import ApiNotInitialized
-
from tests.util.contextmanagers import preserve_environment_variable
@@ -22,12 +23,10 @@ logger = logging.getLogger('dd.datadogpy')
logger.setLevel(logging.ERROR)
-#
-# Test fixtures.
-#
-
class MemoryReporter(object):
- """ A reporting class that reports to memory for testing. """
+ """
+ A reporting class that reports to memory for testing.
+ """
def __init__(self):
self.metrics = []
@@ -40,14 +39,20 @@ class MemoryReporter(object):
self.events += events
-#
-# Unit tests.
-#
-class TestUnitThreadStats(object):
- """ Unit tests for the dog stats api. """
+class TestUnitThreadStats(unittest.TestCase):
+ """
+ Unit tests for ThreadStats.
+ """
+ def setUp(self):
+ """
+ Set a mocked reporter.
+ """
+ self.reporter = MemoryReporter()
def sort_metrics(self, metrics):
- """ Sort metrics by timestamp of first point and then name """
+ """
+ Sort metrics by timestamp of first point and then name.
+ """
def sort(metric):
tags = metric['tags'] or []
host = metric['host'] or ''
@@ -55,6 +60,39 @@ class TestUnitThreadStats(object):
metric['points'][0][1])
return sorted(metrics, key=sort)
+ def assertMetric(self, name=None, value=None, tags=None, count=None):
+ """
+ Helper, to make assertions on metrics.
+ """
+ matching_metrics = []
+
+ for metric in self.reporter.metrics:
+ if name and name != metric['metric']:
+ continue
+ if value and value != metric['points'][0][1]:
+ continue
+ if tags and tags != metric['tags']:
+ continue
+ matching_metrics.append(metric)
+
+ if count:
+ self.assertEquals(
+ len(matching_metrics), count,
+ u"Candidate size assertion failure: expected {expected}, found {count}. "
+ u"Metric name={name}, value={value}, tags={tags}.".format(
+ expected=count, count=len(matching_metrics),
+ name=name, value=value, tags=tags
+ )
+ )
+ else:
+ self.assertTrue(
+ len(matching_metrics) > 0,
+ u"Candidate size assertion failure: no matching metric found. "
+ u"Metric name={name}, value={value}, tags={tags}.".format(
+ name=name, value=value, tags=tags
+ )
+ )
+
def test_timed_decorator(self):
dog = ThreadStats()
dog.start(roll_up_interval=1, flush_in_thread=False)
@@ -393,51 +431,75 @@ class TestUnitThreadStats(object):
nt.assert_equal(g3['points'][0][1], 20)
def test_constant_tags(self):
- dog = ThreadStats(constant_tags=['type:constant'])
- dog.start(roll_up_interval=10, flush_in_thread=False)
- reporter = dog.reporter = MemoryReporter()
+ """
+ Constant tags are attached to all metrics.
+ """
+ dog = ThreadStats(constant_tags=["type:constant"])
+ dog.start(roll_up_interval=1, flush_in_thread=False)
+ dog.reporter = self.reporter
# Post the same metric with different tags.
- dog.gauge('gauge', 10, timestamp=100.0)
- dog.gauge('gauge', 15, timestamp=100.0, tags=['env:production', 'db'])
- dog.gauge('gauge', 20, timestamp=100.0, tags=['env:staging'])
+ dog.gauge("gauge", 10, timestamp=100.0)
+ dog.gauge("gauge", 15, timestamp=100.0, tags=["env:production", 'db'])
+ dog.gauge("gauge", 20, timestamp=100.0, tags=["env:staging"])
- dog.increment('counter', timestamp=100.0)
- dog.increment('counter', timestamp=100.0, tags=['env:production', 'db'])
- dog.increment('counter', timestamp=100.0, tags=['env:staging'])
+ dog.increment("counter", timestamp=100.0)
+ dog.increment("counter", timestamp=100.0, tags=["env:production", 'db'])
+ dog.increment("counter", timestamp=100.0, tags=["env:staging"])
dog.flush(200.0)
- metrics = self.sort_metrics(reporter.metrics)
- nt.assert_equal(len(metrics), 6)
+ # Assertions on all metrics
+ self.assertMetric(count=6)
- [c1, c2, c3, g1, g2, g3] = metrics
- (nt.assert_equal(c['metric'], 'counter') for c in [c1, c2, c3])
- nt.assert_equal(c1['tags'], ['env:production', 'db', 'type:constant'])
- nt.assert_equal(c1['points'][0][1], 1)
- nt.assert_equal(c2['tags'], ['env:staging', 'type:constant'])
- nt.assert_equal(c2['points'][0][1], 1)
- nt.assert_equal(c3['tags'], ['type:constant'])
- nt.assert_equal(c3['points'][0][1], 1)
+ # Assertions on gauges
+ self.assertMetric(name='gauge', value=10, tags=["type:constant"], count=1)
+ self.assertMetric(name="gauge", value=15, tags=["env:production", "db", "type:constant"], count=1) # noqa
+ self.assertMetric(name="gauge", value=20, tags=["env:staging", "type:constant"], count=1)
- (nt.assert_equal(c['metric'], 'gauge') for c in [g1, g2, g3])
- nt.assert_equal(g1['tags'], ['env:production', 'db', 'type:constant'])
- nt.assert_equal(g1['points'][0][1], 15)
- nt.assert_equal(g2['tags'], ['env:staging', 'type:constant'])
- nt.assert_equal(g2['points'][0][1], 20)
- nt.assert_equal(g3['tags'], ['type:constant'])
- nt.assert_equal(g3['points'][0][1], 10)
+ # Assertions on counters
+ self.assertMetric(name="counter", value=1, tags=["type:constant"], count=1)
+ self.assertMetric(name="counter", value=1, tags=["env:production", "db", "type:constant"], count=1) # noqa
+ self.assertMetric(name="counter", value=1, tags=["env:staging", "type:constant"], count=1)
# Ensure histograms work as well.
@dog.timed('timed', tags=['version:1'])
- def test():
+ def do_nothing():
+ """
+ A function that does nothing, but being timed.
+ """
pass
- test()
+
+ with patch("datadog.threadstats.base.time", return_value=300):
+ do_nothing()
+
dog.histogram('timed', 20, timestamp=300.0, tags=['db', 'version:2'])
- reporter.metrics = []
- dog.flush(400)
- for metric in reporter.metrics:
- assert metric['tags'] # this is enough
+
+ self.reporter.metrics = []
+ dog.flush(400.0)
+
+ # Histograms, and related metric types, produce 8 different metrics
+ self.assertMetric(tags=["version:1", "type:constant"], count=8)
+ self.assertMetric(tags=["db", "version:2", "type:constant"], count=8)
+
+ def test_metric_namespace(self):
+ """
+ Namespace prefixes all metric names.
+ """
+ # Set up ThreadStats with a namespace
+ dog = ThreadStats(namespace="foo")
+ dog.start(roll_up_interval=1, flush_in_thread=False)
+ dog.reporter = self.reporter
+
+ # Send a few metrics
+ dog.gauge("gauge", 20, timestamp=100.0)
+ dog.increment("counter", timestamp=100.0)
+ dog.flush(200.0)
+
+ # Metric names are prefixed with the namespace
+ self.assertMetric(count=2)
+ self.assertMetric(name="foo.gauge", count=1)
+ self.assertMetric(name="foo.counter", count=1)
def test_host(self):
dog = ThreadStats()
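
The same option flows through `ThreadStats`: `_get_aggregate_metrics()` prefixes every rolled-up metric name with the namespace at flush time. A sketch following the in-memory reporter trick used in the test patch above — the `MemoryReporter` below mirrors that test helper and is not part of the library:

```python
from datadog import ThreadStats

class MemoryReporter(object):
    """Collects flushed payloads in memory, like the test helper above."""
    def __init__(self):
        self.metrics, self.events = [], []
    def flush_metrics(self, metrics):
        self.metrics += metrics
    def flush_events(self, events):
        self.events += events

stats = ThreadStats(namespace="foo")
stats.start(roll_up_interval=1, flush_in_thread=False)
stats.reporter = MemoryReporter()  # avoid hitting the real API

stats.gauge("gauge", 20, timestamp=100.0)
stats.increment("counter", timestamp=100.0)
stats.flush(200.0)

sorted(m['metric'] for m in stats.reporter.metrics)
# ['foo.counter', 'foo.gauge']
```
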
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"six",
"mock",
"pytest"
],
"pre_install": null,
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
charset-normalizer==2.0.12
-e git+https://github.com/DataDog/datadogpy.git@c8bc9d6cce1caebea0be16366f2cd0c3efb47571#egg=datadog
decorator==5.1.1
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
requests==2.27.1
simplejson==3.20.1
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: datadogpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==2.0.12
- decorator==5.1.1
- idna==3.10
- mock==5.2.0
- nose==1.3.7
- requests==2.27.1
- simplejson==3.20.1
- six==1.17.0
- urllib3==1.26.20
prefix: /opt/conda/envs/datadogpy
| [
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_metric_namespace"
]
| [
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_set",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_gauge",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_counter",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_histogram",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tagged_gauge",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tagged_counter",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tagged_histogram",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_sample_rate",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tags_and_samples",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timing",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_event",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_event_constant_tags",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_service_check",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_metric_namespace",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_gauge_constant_tags",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_counter_constant_tag_with_metric_level_tags",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_gauge_constant_tags_with_metric_level_tags_twice",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_socket_error",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_in_ms",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_no_metric",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_context",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_context_exception",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_context_no_metric_exception",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_batched"
]
| [
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_initialization",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_context_manager",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_batched_buffer_autoflush",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_module_level_instance",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_instantiating_does_not_connect",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_accessing_socket_opens_socket",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_accessing_socket_multiple_times_returns_same_socket",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tags_from_environment",
"tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tags_from_environment_and_constant",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_constant_tags",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_counter",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_custom_host_and_device",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_default_host_and_device",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_disabled_mode",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_event",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_event_constant_tags",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_gauge",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_histogram",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_histogram_percentiles",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_host",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_stop",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_tags",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_tags_from_environment",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_tags_from_environment_and_constant",
"tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_timed_decorator"
]
| []
| BSD-3-Clause | 457 | [
"datadog/threadstats/base.py",
"datadog/dogstatsd/base.py"
]
| [
"datadog/threadstats/base.py",
"datadog/dogstatsd/base.py"
]
|
falconry__falcon-723 | ac03888ad750598175fa1591ef11ec8ae31b0dc0 | 2016-03-03 01:33:46 | b78ffaac7c412d3b3d6cd3c70dd05024d79d2cce | kgriffs: Routing tree from the test: https://github.com/falconry/falcon/pull/723
jmvrbanac: Interesting. Seems to look good to me.
kgriffs: Sorry, I had the wrong link for the routing tree. Updated.
jmvrbanac: Outside of @fxfitz's suggestion, :+1: | diff --git a/falcon/routing/compiled.py b/falcon/routing/compiled.py
index 5f8f951..9177edb 100644
--- a/falcon/routing/compiled.py
+++ b/falcon/routing/compiled.py
@@ -95,7 +95,7 @@ class CompiledRouter(object):
else:
return None, None, None
- def _compile_tree(self, nodes, indent=1, level=0):
+ def _compile_tree(self, nodes, indent=1, level=0, fast_return=True):
"""Generates Python code for a routing tree or subtree."""
def line(text, indent_offset=0):
@@ -119,6 +119,18 @@ class CompiledRouter(object):
nodes, key=lambda node: node.is_var + (node.is_var and
not node.is_complex))
+ # NOTE(kgriffs): Down to this branch in the tree, we can do a
+ # fast 'return None'. See if the nodes at this branch are
+ # all still simple, meaning there is only one possible path.
+ if fast_return:
+ if len(nodes) > 1:
+ # NOTE(kgriffs): There's the possibility of more than
+ # one path.
+ var_nodes = [node for node in nodes if node.is_var]
+ found_var_nodes = bool(var_nodes)
+
+ fast_return = not found_var_nodes
+
for node in nodes:
if node.is_var:
if node.is_complex:
@@ -162,10 +174,11 @@ class CompiledRouter(object):
resource_idx = len(self._return_values)
self._return_values.append(node)
- self._compile_tree(node.children, indent, level + 1)
+ self._compile_tree(node.children, indent, level + 1, fast_return)
if node.resource is None:
- line('return None')
+ if fast_return:
+ line('return None')
else:
# NOTE(kgriffs): Make sure that we have consumed all of
# the segments for the requested route; otherwise we could
@@ -173,11 +186,12 @@ class CompiledRouter(object):
line('if path_len == %d:' % (level + 1))
line('return return_values[%d]' % resource_idx, 1)
- line('return None')
+ if fast_return:
+ line('return None')
indent = level_indent
- if not found_simple:
+ if not found_simple and fast_return:
line('return None')
def _compile(self):
| Path segment in one route's URI template masks the field expression in another route
The following test demonstrates the issue. The assertion fails since the resulting status code is 404, rather than 200. "/v2.0/thing" should route to `/{version}/thing` instead of `/v2.0`.
```py
def test_string_vs_var(self):
self.api.add_route('/v2.0', self.resource)
self.simulate_request('/v2.0')
self.api.add_route('/{version}/thing', testing.TestResource())
self.simulate_request('/v2.0/thing')
self.assertEqual(self.srmock.status, falcon.HTTP_200)
```
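For context, here is a minimal standalone sketch of the expected resolution (not part of the original report). It drives the router directly, the same way the regression tests added for this issue do, so the only assumed API is `DefaultRouter.add_route(template, method_map, resource)` and `find(path)` as exercised in the test patch that follows.
```py
# Sketch only: a literal segment ('/v2.0') registered alongside a variable
# template ('/{version}/thing') must not mask the longer route.
from falcon.routing import DefaultRouter

router = DefaultRouter()
thing = object()                      # stand-in resource; any object works here
router.add_route('/v2.0', {}, object())
router.add_route('/{version}/thing', {}, thing)

resource, method_map, params = router.find('/v2.0/thing')
assert resource is thing              # resolved to None (hence 404) before the fix
assert params == {'version': 'v2.0'}  # the field expression captures the segment
```
A second pair of route templates from the report, where the same literal-versus-variable overlap arises: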
```
/{version}/foo/bar
/v1.0
``` | falconry/falcon | diff --git a/tests/test_default_router.py b/tests/test_default_router.py
index 9dc5ecd..84af78f 100644
--- a/tests/test_default_router.py
+++ b/tests/test_default_router.py
@@ -1,5 +1,6 @@
import ddt
+from falcon.routing import DefaultRouter
import falcon.testing as testing
@@ -14,66 +15,115 @@ class ResourceWithId(object):
resp.body = self.resource_id
-def setup_routes(router_interface):
- router_interface.add_route(
- '/repos', {}, ResourceWithId(1))
- router_interface.add_route(
- '/repos/{org}', {}, ResourceWithId(2))
- router_interface.add_route(
- '/repos/{org}/{repo}', {}, ResourceWithId(3))
- router_interface.add_route(
- '/repos/{org}/{repo}/commits', {}, ResourceWithId(4))
- router_interface.add_route(
- '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}',
- {}, ResourceWithId(5))
- router_interface.add_route(
- '/teams/{id}', {}, ResourceWithId(6))
- router_interface.add_route(
- '/teams/{id}/members', {}, ResourceWithId(7))
- router_interface.add_route(
- '/user/memberships', {}, ResourceWithId(8))
- router_interface.add_route(
- '/emojis', {}, ResourceWithId(9))
- router_interface.add_route(
- '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/full',
- {}, ResourceWithId(10))
- router_interface.add_route(
- '/repos/{org}/{repo}/compare/all', {}, ResourceWithId(11))
-
- # NOTE(kgriffs): The ordering of these calls is significant; we
- # need to test that the {id} field does not match the other routes,
- # regardless of the order they are added.
- router_interface.add_route(
- '/emojis/signs/0', {}, ResourceWithId(12))
- router_interface.add_route(
- '/emojis/signs/{id}', {}, ResourceWithId(13))
- router_interface.add_route(
- '/emojis/signs/42', {}, ResourceWithId(14))
- router_interface.add_route(
- '/emojis/signs/42/small', {}, ResourceWithId(14.1))
- router_interface.add_route(
- '/emojis/signs/78/small', {}, ResourceWithId(14.1))
-
- router_interface.add_route(
- '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/part',
- {}, ResourceWithId(15))
- router_interface.add_route(
- '/repos/{org}/{repo}/compare/{usr0}:{branch0}',
- {}, ResourceWithId(16))
- router_interface.add_route(
- '/repos/{org}/{repo}/compare/{usr0}:{branch0}/full',
- {}, ResourceWithId(17))
-
- router_interface.add_route(
- '/gists/{id}/raw', {}, ResourceWithId(18))
+class TestRegressionCases(testing.TestBase):
+ """Test specific repros reported by users of the framework."""
+
+ def before(self):
+ self.router = DefaultRouter()
+
+ def test_versioned_url(self):
+ self.router.add_route('/{version}/messages', {}, ResourceWithId(2))
+
+ resource, method_map, params = self.router.find('/v2/messages')
+ self.assertEqual(resource.resource_id, 2)
+
+ self.router.add_route('/v2', {}, ResourceWithId(1))
+
+ resource, method_map, params = self.router.find('/v2')
+ self.assertEqual(resource.resource_id, 1)
+
+ resource, method_map, params = self.router.find('/v2/messages')
+ self.assertEqual(resource.resource_id, 2)
+
+ resource, method_map, params = self.router.find('/v1/messages')
+ self.assertEqual(resource.resource_id, 2)
+
+ resource, method_map, params = self.router.find('/v1')
+ self.assertIs(resource, None)
+
+ def test_recipes(self):
+ self.router.add_route(
+ '/recipes/{activity}/{type_id}', {}, ResourceWithId(1))
+ self.router.add_route(
+ '/recipes/baking', {}, ResourceWithId(2))
+
+ resource, method_map, params = self.router.find('/recipes/baking/4242')
+ self.assertEqual(resource.resource_id, 1)
+
+ resource, method_map, params = self.router.find('/recipes/baking')
+ self.assertEqual(resource.resource_id, 2)
+
+ resource, method_map, params = self.router.find('/recipes/grilling')
+ self.assertIs(resource, None)
@ddt.ddt
-class TestStandaloneRouter(testing.TestBase):
+class TestComplexRouting(testing.TestBase):
def before(self):
- from falcon.routing import DefaultRouter
self.router = DefaultRouter()
- setup_routes(self.router)
+
+ self.router.add_route(
+ '/repos', {}, ResourceWithId(1))
+ self.router.add_route(
+ '/repos/{org}', {}, ResourceWithId(2))
+ self.router.add_route(
+ '/repos/{org}/{repo}', {}, ResourceWithId(3))
+ self.router.add_route(
+ '/repos/{org}/{repo}/commits', {}, ResourceWithId(4))
+ self.router.add_route(
+ '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}',
+ {}, ResourceWithId(5))
+
+ self.router.add_route(
+ '/teams/{id}', {}, ResourceWithId(6))
+ self.router.add_route(
+ '/teams/{id}/members', {}, ResourceWithId(7))
+
+ self.router.add_route(
+ '/teams/default', {}, ResourceWithId(19))
+ self.router.add_route(
+ '/teams/default/members/thing', {}, ResourceWithId(19))
+
+ self.router.add_route(
+ '/user/memberships', {}, ResourceWithId(8))
+ self.router.add_route(
+ '/emojis', {}, ResourceWithId(9))
+ self.router.add_route(
+ '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/full',
+ {}, ResourceWithId(10))
+ self.router.add_route(
+ '/repos/{org}/{repo}/compare/all', {}, ResourceWithId(11))
+
+ # NOTE(kgriffs): The ordering of these calls is significant; we
+ # need to test that the {id} field does not match the other routes,
+ # regardless of the order they are added.
+ self.router.add_route(
+ '/emojis/signs/0', {}, ResourceWithId(12))
+ self.router.add_route(
+ '/emojis/signs/{id}', {}, ResourceWithId(13))
+ self.router.add_route(
+ '/emojis/signs/42', {}, ResourceWithId(14))
+ self.router.add_route(
+ '/emojis/signs/42/small', {}, ResourceWithId(14.1))
+ self.router.add_route(
+ '/emojis/signs/78/small', {}, ResourceWithId(22))
+
+ self.router.add_route(
+ '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/part',
+ {}, ResourceWithId(15))
+ self.router.add_route(
+ '/repos/{org}/{repo}/compare/{usr0}:{branch0}',
+ {}, ResourceWithId(16))
+ self.router.add_route(
+ '/repos/{org}/{repo}/compare/{usr0}:{branch0}/full',
+ {}, ResourceWithId(17))
+
+ self.router.add_route(
+ '/gists/{id}/{representation}', {}, ResourceWithId(21))
+ self.router.add_route(
+ '/gists/{id}/raw', {}, ResourceWithId(18))
+ self.router.add_route(
+ '/gists/first', {}, ResourceWithId(20))
@ddt.data(
'/teams/{collision}', # simple vs simple
@@ -103,20 +153,6 @@ class TestStandaloneRouter(testing.TestBase):
resource, method_map, params = self.router.find('/emojis/signs/0')
self.assertEqual(resource.resource_id, -1)
- def test_missing(self):
- resource, method_map, params = self.router.find('/this/does/not/exist')
- self.assertIs(resource, None)
-
- resource, method_map, params = self.router.find('/user/bogus')
- self.assertIs(resource, None)
-
- resource, method_map, params = self.router.find('/teams/1234/bogus')
- self.assertIs(resource, None)
-
- resource, method_map, params = self.router.find(
- '/repos/racker/falcon/compare/johndoe:master...janedoe:dev/bogus')
- self.assertIs(resource, None)
-
def test_literal_segment(self):
resource, method_map, params = self.router.find('/emojis/signs/0')
self.assertEqual(resource.resource_id, 12)
@@ -167,6 +203,54 @@ class TestStandaloneRouter(testing.TestBase):
resource, method_map, params = self.router.find('/gists/42/raw')
self.assertEqual(params, {'id': '42'})
+ @ddt.data(
+ ('/teams/default', 19),
+ ('/teams/default/members', 7),
+ ('/teams/foo', 6),
+ ('/teams/foo/members', 7),
+ ('/gists/first', 20),
+ ('/gists/first/raw', 18),
+ ('/gists/first/pdf', 21),
+ ('/gists/1776/pdf', 21),
+ ('/emojis/signs/78', 13),
+ ('/emojis/signs/78/small', 22),
+ )
+ @ddt.unpack
+ def test_literal_vs_variable(self, path, expected_id):
+ resource, method_map, params = self.router.find(path)
+ self.assertEqual(resource.resource_id, expected_id)
+
+ @ddt.data(
+ # Misc.
+ '/this/does/not/exist',
+ '/user/bogus',
+ '/repos/racker/falcon/compare/johndoe:master...janedoe:dev/bogus',
+
+ # Literal vs variable (teams)
+ '/teams',
+ '/teams/42/members/undefined',
+ '/teams/42/undefined',
+ '/teams/42/undefined/segments',
+ '/teams/default/members/undefined',
+ '/teams/default/members/thing/undefined',
+ '/teams/default/members/thing/undefined/segments',
+ '/teams/default/undefined',
+ '/teams/default/undefined/segments',
+
+ # Literal vs variable (emojis)
+ '/emojis/signs',
+ '/emojis/signs/0/small',
+ '/emojis/signs/0/undefined',
+ '/emojis/signs/0/undefined/segments',
+ '/emojis/signs/20/small',
+ '/emojis/signs/20/undefined',
+ '/emojis/signs/42/undefined',
+ '/emojis/signs/78/undefined',
+ )
+ def test_not_found(self, path):
+ resource, method_map, params = self.router.find(path)
+ self.assertIs(resource, None)
+
def test_subsegment_not_found(self):
resource, method_map, params = self.router.find('/emojis/signs/0/x')
self.assertIs(resource, None)
@@ -195,7 +279,7 @@ class TestStandaloneRouter(testing.TestBase):
'usr0': 'johndoe',
'branch0': 'master',
'usr1': 'janedoe',
- 'branch1': 'dev'
+ 'branch1': 'dev',
})
@ddt.data(('', 16), ('/full', 17))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"tox",
"coveralls",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
coveralls==3.3.1
ddt==1.7.2
distlib==0.3.9
docopt==0.6.2
-e git+https://github.com/falconry/falcon.git@ac03888ad750598175fa1591ef11ec8ae31b0dc0#egg=falcon
filelock==3.4.1
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- coveralls==3.3.1
- ddt==1.7.2
- distlib==0.3.9
- docopt==0.6.2
- filelock==3.4.1
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_default_router.py::TestRegressionCases::test_recipes",
"tests/test_default_router.py::TestRegressionCases::test_versioned_url",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_02____teams_default_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_06____gists_first_raw___18_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_07____gists_first_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_09____emojis_signs_78___13_"
]
| []
| [
"tests/test_default_router.py::TestComplexRouting::test_collision_1__teams__collision_",
"tests/test_default_router.py::TestComplexRouting::test_collision_2__emojis_signs__id_too_",
"tests/test_default_router.py::TestComplexRouting::test_collision_3__repos__org___repo__compare__complex___vs_____complex2___collision_",
"tests/test_default_router.py::TestComplexRouting::test_complex_1______5_",
"tests/test_default_router.py::TestComplexRouting::test_complex_2____full___10_",
"tests/test_default_router.py::TestComplexRouting::test_complex_3____part___15_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_1______16_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_2____full___17_",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_1__teams",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_2__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_3__gists",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_4__gists_42",
"tests/test_default_router.py::TestComplexRouting::test_dump",
"tests/test_default_router.py::TestComplexRouting::test_literal",
"tests/test_default_router.py::TestComplexRouting::test_literal_segment",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_01____teams_default___19_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_03____teams_foo___6_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_04____teams_foo_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_05____gists_first___20_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_08____gists_1776_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_10____emojis_signs_78_small___22_",
"tests/test_default_router.py::TestComplexRouting::test_malformed_pattern",
"tests/test_default_router.py::TestComplexRouting::test_multivar",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_1__repos__org___repo__compare__simple_vs_complex_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_2__repos__complex___vs___simple_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_3__repos__org___repo__compare__complex___vs_____complex2__full",
"tests/test_default_router.py::TestComplexRouting::test_not_found_01__this_does_not_exist",
"tests/test_default_router.py::TestComplexRouting::test_not_found_02__user_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_03__repos_racker_falcon_compare_johndoe_master___janedoe_dev_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_04__teams",
"tests/test_default_router.py::TestComplexRouting::test_not_found_05__teams_42_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_06__teams_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_07__teams_42_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_08__teams_default_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_09__teams_default_members_thing_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_10__teams_default_members_thing_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_11__teams_default_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_12__teams_default_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_13__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_not_found_14__emojis_signs_0_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_15__emojis_signs_0_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_16__emojis_signs_0_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_17__emojis_signs_20_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_18__emojis_signs_20_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_19__emojis_signs_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_20__emojis_signs_78_undefined",
"tests/test_default_router.py::TestComplexRouting::test_override",
"tests/test_default_router.py::TestComplexRouting::test_subsegment_not_found",
"tests/test_default_router.py::TestComplexRouting::test_variable"
]
| []
| Apache License 2.0 | 458 | [
"falcon/routing/compiled.py"
]
| [
"falcon/routing/compiled.py"
]
|
dask__dask-1024 | 09bda79a79acc261f31a446425efc5dd2fb42b9a | 2016-03-03 07:43:47 | 6dc9229362f2d3b1dfa466a8a63831c3c832b4be | diff --git a/dask/array/reductions.py b/dask/array/reductions.py
index 3f05e1628..a29003285 100644
--- a/dask/array/reductions.py
+++ b/dask/array/reductions.py
@@ -1,12 +1,12 @@
from __future__ import absolute_import, division, print_function
from functools import partial, wraps
-from itertools import product
+from itertools import product, repeat
from math import factorial, log, ceil
import operator
import numpy as np
-from toolz import compose, partition_all, merge, get
+from toolz import compose, partition_all, merge, get, accumulate, pluck
from . import chunk
from .core import _concatenate2, Array, atop, sqrt, lol_tuples
@@ -34,6 +34,22 @@ def reduction(x, chunk, aggregate, axis=None, keepdims=None, dtype=None,
if dtype and 'dtype' in getargspec(aggregate).args:
aggregate = partial(aggregate, dtype=dtype)
+ # Map chunk across all blocks
+ inds = tuple(range(x.ndim))
+ tmp = atop(partial(chunk, axis=axis, keepdims=True), inds, x, inds)
+ tmp._chunks = tuple((1,)*len(c) if i in axis else c for (i, c)
+ in enumerate(tmp.chunks))
+
+ return _tree_reduce(tmp, aggregate, axis, keepdims, dtype, split_every,
+ combine)
+
+
+def _tree_reduce(x, aggregate, axis, keepdims, dtype, split_every=None,
+ combine=None):
+ """Perform the tree reduction step of a reduction.
+
+ Lower level, users should use ``reduction`` or ``arg_reduction`` directly.
+ """
# Normalize split_every
split_every = split_every or _globals.get('split_every', 4)
if isinstance(split_every, dict):
@@ -44,24 +60,18 @@ def reduction(x, chunk, aggregate, axis=None, keepdims=None, dtype=None,
else:
split_every = dict((k, v) for (k, v) in enumerate(x.numblocks) if k in axis)
- # Map chunk across all blocks
- inds = tuple(range(x.ndim))
- tmp = atop(partial(chunk, axis=axis, keepdims=True), inds, x, inds)
- tmp._chunks = tuple((1,)*len(c) if i in axis else c for (i, c)
- in enumerate(tmp.chunks))
-
# Reduce across intermediates
depth = 1
- for i, n in enumerate(tmp.numblocks):
+ for i, n in enumerate(x.numblocks):
if i in split_every and split_every[i] != 1:
depth = int(builtins.max(depth, ceil(log(n, split_every[i]))))
func = compose(partial(combine or aggregate, axis=axis, keepdims=True),
partial(_concatenate2, axes=axis))
for i in range(depth - 1):
- tmp = partial_reduce(func, tmp, split_every, True, None)
+ x = partial_reduce(func, x, split_every, True, None)
func = compose(partial(aggregate, axis=axis, keepdims=keepdims),
partial(_concatenate2, axes=axis))
- return partial_reduce(func, tmp, split_every, keepdims=keepdims,
+ return partial_reduce(func, x, split_every, keepdims=keepdims,
dtype=dtype)
@@ -403,71 +413,130 @@ def vnorm(a, ord=None, axis=None, dtype=None, keepdims=False, split_every=None):
split_every=split_every)**(1./ord)
-def _arg_combine(data, axis, argfunc):
+def _arg_combine(data, axis, argfunc, keepdims=False):
"""Merge intermediate results from ``arg_*`` functions"""
+ axis = None if len(axis) == data.ndim or data.ndim == 1 else axis[0]
vals = data['vals']
arg = data['arg']
- ns = data['n']
- args = argfunc(vals, axis=axis)
- offsets = np.roll(np.cumsum(ns, axis=axis), 1, axis)
- offsets[tuple(slice(None) if i != axis else 0 for i in range(ns.ndim))] = 0
- inds = list(reversed(np.meshgrid(*map(np.arange, args.shape), sparse=True)))
- inds.insert(axis, args)
-
- arg = (arg + offsets)[tuple(inds)]
- vals = vals[tuple(inds)]
- n = ns.sum(axis=axis).take(0, 0)
- return arg, vals, n
-
-
-def arg_chunk(func, argfunc, x, axis=None, **kwargs):
- axis = axis[0] if isinstance(axis, tuple) else axis
- vals = func(x, axis=axis, keepdims=True)
- arg = argfunc(x, axis=axis, keepdims=True)
+ if axis is None:
+ local_args = argfunc(vals, axis=axis, keepdims=keepdims)
+ vals = vals.ravel()[local_args]
+ arg = arg.ravel()[local_args]
+ else:
+ local_args = argfunc(vals, axis=axis)
+ inds = np.ogrid[tuple(map(slice, local_args.shape))]
+ inds.insert(axis, local_args)
+ vals = vals[inds]
+ arg = arg[inds]
+ if keepdims:
+ vals = np.expand_dims(vals, axis)
+ arg = np.expand_dims(arg, axis)
+ return arg, vals
+
+
+def arg_chunk(func, argfunc, x, axis, offset_info):
+ arg_axis = None if len(axis) == x.ndim or x.ndim == 1 else axis[0]
+ vals = func(x, axis=arg_axis, keepdims=True)
+ arg = argfunc(x, axis=arg_axis, keepdims=True)
+ if arg_axis is None:
+ offset, total_shape = offset_info
+ ind = np.unravel_index(arg.ravel()[0], x.shape)
+ total_ind = tuple(o + i for (o, i) in zip(offset, ind))
+ arg[:] = np.ravel_multi_index(total_ind, total_shape)
+ else:
+ arg += offset_info
+
result = np.empty(shape=vals.shape, dtype=[('vals', vals.dtype),
- ('arg', arg.dtype),
- ('n', 'i8')])
+ ('arg', arg.dtype)])
result['vals'] = vals
result['arg'] = arg
- result['n'] = x.shape[axis]
return result
def arg_combine(func, argfunc, data, axis=None, **kwargs):
- axis = axis[0] if isinstance(axis, tuple) else axis
- arg, vals, n = _arg_combine(data, axis, argfunc)
- shape = tuple(s if i != axis else 1 for (i, s) in enumerate(data.shape))
- result = np.empty(shape=shape, dtype=[('vals', vals.dtype),
- ('arg', arg.dtype),
- ('n', 'i8')])
- result['vals'] = vals.reshape(shape)
- result['arg'] = arg.reshape(shape)
- result['n'] = n
+ arg, vals = _arg_combine(data, axis, argfunc, keepdims=True)
+ result = np.empty(shape=vals.shape, dtype=[('vals', vals.dtype),
+ ('arg', arg.dtype)])
+ result['vals'] = vals
+ result['arg'] = arg
return result
def arg_agg(func, argfunc, data, axis=None, **kwargs):
- axis = axis[0] if isinstance(axis, tuple) else axis
- return _arg_combine(data, axis, argfunc)[0]
+ return _arg_combine(data, axis, argfunc, keepdims=False)[0]
+
+
+def arg_reduction(x, chunk, combine, agg, axis=None, split_every=None):
+ """Generic function for argreduction.
+
+ Parameters
+ ----------
+ x : Array
+ chunk : callable
+ Partialed ``arg_chunk``.
+ combine : callable
+ Partialed ``arg_combine``.
+ agg : callable
+ Partialed ``arg_agg``.
+ axis : int, optional
+ split_every : int or dict, optional
+ """
+ if axis is None:
+ axis = tuple(range(x.ndim))
+ ravel = True
+ elif isinstance(axis, int):
+ if axis < 0:
+ axis += x.ndim
+ if axis < 0 or axis >= x.ndim:
+ raise ValueError("axis entry is out of bounds")
+ axis = (axis,)
+ ravel = x.ndim == 1
+ else:
+ raise TypeError("axis must be either `None` or int, "
+ "got '{0}'".format(axis))
+
+ # Map chunk across all blocks
+ name = 'arg-reduce-chunk-{0}'.format(tokenize(chunk, axis))
+ old = x.name
+ keys = list(product(*map(range, x.numblocks)))
+ offsets = list(product(*(accumulate(operator.add, bd[:-1], 0)
+ for bd in x.chunks)))
+ if ravel:
+ offset_info = zip(offsets, repeat(x.shape))
+ else:
+ offset_info = pluck(axis[0], offsets)
+
+ chunks = tuple((1,)*len(c) if i in axis else c for (i, c)
+ in enumerate(x.chunks))
+ dsk = dict(((name,) + k, (chunk, (old,) + k, axis, off)) for (k, off)
+ in zip(keys, offset_info))
+ tmp = Array(merge(dsk, x.dask), name, chunks)
+ return _tree_reduce(tmp, agg, axis, False, np.int64, split_every, combine)
+
+def make_arg_reduction(func, argfunc):
+ """Create a argreduction callable.
-def arg_reduction(func, argfunc):
+ Parameters
+ ----------
+ func : callable
+ The reduction (e.g. ``min``)
+ argfunc : callable
+ The argreduction (e.g. ``argmin``)
+ """
chunk = partial(arg_chunk, func, argfunc)
- agg = partial(arg_agg, func, argfunc)
combine = partial(arg_combine, func, argfunc)
+ agg = partial(arg_agg, func, argfunc)
@wraps(argfunc)
- def _(a, axis=None, split_every=None):
- if axis < 0:
- axis = a.ndim + axis
- return reduction(a, chunk, agg, axis=axis, dtype='i8',
- split_every=split_every, combine=combine)
+ def _(x, axis=None, split_every=None):
+ return arg_reduction(x, chunk, combine, agg, axis, split_every)
return _
-argmin = arg_reduction(chunk.min, chunk.argmin)
-argmax = arg_reduction(chunk.max, chunk.argmax)
-nanargmin = arg_reduction(chunk.nanmin, chunk.nanargmin)
-nanargmax = arg_reduction(chunk.nanmax, chunk.nanargmax)
+argmin = make_arg_reduction(chunk.min, chunk.argmin)
+argmax = make_arg_reduction(chunk.max, chunk.argmax)
+nanargmin = make_arg_reduction(chunk.nanmin, chunk.nanargmin)
+nanargmax = make_arg_reduction(chunk.nanmax, chunk.nanargmax)
def cumreduction(func, binop, ident, x, axis, dtype=None):
| dask.array.argmin fails on 3D input
```
In [29]: da.from_array(np.random.randn(2, 3, 4), chunks=(2, 3, 4)).argmin(axis=0).compute()
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-29-b25ac117010d> in <module>()
----> 1 da.from_array(np.random.randn(2, 3, 4), chunks=(2, 3, 4)).argmin(axis=0).compute()
/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/base.py in compute(self, **kwargs)
35
36 def compute(self, **kwargs):
---> 37 return compute(self, **kwargs)[0]
38
39 @classmethod
/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/base.py in compute(*args, **kwargs)
108 for opt, val in groups.items()])
109 keys = [var._keys() for var in variables]
--> 110 results = get(dsk, keys, **kwargs)
111
112 results_iter = iter(results)
/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
55 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
56 cache=cache, queue=queue, get_id=_thread_get_id,
---> 57 **kwargs)
58
59 return results
/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/async.py in get_async(apply_async, num_workers, dsk, result, cache, queue, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, **kwargs)
479 _execute_task(task, data) # Re-execute locally
480 else:
--> 481 raise(remote_exception(res, tb))
482 state['cache'][key] = res
483 finish_task(dsk, key, state, results, keyorder.get)
IndexError: shape mismatch: indexing arrays could not be broadcast together with shapes (3,4) (4,1) (1,3)
Traceback
---------
File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/async.py", line 264, in execute_task
result = _execute_task(task, data)
File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/async.py", line 246, in _execute_task
return func(*args2)
File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/toolz/functoolz.py", line 381, in __call__
ret = f(ret)
File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/array/reductions.py", line 450, in arg_agg
return _arg_combine(data, axis, argfunc)[0]
File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/array/reductions.py", line 416, in _arg_combine
arg = (arg + offsets)[tuple(inds)]
```
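For reference, a small self-contained check distilled from the session above and from the regression tests added in this record (the shape and chunking below follow the new tests): after the fix, per-axis and flattened argmin on a multi-chunk 3-D dask array are expected to agree with NumPy.
```py
# Repro/check sketch; before the fix, the axis=0 call below raised IndexError.
import numpy as np
import dask.array as da

x = np.random.randn(10, 10, 10)
a = da.from_array(x, chunks=(3, 4, 5))   # several chunks along every axis

assert np.array_equal(a.argmin(axis=0).compute(), np.argmin(x, axis=0))

# The repaired arg reductions should agree with NumPy on every axis,
# including the flattened (axis=None) case:
for axis in (None, 1, 2):
    assert np.array_equal(da.argmin(a, axis=axis).compute(), np.argmin(x, axis=axis))
```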
This was reported in xarray: https://github.com/pydata/xarray/issues/759 | dask/dask | diff --git a/dask/array/tests/test_reductions.py b/dask/array/tests/test_reductions.py
index 9b13a98dc..7b734416f 100644
--- a/dask/array/tests/test_reductions.py
+++ b/dask/array/tests/test_reductions.py
@@ -73,16 +73,6 @@ def test_reductions_1D(dtype):
reduction_1d_test(da.nanmin, a, np.nanmin, x, False)
reduction_1d_test(da.nanmax, a, np.nanmax, x, False)
- assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
- assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
- assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
- assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
-
- assert eq(da.argmax(a, axis=0, split_every=2), np.argmax(x, axis=0))
- assert eq(da.argmin(a, axis=0, split_every=2), np.argmin(x, axis=0))
- assert eq(da.nanargmax(a, axis=0, split_every=2), np.nanargmax(x, axis=0))
- assert eq(da.nanargmin(a, axis=0, split_every=2), np.nanargmin(x, axis=0))
-
def reduction_2d_test(da_func, darr, np_func, narr, use_dtype=True,
split_every=True):
@@ -144,23 +134,32 @@ def test_reductions_2D(dtype):
reduction_2d_test(da.nanmin, a, np.nanmin, x, False)
reduction_2d_test(da.nanmax, a, np.nanmax, x, False)
- assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
- assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
- assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
- assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
- assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1))
- assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1))
- assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1))
- assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1))
-
- assert eq(da.argmax(a, axis=0, split_every=2), np.argmax(x, axis=0))
- assert eq(da.argmin(a, axis=0, split_every=2), np.argmin(x, axis=0))
- assert eq(da.nanargmax(a, axis=0, split_every=2), np.nanargmax(x, axis=0))
- assert eq(da.nanargmin(a, axis=0, split_every=2), np.nanargmin(x, axis=0))
- assert eq(da.argmax(a, axis=1, split_every=2), np.argmax(x, axis=1))
- assert eq(da.argmin(a, axis=1, split_every=2), np.argmin(x, axis=1))
- assert eq(da.nanargmax(a, axis=1, split_every=2), np.nanargmax(x, axis=1))
- assert eq(da.nanargmin(a, axis=1, split_every=2), np.nanargmin(x, axis=1))
+
[email protected](['dfunc', 'func'],
+ [(da.argmin, np.argmin), (da.argmax, np.argmax),
+ (da.nanargmin, np.nanargmin), (da.nanargmax, np.nanargmax)])
+def test_arg_reductions(dfunc, func):
+ x = np.random.random((10, 10, 10))
+ a = da.from_array(x, chunks=(3, 4, 5))
+
+ assert eq(dfunc(a), func(x))
+ assert eq(dfunc(a, 0), func(x, 0))
+ assert eq(dfunc(a, 1), func(x, 1))
+ assert eq(dfunc(a, 2), func(x, 2))
+ with set_options(split_every=2):
+ assert eq(dfunc(a), func(x))
+ assert eq(dfunc(a, 0), func(x, 0))
+ assert eq(dfunc(a, 1), func(x, 1))
+ assert eq(dfunc(a, 2), func(x, 2))
+
+ pytest.raises(ValueError, lambda: dfunc(a, 3))
+ pytest.raises(TypeError, lambda: dfunc(a, (0, 1)))
+
+ x2 = np.arange(10)
+ a2 = da.from_array(x2, chunks=3)
+ assert eq(dfunc(a2), func(x2))
+ assert eq(dfunc(a2, 0), func(x2, 0))
+ assert eq(dfunc(a2, 0, split_every=2), func(x2, 0))
def test_reductions_2D_nans():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@09bda79a79acc261f31a446425efc5dd2fb42b9a#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- botocore=1.23.24=pyhd3eb1b0_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- distributed=2021.3.0=py36h06a4308_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- msgpack-python=1.0.2=py36hff7bd54_1
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- s3fs=2022.1.0=pyhd3eb1b0_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_reductions.py::test_arg_reductions[argmin-argmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmax-argmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions[nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[nanargmax-nanargmax]"
]
| [
"dask/array/tests/test_reductions.py::test_reductions_2D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_2D[i4]"
]
| [
"dask/array/tests/test_reductions.py::test_reductions_1D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_1D[i4]",
"dask/array/tests/test_reductions.py::test_reductions_2D_nans",
"dask/array/tests/test_reductions.py::test_moment",
"dask/array/tests/test_reductions.py::test_reductions_with_negative_axes",
"dask/array/tests/test_reductions.py::test_nan",
"dask/array/tests/test_reductions.py::test_0d_array",
"dask/array/tests/test_reductions.py::test_reduction_on_scalar",
"dask/array/tests/test_reductions.py::test_tree_reduce_depth",
"dask/array/tests/test_reductions.py::test_tree_reduce_set_options"
]
| []
| BSD 3-Clause "New" or "Revised" License | 459 | [
"dask/array/reductions.py"
]
| [
"dask/array/reductions.py"
]
|
|
falconry__falcon-727 | 7bffb3342fea9fff5677c43e89e7e6eccfc388ed | 2016-03-03 18:05:13 | b78ffaac7c412d3b3d6cd3c70dd05024d79d2cce | diff --git a/falcon/request.py b/falcon/request.py
index c7322b4..eac9b95 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -167,19 +167,13 @@ class Request(object):
Note:
If an HTML form is POSTed to the API using the
- *application/x-www-form-urlencoded* media type, Falcon
+ *application/x-www-form-urlencoded* media type, and
+ the :py:attr:`~.RequestOptions.auto_parse_form_urlencoded`
+ option is set, the framework
will consume `stream` in order to parse the parameters
and merge them into the query string parameters. In this
case, the stream will be left at EOF.
- Note also that the character encoding for fields, before
- percent-encoding non-ASCII bytes, is assumed to be
- UTF-8. The special `_charset_` field is ignored if present.
-
- Falcon expects form-encoded request bodies to be
- encoded according to the standard W3C algorithm (see
- also http://goo.gl/6rlcux).
-
date (datetime): Value of the Date header, converted to a
``datetime`` instance. The header value is assumed to
conform to RFC 1123.
@@ -320,7 +314,8 @@ class Request(object):
# PERF(kgriffs): Technically, we should spend a few more
# cycles and parse the content type for real, but
# this heuristic will work virtually all the time.
- if (self.content_type is not None and
+ if (self.options.auto_parse_form_urlencoded and
+ self.content_type is not None and
'application/x-www-form-urlencoded' in self.content_type):
self._parse_form_urlencoded()
@@ -1159,11 +1154,28 @@ class RequestOptions(object):
Attributes:
keep_blank_qs_values (bool): Set to ``True`` in order to retain
blank values in query string parameters (default ``False``).
+ auto_parse_form_urlencoded: Set to ``True`` in order to
+ automatically consume the request stream and merge the
+ results into the request's query string params when the
+ request's content type is
+ *application/x-www-form-urlencoded* (default ``False``). In
+ this case, the request's content stream will be left at EOF.
+
+ Note:
+ The character encoding for fields, before
+ percent-encoding non-ASCII bytes, is assumed to be
+ UTF-8. The special `_charset_` field is ignored if present.
+
+ Falcon expects form-encoded request bodies to be
+ encoded according to the standard W3C algorithm (see
+ also http://goo.gl/6rlcux).
"""
__slots__ = (
'keep_blank_qs_values',
+ 'auto_parse_form_urlencoded',
)
def __init__(self):
self.keep_blank_qs_values = False
+ self.auto_parse_form_urlencoded = False
| Add an option to RequestOptions to toggle parsing of form params
Add an option to RequestOptions to enable or disable parsing of form params as currently implemented. It would be disabled by default (a breaking change, but easy to work around). This would mitigate the problems people have experienced with the request stream being automatically consumed when the POSTed content type is `application/x-www-form-urlencoded`.
This would not preclude working on a longer-term strategy for handling forms in a manner distinct from query parameters.
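From application code, opting back in would look roughly like the sketch below (the attribute name is taken from the RequestOptions changes in the diff above; the default stays off, so existing apps keep the raw stream):
```py
# Opt-in sketch for the option added by this patch.
import falcon

api = falcon.API()

# Off by default: POSTed form bodies stay on req.stream untouched.
# Enabling it restores the old behavior of folding form fields into the
# query-string params (leaving the stream at EOF).
api.req_options.auto_parse_form_urlencoded = True
```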
See also:
- #418
- #493 | falconry/falcon | diff --git a/tests/test_query_params.py b/tests/test_query_params.py
index 50ed010..93a429f 100644
--- a/tests/test_query_params.py
+++ b/tests/test_query_params.py
@@ -473,6 +473,10 @@ class _TestQueryParams(testing.TestBase):
class PostQueryParams(_TestQueryParams):
+ def before(self):
+ super(PostQueryParams, self).before()
+ self.api.req_options.auto_parse_form_urlencoded = True
+
def simulate_request(self, path, query_string, **kwargs):
headers = {"Content-Type": "application/x-www-form-urlencoded"}
super(PostQueryParams, self).simulate_request(
@@ -484,10 +488,29 @@ class PostQueryParams(_TestQueryParams):
self.simulate_request('/', query_string=query_string)
req = self.resource.req
- self.assertEqual(req.get_param('q'), None)
+ self.assertIs(req.get_param('q'), None)
+
+ def test_explicitly_disable_auto_parse(self):
+ self.api.req_options.auto_parse_form_urlencoded = False
+ self.simulate_request('/', query_string='q=42')
+
+ req = self.resource.req
+ self.assertIs(req.get_param('q'), None)
class GetQueryParams(_TestQueryParams):
def simulate_request(self, path, query_string, **kwargs):
super(GetQueryParams, self).simulate_request(
path, query_string=query_string, **kwargs)
+
+
+class PostQueryParamsDefaultBehavior(testing.TestBase):
+ def test_dont_auto_parse_by_default(self):
+ self.resource = testing.TestResource()
+ self.api.add_route('/', self.resource)
+
+ headers = {"Content-Type": "application/x-www-form-urlencoded"}
+ self.simulate_request('/', body='q=42', headers=headers)
+
+ req = self.resource.req
+ self.assertIs(req.get_param('q'), None)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"ddt",
"pyyaml",
"requests",
"testtools",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
ddt==1.7.2
exceptiongroup==1.2.2
-e git+https://github.com/falconry/falcon.git@7bffb3342fea9fff5677c43e89e7e6eccfc388ed#egg=falcon
idna==3.10
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-mimeparse==2.0.0
PyYAML==6.0.2
requests==2.32.3
six==1.17.0
testtools==2.7.2
tomli==2.2.1
urllib3==2.3.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- ddt==1.7.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-mimeparse==2.0.0
- pyyaml==6.0.2
- requests==2.32.3
- six==1.17.0
- testtools==2.7.2
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_query_params.py::PostQueryParams::test_allowed_names",
"tests/test_query_params.py::PostQueryParams::test_bad_percentage",
"tests/test_query_params.py::PostQueryParams::test_blank",
"tests/test_query_params.py::PostQueryParams::test_boolean",
"tests/test_query_params.py::PostQueryParams::test_boolean_blank",
"tests/test_query_params.py::PostQueryParams::test_explicitly_disable_auto_parse",
"tests/test_query_params.py::PostQueryParams::test_get_date_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_date_store",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::PostQueryParams::test_int",
"tests/test_query_params.py::PostQueryParams::test_int_neg",
"tests/test_query_params.py::PostQueryParams::test_list_transformer",
"tests/test_query_params.py::PostQueryParams::test_list_type",
"tests/test_query_params.py::PostQueryParams::test_list_type_blank",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::PostQueryParams::test_non_ascii",
"tests/test_query_params.py::PostQueryParams::test_none",
"tests/test_query_params.py::PostQueryParams::test_param_property",
"tests/test_query_params.py::PostQueryParams::test_percent_encoded",
"tests/test_query_params.py::PostQueryParams::test_required_1_get_param",
"tests/test_query_params.py::PostQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::PostQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::PostQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::PostQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default"
]
| []
| [
"tests/test_query_params.py::_TestQueryParams::test_allowed_names",
"tests/test_query_params.py::_TestQueryParams::test_bad_percentage",
"tests/test_query_params.py::_TestQueryParams::test_blank",
"tests/test_query_params.py::_TestQueryParams::test_boolean",
"tests/test_query_params.py::_TestQueryParams::test_boolean_blank",
"tests/test_query_params.py::_TestQueryParams::test_get_date_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_date_store",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::_TestQueryParams::test_int",
"tests/test_query_params.py::_TestQueryParams::test_int_neg",
"tests/test_query_params.py::_TestQueryParams::test_list_transformer",
"tests/test_query_params.py::_TestQueryParams::test_list_type",
"tests/test_query_params.py::_TestQueryParams::test_list_type_blank",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::_TestQueryParams::test_none",
"tests/test_query_params.py::_TestQueryParams::test_param_property",
"tests/test_query_params.py::_TestQueryParams::test_percent_encoded",
"tests/test_query_params.py::_TestQueryParams::test_required_1_get_param",
"tests/test_query_params.py::_TestQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::_TestQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::_TestQueryParams::test_simple",
"tests/test_query_params.py::GetQueryParams::test_allowed_names",
"tests/test_query_params.py::GetQueryParams::test_bad_percentage",
"tests/test_query_params.py::GetQueryParams::test_blank",
"tests/test_query_params.py::GetQueryParams::test_boolean",
"tests/test_query_params.py::GetQueryParams::test_boolean_blank",
"tests/test_query_params.py::GetQueryParams::test_get_date_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_date_store",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::GetQueryParams::test_int",
"tests/test_query_params.py::GetQueryParams::test_int_neg",
"tests/test_query_params.py::GetQueryParams::test_list_transformer",
"tests/test_query_params.py::GetQueryParams::test_list_type",
"tests/test_query_params.py::GetQueryParams::test_list_type_blank",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::GetQueryParams::test_none",
"tests/test_query_params.py::GetQueryParams::test_param_property",
"tests/test_query_params.py::GetQueryParams::test_percent_encoded",
"tests/test_query_params.py::GetQueryParams::test_required_1_get_param",
"tests/test_query_params.py::GetQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::GetQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::GetQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::GetQueryParams::test_simple"
]
| []
| Apache License 2.0 | 460 | [
"falcon/request.py"
]
| [
"falcon/request.py"
]
|
|
dask__dask-1028 | 6dc9229362f2d3b1dfa466a8a63831c3c832b4be | 2016-03-03 21:56:25 | 6dc9229362f2d3b1dfa466a8a63831c3c832b4be | diff --git a/dask/array/reductions.py b/dask/array/reductions.py
index a29003285..c0b12cd08 100644
--- a/dask/array/reductions.py
+++ b/dask/array/reductions.py
@@ -466,6 +466,13 @@ def arg_agg(func, argfunc, data, axis=None, **kwargs):
return _arg_combine(data, axis, argfunc, keepdims=False)[0]
+def nanarg_agg(func, argfunc, data, axis=None, **kwargs):
+ arg, vals = _arg_combine(data, axis, argfunc, keepdims=False)
+ if np.any(np.isnan(vals)):
+ raise ValueError("All NaN slice encountered")
+ return arg
+
+
def arg_reduction(x, chunk, combine, agg, axis=None, split_every=None):
"""Generic function for argreduction.
@@ -514,7 +521,7 @@ def arg_reduction(x, chunk, combine, agg, axis=None, split_every=None):
return _tree_reduce(tmp, agg, axis, False, np.int64, split_every, combine)
-def make_arg_reduction(func, argfunc):
+def make_arg_reduction(func, argfunc, is_nan_func=False):
"""Create a argreduction callable.
Parameters
@@ -526,17 +533,34 @@ def make_arg_reduction(func, argfunc):
"""
chunk = partial(arg_chunk, func, argfunc)
combine = partial(arg_combine, func, argfunc)
- agg = partial(arg_agg, func, argfunc)
+ if is_nan_func:
+ agg = partial(nanarg_agg, func, argfunc)
+ else:
+ agg = partial(arg_agg, func, argfunc)
@wraps(argfunc)
def _(x, axis=None, split_every=None):
return arg_reduction(x, chunk, combine, agg, axis, split_every)
return _
+def _nanargmin(x, axis, **kwargs):
+ try:
+ return chunk.nanargmin(x, axis, **kwargs)
+ except ValueError:
+ return chunk.nanargmin(np.where(np.isnan(x), np.inf, x), axis, **kwargs)
+
+
+def _nanargmax(x, axis, **kwargs):
+ try:
+ return chunk.nanargmax(x, axis, **kwargs)
+ except ValueError:
+ return chunk.nanargmax(np.where(np.isnan(x), -np.inf, x), axis, **kwargs)
+
+
argmin = make_arg_reduction(chunk.min, chunk.argmin)
argmax = make_arg_reduction(chunk.max, chunk.argmax)
-nanargmin = make_arg_reduction(chunk.nanmin, chunk.nanargmin)
-nanargmax = make_arg_reduction(chunk.nanmax, chunk.nanargmax)
+nanargmin = make_arg_reduction(chunk.nanmin, _nanargmin, True)
+nanargmax = make_arg_reduction(chunk.nanmax, _nanargmax, True)
def cumreduction(func, binop, ident, x, axis, dtype=None):
| da.nanargmax fails when it encounters an all-NaN slice in a chunk
Follow up on #776
```
In [1]: import numpy as np
In [2]: import dask.array as da
In [3]: x = np.array([[1.0, np.nan], [np.nan, 2.0]])
In [4]: da.nanmax(da.from_array(x, chunks=1), axis=1).compute()
/Users/shoyer/miniconda/envs/dask-dev/lib/python2.7/site-packages/numpy/lib/nanfunctions.py:319: RuntimeWarning: All-NaN slice encountered
warnings.warn("All-NaN slice encountered", RuntimeWarning)
Out[4]: array([ 1., 2.])
In [5]: da.nanargmax(da.from_array(x, chunks=1), axis=1).compute()
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-403b812e207c> in <module>()
----> 1 da.nanargmax(da.from_array(x, chunks=1), axis=1).compute()
/Users/shoyer/dev/dask/dask/base.pyc in compute(self, **kwargs)
29
30 def compute(self, **kwargs):
---> 31 return compute(self, **kwargs)[0]
32
33 @classmethod
/Users/shoyer/dev/dask/dask/base.pyc in compute(*args, **kwargs)
97 for opt, val in groups.items()])
98 keys = [arg._keys() for arg in args]
---> 99 results = get(dsk, keys, **kwargs)
100 return tuple(a._finalize(a, r) for a, r in zip(args, results))
101
/Users/shoyer/dev/dask/dask/threaded.pyc in get(dsk, result, cache, num_workers, **kwargs)
55 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
56 cache=cache, queue=queue, get_id=_thread_get_id,
---> 57 **kwargs)
58
59 return results
/Users/shoyer/dev/dask/dask/async.pyc in get_async(apply_async, num_workers, dsk, result, cache, queue, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, **kwargs)
480 _execute_task(task, data) # Re-execute locally
481 else:
--> 482 raise(remote_exception(res, tb))
483 state['cache'][key] = res
484 finish_task(dsk, key, state, results, keyorder.get)
ValueError: All-NaN slice encountered
Traceback
---------
File "dask/async.py", line 262, in execute_task
result = _execute_task(task, data)
File "dask/async.py", line 245, in _execute_task
return func(*args2)
File "dask/array/reductions.py", line 367, in argreduce
return (func(x, axis=axis), argfunc(x, axis=axis))
File "dask/array/chunk.py", line 25, in keepdims_wrapped_callable
r = a_callable(x, axis=axis, *args, **kwargs)
File "/Users/shoyer/miniconda/envs/dask-dev/lib/python2.7/site-packages/numpy/lib/nanfunctions.py", line 420, in nanargmax
raise ValueError("All-NaN slice encountered")
``` | dask/dask | diff --git a/dask/array/tests/test_reductions.py b/dask/array/tests/test_reductions.py
index 7b734416f..2b1a08437 100644
--- a/dask/array/tests/test_reductions.py
+++ b/dask/array/tests/test_reductions.py
@@ -162,6 +162,26 @@ def test_arg_reductions(dfunc, func):
assert eq(dfunc(a2, 0, split_every=2), func(x2, 0))
[email protected](['dfunc', 'func'],
+ [(da.nanargmin, np.nanargmin), (da.nanargmax, np.nanargmax)])
+def test_nanarg_reductions(dfunc, func):
+ x = np.random.random((10, 10, 10))
+ x[5] = np.nan
+ a = da.from_array(x, chunks=(3, 4, 5))
+ assert eq(dfunc(a), func(x))
+ assert eq(dfunc(a, 0), func(x, 0))
+ with pytest.raises(ValueError):
+ dfunc(a, 1).compute()
+
+ with pytest.raises(ValueError):
+ dfunc(a, 2).compute()
+
+ x[:] = np.nan
+ a = da.from_array(x, chunks=(3, 4, 5))
+ with pytest.raises(ValueError):
+ dfunc(a).compute()
+
+
def test_reductions_2D_nans():
# chunks are a mix of some/all/no NaNs
x = np.full((4, 4), np.nan)
@@ -189,17 +209,18 @@ def test_reductions_2D_nans():
reduction_2d_test(da.nanmin, a, np.nanmin, x, False, False)
reduction_2d_test(da.nanmax, a, np.nanmax, x, False, False)
- # TODO: fix these tests, which fail with this error from NumPy:
- # ValueError("All-NaN slice encountered"), because some of the chunks
- # (not all) have all NaN values.
- # assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
- # assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
- # assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
- # assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
- # assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1))
- # assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1))
- # assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1))
- # assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1))
+ assert eq(da.argmax(a), np.argmax(x))
+ assert eq(da.argmin(a), np.argmin(x))
+ assert eq(da.nanargmax(a), np.nanargmax(x))
+ assert eq(da.nanargmin(a), np.nanargmin(x))
+ assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
+ assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
+ assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
+ assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
+ assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1))
+ assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1))
+ assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1))
+ assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1))
def test_moment():
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@6dc9229362f2d3b1dfa466a8a63831c3c832b4be#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- botocore=1.23.24=pyhd3eb1b0_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- distributed=2021.3.0=py36h06a4308_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- msgpack-python=1.0.2=py36hff7bd54_1
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- s3fs=2022.1.0=pyhd3eb1b0_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmax-nanargmax]",
"dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmax-nanargmax]",
"dask/array/tests/test_reductions.py::test_reductions_2D_nans"
]
| [
"dask/array/tests/test_reductions.py::test_reductions_2D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_2D[i4]"
]
| [
"dask/array/tests/test_reductions.py::test_reductions_1D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_1D[i4]",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmin-argmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmax-argmax]",
"dask/array/tests/test_reductions.py::test_moment",
"dask/array/tests/test_reductions.py::test_reductions_with_negative_axes",
"dask/array/tests/test_reductions.py::test_nan",
"dask/array/tests/test_reductions.py::test_0d_array",
"dask/array/tests/test_reductions.py::test_reduction_on_scalar",
"dask/array/tests/test_reductions.py::test_tree_reduce_depth",
"dask/array/tests/test_reductions.py::test_tree_reduce_set_options"
]
| []
| BSD 3-Clause "New" or "Revised" License | 461 | [
"dask/array/reductions.py"
]
| [
"dask/array/reductions.py"
]
|
|
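For dask__dask-1028 above, the essence of the fix is a chunk-level fallback: if `np.nanargmax` raises on an all-NaN chunk, retry with the NaNs replaced by `-inf` and let the aggregation step decide whether the whole slice was NaN. The sketch below is a minimal, NumPy-only rendering of that idea; the function name and the small demo are illustrative, not dask's actual internals.

```
import numpy as np


def chunk_nanargmax(x, axis):
    """nanargmax for a single chunk that tolerates all-NaN slices."""
    try:
        return np.nanargmax(x, axis)
    except ValueError:
        # All-NaN slice inside this chunk: substitute -inf so argmax still
        # returns an index; an aggregation step (as in the patch above) can
        # later see that the matching nanmax value is NaN and raise once for
        # the whole array instead of per chunk.
        return np.nanargmax(np.where(np.isnan(x), -np.inf, x), axis)


x = np.array([[1.0, np.nan],
              [np.nan, np.nan]])
print(chunk_nanargmax(x, axis=1))  # [0 0] instead of "All-NaN slice encountered"
```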
MITLibraries__slingshot-11 | 755a842371e63a1c70fde8568523b9b5db0d304e | 2016-03-07 17:04:21 | 755a842371e63a1c70fde8568523b9b5db0d304e | diff --git a/slingshot/cli.py b/slingshot/cli.py
index 55efcf6..047d98e 100644
--- a/slingshot/cli.py
+++ b/slingshot/cli.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
+from datetime import datetime
import os
import shutil
@@ -58,4 +59,8 @@ def run(layers, store, url, namespace, username, password):
submit(zf, url, auth)
except Exception as e:
shutil.rmtree(bag, ignore_errors=True)
+ click.echo("%sZ: %s failed with %r" %
+ (datetime.utcnow().isoformat(), data_layer, e))
raise e
+ click.echo("%sZ: %s uploaded" % (datetime.utcnow().isoformat(),
+ data_layer))
| Add logging | MITLibraries/slingshot | diff --git a/tests/test_cli.py b/tests/test_cli.py
index 1aff724..61eca94 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -59,3 +59,21 @@ def test_run_uses_authentication(runner, layers_dir):
'--username', 'foo', '--password', 'bar'])
assert m.request_history[0].headers['Authorization'] == \
'Basic Zm9vOmJhcg=='
+
+
+def test_run_logs_uploaded_layers_to_stdout(runner, layers_dir):
+ with requests_mock.Mocker() as m:
+ store = tempfile.mkdtemp()
+ m.post('http://localhost')
+ res = runner.invoke(main, ['run', layers_dir, store,
+ 'http://localhost'])
+ assert 'SDE_DATA_BD_A8GNS_2003.zip uploaded' in res.output
+
+
+def test_run_logs_failed_layers_to_stdout(runner, layers_dir):
+ with requests_mock.Mocker() as m:
+ store = tempfile.mkdtemp()
+ m.post('http://localhost', status_code=500)
+ res = runner.invoke(main, ['run', layers_dir, store,
+ 'http://localhost'])
+ assert 'SDE_DATA_BD_A8GNS_2003.zip failed' in res.output
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"requests_mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bagit==1.5.4
certifi==2025.1.31
charset-normalizer==3.4.1
click==6.3
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.32.3
requests-mock==1.12.1
-e git+https://github.com/MITLibraries/slingshot.git@755a842371e63a1c70fde8568523b9b5db0d304e#egg=slingshot
tomli==2.2.1
urllib3==2.3.0
| name: slingshot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bagit==1.5.4
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==6.3
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.32.3
- requests-mock==1.12.1
- tomli==2.2.1
- urllib3==2.3.0
- wheel==0.26.0
prefix: /opt/conda/envs/slingshot
| [
"tests/test_cli.py::test_run_logs_uploaded_layers_to_stdout",
"tests/test_cli.py::test_run_logs_failed_layers_to_stdout"
]
| []
| [
"tests/test_cli.py::test_run_submits_bags",
"tests/test_cli.py::test_run_leaves_bag_on_success",
"tests/test_cli.py::test_run_removes_bag_on_failure",
"tests/test_cli.py::test_run_uses_supplied_namespace",
"tests/test_cli.py::test_run_uses_authentication"
]
| []
| Apache License 2.0 | 464 | [
"slingshot/cli.py"
]
| [
"slingshot/cli.py"
]
|
|
MITLibraries__slingshot-12 | 6dca99653150369b2b5b422292acd1e0b1fb131e | 2016-03-07 18:19:18 | 6dca99653150369b2b5b422292acd1e0b1fb131e | diff --git a/slingshot/cli.py b/slingshot/cli.py
index 047d98e..75ad0c9 100644
--- a/slingshot/cli.py
+++ b/slingshot/cli.py
@@ -28,7 +28,9 @@ def main():
@click.option('--username', help="Username for kepler submission.")
@click.option('--password',
help="Password for kepler submission. Omit for prompt.")
-def run(layers, store, url, namespace, username, password):
[email protected]('--fail-after', default=5,
+ help="Stop after number of consecutive failures. Default is 5.")
+def run(layers, store, url, namespace, username, password, fail_after):
"""Create and upload bags to the specified endpoint.
This script will create bags from all the layers in the LAYERS
@@ -50,6 +52,7 @@ def run(layers, store, url, namespace, username, password):
auth = username, password
if not all(auth):
auth = None
+ failures = 0
for data_layer in uploadable(layers, store):
bag = prep_bag(os.path.join(layers, data_layer), store)
try:
@@ -57,10 +60,15 @@ def run(layers, store, url, namespace, username, password):
bag_name = make_uuid(os.path.basename(bag), namespace)
with temp_archive(bag, bag_name) as zf:
submit(zf, url, auth)
+ click.echo("%sZ: %s uploaded" % (datetime.utcnow().isoformat(),
+ data_layer))
+ failures = 0
except Exception as e:
shutil.rmtree(bag, ignore_errors=True)
+ failures += 1
click.echo("%sZ: %s failed with %r" %
(datetime.utcnow().isoformat(), data_layer, e))
- raise e
- click.echo("%sZ: %s uploaded" % (datetime.utcnow().isoformat(),
- data_layer))
+ if failures >= fail_after:
+ click.echo("%sZ: Maximum number of consecutive failures (%d)" %
+ (datetime.utcnow().isoformat(), failures))
+ raise e
| Set script failure conditions
The script should not completely fail for one bad layer, but rather log it and continue. It should fail immediately in certain cases, such as, 404 and 401. It should probably fail after some number of layers have failed consecutively, since that would likely indicate something more fundamental is wrong. | MITLibraries/slingshot | diff --git a/tests/test_cli.py b/tests/test_cli.py
index 61eca94..e32903d 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -77,3 +77,12 @@ def test_run_logs_failed_layers_to_stdout(runner, layers_dir):
res = runner.invoke(main, ['run', layers_dir, store,
'http://localhost'])
assert 'SDE_DATA_BD_A8GNS_2003.zip failed' in res.output
+
+
+def test_run_fails_after_consecutive_failures(runner, layers_dir):
+ with requests_mock.Mocker() as m:
+ store = tempfile.mkdtemp()
+ m.post('http://localhost', status_code=500)
+ res = runner.invoke(main, ['run', layers_dir, store,
+ 'http://localhost', '--fail-after', 1])
+ assert 'Maximum number of consecutive failures' in res.output
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"requests_mock"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bagit==1.5.4
certifi==2025.1.31
charset-normalizer==3.4.1
click==6.3
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.32.3
requests-mock==1.12.1
-e git+https://github.com/MITLibraries/slingshot.git@6dca99653150369b2b5b422292acd1e0b1fb131e#egg=slingshot
tomli==2.2.1
urllib3==2.3.0
| name: slingshot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bagit==1.5.4
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==6.3
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.32.3
- requests-mock==1.12.1
- tomli==2.2.1
- urllib3==2.3.0
- wheel==0.26.0
prefix: /opt/conda/envs/slingshot
| [
"tests/test_cli.py::test_run_fails_after_consecutive_failures"
]
| []
| [
"tests/test_cli.py::test_run_submits_bags",
"tests/test_cli.py::test_run_leaves_bag_on_success",
"tests/test_cli.py::test_run_removes_bag_on_failure",
"tests/test_cli.py::test_run_uses_supplied_namespace",
"tests/test_cli.py::test_run_uses_authentication",
"tests/test_cli.py::test_run_logs_uploaded_layers_to_stdout",
"tests/test_cli.py::test_run_logs_failed_layers_to_stdout"
]
| []
| Apache License 2.0 | 465 | [
"slingshot/cli.py"
]
| [
"slingshot/cli.py"
]
|
|
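The MITLibraries__slingshot-12 patch above implements the policy described in its issue: log each failed layer, keep going, and abort only after a run of consecutive failures. The sketch below isolates just that counter logic; `upload` is a stand-in for the real bag/submit pipeline and not part of slingshot's API.

```
def run(layers, upload, fail_after=5):
    """Upload each layer, tolerating isolated failures.

    Aborts only once `fail_after` uploads in a row have failed, since a
    streak of failures usually means something systemic is wrong.
    """
    failures = 0
    for layer in layers:
        try:
            upload(layer)
            failures = 0          # a success resets the streak
        except Exception as exc:
            failures += 1
            print("%s failed with %r" % (layer, exc))
            if failures >= fail_after:
                raise


# Hypothetical usage: a no-op uploader, so the loop completes without raising.
run(["a.zip", "b.zip"], upload=lambda layer: None)
```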
PumucklOnTheAir__TestFramework-132 | 8d1c52e7c2569834f12f7a0739748e7333ca0099 | 2016-03-07 18:38:58 | 8d1c52e7c2569834f12f7a0739748e7333ca0099 | diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..5989d64
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,13 @@
+
+little text describing your pull request.
+
+
+Following tickets are finished:
+resolved #115
+resolved #116
+
+
+tested on Pi:
+- [ ] test_R_Server_VLAN.py
+- [ ] test_AP_*.py
+- [ ] test_A_Server_2.py
diff --git a/.travis.yml b/.travis.yml
index e596f5f..7f48a93 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,7 @@
language: python
python:
- "3.4"
+ - "3.5"
cache:
directories:
@@ -33,6 +34,7 @@ script:
- python -m pytest --cov-append --cov=./ framework_unittests/test_A_Server.py
- python -m pytest --cov-append --cov=./ test_server_alive.py
- python -m pytest --cov-append --cov=./ framework_unittests/test_A_IPC.py
+ - python -m pytest --cov-append --cov=./ framework_unittests/test_A_cli.py
- python -m pytest --ff -n auto --cov-append --cov=./ framework_unittests/test_AP_*.py
- "python -m pep8 --exclude=migrations --ignore=E501,E225,E126 */*.py *.py"
diff --git a/cli.py b/cli.py
index 7002ec1..634ae87 100755
--- a/cli.py
+++ b/cli.py
@@ -152,6 +152,22 @@ def create_parsers():
help="List of routers", nargs="+")
parser_online.add_argument("-a", "--all", action="store_true", default=False, help="Apply to all routers")
+ # subparser for test set
+ parser_test_set = subparsers.add_parser("start", help="Start a test set")
+ parser_test_set.add_argument("-r", "--routers", metavar="Router ID", type=int, default=[], action="store",
+ help="", nargs="+")
+ parser_test_set.add_argument("-a", "--all", action="store_true", default=False, help="Apply to all routers")
+ parser_test_set.add_argument("-s", "--set", metavar="Test set", type=str, default=[], action="store",
+ help="Name of set")
+
+ # subparser for test results
+ parser_test_result = subparsers.add_parser("results", help="Manage the test results")
+ parser_test_result.add_argument("-r", "--routers", metavar="Router ID", type=int, default=[], action="store",
+ help="", nargs="+")
+ parser_test_result.add_argument("-a", "--all", action="store_true", default=False, help="Apply to all routers")
+ parser_test_result.add_argument("-rm", "--remove", action="store_true", default=False,
+ help="Remove all results. Ignoring parameter -r.")
+
return parser
@@ -249,6 +265,32 @@ def main():
online_all = args.all
server_proxy.router_online(args.routers, online_all)
+ elif args.mode == "start":
+ """
+ subparse: start
+ """
+ if args.all:
+ router_id = -1
+ else:
+ router_id = args.routers[0]
+ set_name = args.set
+ server_proxy.start_test_set(router_id, set_name)
+
+ elif args.mode == "results":
+ """
+ subparse: results
+ """
+
+ if args.remove:
+ removed = server_proxy.delete_test_results()
+ print("Removed all " + str(removed) + " results.")
+ else:
+ if args.all:
+ router_id = -1
+ else:
+ router_id = args.routers[0]
+ util.print_test_results(server_proxy.get_test_results(router_id))
+
else:
logging.info("Check --help for help")
diff --git a/config/configmanager.py b/config/configmanager.py
index cc4985c..440d828 100644
--- a/config/configmanager.py
+++ b/config/configmanager.py
@@ -222,7 +222,7 @@ class ConfigManager:
return None
@staticmethod
- def get_test_config() -> []:
+ def _get_test_config() -> []:
"""
Read the Test Config file
@@ -232,29 +232,15 @@ class ConfigManager:
return ConfigManager.read_file(path)
@staticmethod
- def get_test_dict() -> []:
+ def get_test_sets() -> []:
"""
Read the Test Config file
:return: Dictionary with a specific output from the file
"""
- output = ConfigManager.get_test_config()
+ output = ConfigManager._get_test_config()
return output
- @staticmethod
- def get_test_list() -> []:
- """
- Read the Test Config file
-
- :return: List with a specific output from the file
- """
- output = ConfigManager.get_test_config()
- test_list = []
- for x in output:
- for v in x.values():
- test_list.append(v)
- return test_list
-
@staticmethod
def get_firmware_config() -> []:
"""
diff --git a/firmware/firmware_handler.py b/firmware/firmware_handler.py
index a28c6c8..e0c4dd0 100644
--- a/firmware/firmware_handler.py
+++ b/firmware/firmware_handler.py
@@ -138,6 +138,8 @@ class FirmwareHandler:
hashs = []
non_parsed_firmwares = self._read_firmwares_from_manifest(release_model)
for firmware in non_parsed_firmwares:
+ if firmware.find("---\n") != -1: # skip the end of the file
+ continue
firmware_name = "gluon" + firmware.split("gluon")[1].split("-sysupgrade")[0] + "-" + \
FirmwareHandler.UPDATE_TYPE + "." + firmware.split(".")[-1].replace("\n", "")
hash_firmware = firmware.split(' ')[4]
diff --git a/requirements.txt b/requirements.txt
index 33608a8..595564a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,18 +1,17 @@
-pyflakes
-pep8
-pyyaml
-typing
-paramiko
-pyroute2
-pylint
-recommonmark
-sphinx
-selenium
-sphinx_rtd_theme
-pytest-cov
-pytest-cache
-pytest-xdist
-codecov
-pycrypto
-ecdsa
-ipaddress
\ No newline at end of file
+pyflakes==1.0.0
+pep8==1.6.2
+PyYAML==3.11
+typing==3.5.0.1
+paramiko==1.16.0
+pyroute2==0.3.15
+pylint==1.5.3
+recommonmark==0.4.0
+Sphinx==1.3.3
+sphinx-rtd-theme==0.1.9
+selenium==2.48.0
+pytest-cov==2.2.0
+pytest-cache==1.0
+pytest-xdist==1.13.1
+pycrypto==2.6.1
+ecdsa==0.13
+ipaddress==1.0.16
\ No newline at end of file
diff --git a/server/server.py b/server/server.py
index d494619..9c018da 100755
--- a/server/server.py
+++ b/server/server.py
@@ -9,6 +9,7 @@ from log.loggersetup import LoggerSetup
import logging
from concurrent.futures import ThreadPoolExecutor
from unittest.result import TestResult
+import importlib
from threading import Event, Semaphore
from network.remote_system import RemoteSystem, RemoteSystemJob
from unittest import defaultTestLoader
@@ -45,7 +46,7 @@ class Server(ServerProxy):
# runtime vars
_routers = [] # all registered routers on the system
- _reports = [] # all test reports
+ _test_results = [] # all test reports in form (router.id, str(test), TestResult)
_stopped = False # marks if the server is still running
_max_subprocesses = 0 # will be set at start. describes how many Processes are needed in the Pool
@@ -56,6 +57,7 @@ class Server(ServerProxy):
_task_pool = None # multiprocessing.pool.Pool for task execution
_job_wait_executor = None # ThreadPoolExecutor for I/O handling on tasks
_semaphore_task_management = Semaphore(1)
+ _test_sets = {} # Dict[List[str]]
# NVAssistent
_nv_assistent = None
@@ -135,6 +137,7 @@ class Server(ServerProxy):
def __load_configuration(cls):
logging.debug("Load configuration")
cls._routers = ConfigManager.get_router_manual_list()
+ cls._test_sets = ConfigManager.get_test_sets()
@classmethod
def stop(cls) -> None:
@@ -247,31 +250,28 @@ class Server(ServerProxy):
return cls.__start_task(remote_sys, remote_job)
@classmethod
- def start_test(cls, router_id: int, test_name: str) -> bool:
+ def start_test_set(cls, router_id: int, test_set_name: str) -> bool:
"""
Start an specific test on a router
- :param router_id: The id of the router on which the test will run
- :param test_name: The name of the test to execute
+ :param router_id: The id of the router on which the test will run.
+ If id is -1 the test will be executed on all routers.
+ :param test_set_name: The name of the test set to execute
:return: True if test was successful added in the queue
"""
- router = cls.get_router_by_id(router_id)
- if router is None:
- logging.error("Router ID unknown")
- return False
-
- # TODO: Testverwaltung - ermittlung des passenden Tests #36
- # cls.get_test_by_name
- from firmware_tests.connection_test import ConnectionTest, VeryLongTest
- if test_name == "ConnectionTest":
- demo_test = ConnectionTest # Important: Param is a class and not an object
- elif test_name == "VeryLongTest":
- demo_test = VeryLongTest
- else:
- logging.error("Testname unknown")
- return False
- return cls.__start_task(router, demo_test)
+ for file_name in cls._test_sets[test_set_name]:
+ module = importlib.import_module("firmware_tests." + file_name)
+ import inspect
+
+ for name, obj in inspect.getmembers(module):
+ if inspect.isclass(obj) and issubclass(obj, FirmwareTest) and name != "FirmwareTest":
+ if router_id == -1:
+ for router in cls._routers:
+ cls.__start_task(router, obj)
+ else:
+ cls.__start_task(cls.get_router_by_id(router_id), obj)
+ return True
@classmethod
def __start_task(cls, remote_sys: RemoteSystem, job: Union[RemoteSystemJobClass, RemoteSystemJob]) -> bool:
@@ -282,7 +282,7 @@ class Server(ServerProxy):
:param remote_sys: the RemoteSystem
:param job: the Job
- :return: true if job directly started, false if
+ :return: true if job directly started, false if not
"""
assert(cls._pid == os.getpid())
# Check if it is the the same PID as the PID Process which started the ProcessPool
@@ -390,7 +390,7 @@ class Server(ServerProxy):
logging.debug("%sTest done " + str(test), LoggerSetup.get_log_deep(1))
logging.debug("%sFrom " + str(router), LoggerSetup.get_log_deep(2))
- cls._reports.append(result)
+ cls._test_results.append((router.id, str(test), result))
except Exception as e:
# TODO #105
logging.error("%sTest raised an Exception: " + str(e), LoggerSetup.get_log_deep(1))
@@ -401,11 +401,11 @@ class Server(ServerProxy):
# result.addError(None, (type(exception), exception, None))
# TODO exception handling for failed Tests
- cls._reports.append(result)
+ cls._test_results.append((router.id, str(test), result))
finally:
cls.set_running_task(router, None)
- # logging.debug(str(cls._reports))
+ # logging.debug(str(cls._test_results))
# start next test in the queue
cls.__start_task(router, None)
@@ -468,10 +468,6 @@ class Server(ServerProxy):
:return: List is a copy of the original list.
"""
- # check if list is still valid
- for router in cls._routers:
- assert isinstance(router, Router)
-
return cls._routers.copy()
@classmethod
@@ -502,13 +498,33 @@ class Server(ServerProxy):
return cls._running_tasks.copy()
@classmethod
- def get_reports(cls) -> []:
+ def get_test_results(cls, router_id: int = -1) -> [(int, str, TestResult)]:
+ """
+ Returns the firmware test results for the router
+
+ :param router_id: the specific router or all router if id = -1
+ :return: List of results
+ """
+
+ if router_id == -1:
+ return cls._test_results
+ else:
+ results = []
+ for result in cls._test_results:
+ if result[0] == router_id:
+ results.append(result)
+ return results
+
+ @classmethod
+ def delete_test_results(cls) -> int:
"""
- Returns the test results.
+ Remove all test results
- :return: List of reports
+ :return: Number of deleted results
"""
- return cls._reports
+ size_results = len(cls._test_results)
+ cls._test_results = []
+ return size_results
@classmethod
def get_tests(cls) -> List[FirmwareTestClass]:
diff --git a/server/serverproxy.py b/server/serverproxy.py
index 102cb67..a072aa8 100644
--- a/server/serverproxy.py
+++ b/server/serverproxy.py
@@ -1,6 +1,7 @@
from abc import ABCMeta, abstractclassmethod
from typing import List
from router.router import Router
+from unittest import TestResult
class ServerProxy(metaclass=ABCMeta):
@@ -11,12 +12,12 @@ class ServerProxy(metaclass=ABCMeta):
the return value is given by copy and not by reference!
"""""
@abstractclassmethod
- def start_test(self, router_id: int, test_name: str) -> bool:
+ def start_test_set(self, router_id: int, test_set_name: str) -> bool:
"""
Start an specific test on a router
:param router_id: The id of the router on which the test will run
- :param test_name: The name of the test to execute
+ :param test_set_name: The name of the test set to execute
:return: True if test was successful added in the queue
"""
pass
@@ -50,11 +51,21 @@ class ServerProxy(metaclass=ABCMeta):
pass
@abstractclassmethod
- def get_reports(self) -> []:
+ def get_test_results(self, router_id: int = -1) -> [(int, str, TestResult)]:
"""
- Returns the test results.
+ Returns the firmware test results for the router
- :return: List of reports
+ :param router_id: the specific router or all router if id = -1
+ :return: List of results
+ """
+ pass
+
+ @abstractclassmethod
+ def delete_test_results(self) -> int:
+ """
+ Remove all test results
+
+ :return: Number of deleted results
"""
pass
diff --git a/util/cli_util.py b/util/cli_util.py
index abac537..ad95e15 100644
--- a/util/cli_util.py
+++ b/util/cli_util.py
@@ -1,4 +1,5 @@
import logging
+from unittest import TestResult
class CLIUtil:
@@ -11,6 +12,7 @@ class CLIUtil:
def print_dynamic_table(content, headers):
"""
prints a dynamically formatted table
+
:param content: list of lists of data
:param headers: list of headers
"""
@@ -48,6 +50,7 @@ class CLIUtil:
def print_status(self, routers, headers):
"""
prints the status of all routers
+
:param routers: list of routers
:param headers: list of headers
"""
@@ -62,6 +65,7 @@ class CLIUtil:
def print_header():
"""
prints header for the command line
+
:return:
"""
print("\v\t" + OutputColors.bold + "Freifunk Testframework\v" + OutputColors.clear)
@@ -70,6 +74,7 @@ class CLIUtil:
def return_progressbar(router, tid, percentage):
"""
returns the visual progress of a test on a router
+
:param router: router name
:param tid: ID of test
:param percentage: progress of test in percent
@@ -84,6 +89,7 @@ class CLIUtil:
def print_list(content):
"""
prints a simple list(table) sorted by the first row and formatted
+
:param content: list of list (table)
:return:
"""
@@ -107,6 +113,7 @@ class CLIUtil:
def print_router(router_list):
"""
prints a detailed list of info on a router
+
:param router_list: list of info on router
:return:
"""
@@ -114,6 +121,25 @@ class CLIUtil:
for elem in router_list:
print("{:<15}{:<20}".format(str(elem[0]) + ":", str(elem[1])))
+ @staticmethod
+ def print_test_results(result_list: [(int, str, TestResult)]):
+ """
+ Prints a the TestResult list
+
+ :param result_list:
+ :return:
+ """
+ headers = ["Router ID", "Test", "(S|F|E)"]
+ content = []
+ print("------Testresults------")
+ for result in result_list:
+ content.append([str(result[0]), result[1], "(" + str(result[2].testsRun - len(result[2].failures) -
+ len(result[2].errors)) +
+ "|" + str(len(result[2].failures)) +
+ "|" + str(len(result[2].errors)) + ")"])
+
+ CLIUtil.print_dynamic_table(content, headers)
+
class OutputColors:
green = '\033[92m'
| (29) TestResult Verwaltung
Ziel:
Testergebnisse sollten verwaltet werden können über die CLI.
Aufgabe:
Die CLI muss TestResults ausgeben können und sie auch ggf. löschen können.
MetaDaten zum dazugehörigen Test können womöglich schon von der Testverwaltung #36 geladen werden. Beim Anzeigen der Tests oder direkt nach der Ausführung + Speicherung.
Vor der Umsetzung ist zu überlegen, ob die TestResults über das IPC bezogen werden oder über das Dateisystem oder einer externen DB.
Ggf. kann also in diesem Ticket auch die QS Maßnahme für die Persistenz der TestResults erfolgen und somit insgesamt Zeit gespart werden.
Infos:
Die Testergebnisse werden während der Laufzeit im Server in der Liste _reports gespeichert.
DatenTyp: unittest.TestResult
Unteraufgaben:
- [x] TestResults ausgeben auf der CLI
- [x] TestResults löschen über die CLI
Neue UserStory/Tickets:
- [ ] Optional: TestResults persistent
- [ ] Optional: MetaDaten zum TestResult | PumucklOnTheAir/TestFramework | diff --git a/config/test_config.yaml b/config/test_config.yaml
index b977360..3d8be05 100644
--- a/config/test_config.yaml
+++ b/config/test_config.yaml
@@ -1,3 +1,3 @@
--- # config for the Test-Files
-- {Test_Name: Test1, Params: [Param1]}
-- {Test_Name: Test2, Params: [Param1, Param2]}
\ No newline at end of file
+set_1: [demo_1]
+set_2: [demo_2, demo_1]
\ No newline at end of file
diff --git a/firmware_tests/__init__.py b/firmware_tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/firmware_tests/connection_test.py b/firmware_tests/connection_test.py
deleted file mode 100644
index e509601..0000000
--- a/firmware_tests/connection_test.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from server.test import FirmwareTest
-import os
-
-
-class ConnectionTest(FirmwareTest):
- """
- This is a demo test - only to test the functionality of the framework itself.
- """""
- def test_self_router(self):
- # print(str(self.router))
- assert self.remote_system.id == 0
-
- def test_ping_static(self):
- # print("connection test: " + str(getpid()))
- # os.system("ip a")
- response = os.system("ping -t 5 -c 1 " + "www.p8h.de")
- assert response == 0 # not working because no normal eth0 stack available
- # from subprocess import Popen, PIPE
- # process = Popen(["ip", "a"], stdout=PIPE, stderr=PIPE)
- # stdout, sterr = process.communicate()
- # print(str(stdout.decode('utf-8')))
- # response = os.system("ping -c 1 " + "p8h.de")
- # assert response == #0
-
- def test_ping_router(self):
- hostname = self.remote_system.ip
- response = os.system("ping -t 5 -c 1 " + hostname)
- print(hostname)
- assert response == 0
-
-
-class VeryLongTest(FirmwareTest):
- """
- This is a demo test - only to test the functionality of the framework itself and it is very short..
- """""
- def test_very_long_test(self):
- lol = True
- assert lol
- assert not not lol
-
- def test_buzz1(self):
- lol = True
- assert lol
- assert not not lol
-
- def test_foo2(self):
- lol = True
- assert lol
- assert not not lol
diff --git a/firmware_tests/demo_1.py b/firmware_tests/demo_1.py
new file mode 100644
index 0000000..6f3a139
--- /dev/null
+++ b/firmware_tests/demo_1.py
@@ -0,0 +1,14 @@
+from server.test import FirmwareTest
+import os
+
+
+class ConnectionTest(FirmwareTest):
+ """
+ This is a demo test - only to test the functionality of the framework itself.
+ """""
+ def test_self_router(self):
+ assert self.remote_system.id == 0
+
+ def test_ping_local(self):
+ response = os.system("ping -t 5 -c 1 " + "localhost")
+ assert response == 0
diff --git a/firmware_tests/demo_2.py b/firmware_tests/demo_2.py
new file mode 100644
index 0000000..a63e2b6
--- /dev/null
+++ b/firmware_tests/demo_2.py
@@ -0,0 +1,21 @@
+from server.test import FirmwareTest
+
+
+class StupidTest(FirmwareTest):
+ """
+ This is a demo test - only to test the functionality of the framework itself and it is very short..
+ """""
+ def test_not_very_long_test(self):
+ lol = True
+ assert lol
+ assert not not lol
+
+ def test_buzz1(self):
+ lol = True
+ assert lol
+ assert not not lol
+
+ def test_foo2(self):
+ lol = True
+ assert lol
+ assert not not lol
diff --git a/framework_unittests/configs/config_no_vlan/server_config.yaml b/framework_unittests/configs/config_no_vlan/server_config.yaml
index 37a304b..c38a8d8 100644
--- a/framework_unittests/configs/config_no_vlan/server_config.yaml
+++ b/framework_unittests/configs/config_no_vlan/server_config.yaml
@@ -4,7 +4,5 @@ Server_Name: TestServer,
# set the level for what you want to log: NOTSET: 0, DEBUG: 10, INFO: 20, WARNING: 30, ERROR: 40, CRITICAL: 50
Log_Level: 10,
# set True or False
-Vlan_On: False,
-# set True or False to use output on every console
-Use_Console_Output: False
+Vlan_On: False
}
\ No newline at end of file
diff --git a/framework_unittests/configs/config_no_vlan/test_config.yaml b/framework_unittests/configs/config_no_vlan/test_config.yaml
index b977360..3d8be05 100644
--- a/framework_unittests/configs/config_no_vlan/test_config.yaml
+++ b/framework_unittests/configs/config_no_vlan/test_config.yaml
@@ -1,3 +1,3 @@
--- # config for the Test-Files
-- {Test_Name: Test1, Params: [Param1]}
-- {Test_Name: Test2, Params: [Param1, Param2]}
\ No newline at end of file
+set_1: [demo_1]
+set_2: [demo_2, demo_1]
\ No newline at end of file
diff --git a/framework_unittests/test_AP_Yaml.py b/framework_unittests/test_AP_Yaml.py
index 23863a2..f639ac8 100644
--- a/framework_unittests/test_AP_Yaml.py
+++ b/framework_unittests/test_AP_Yaml.py
@@ -79,31 +79,6 @@ class MyTestCase(unittest.TestCase):
data = ConfigManager.get_server_property("Server_Name")
self.assertEqual(data, "TestServer", "test_Yaml: Wrong size of the List")
- # test tests
- def test_config_test(self):
- """
- Tests the test config
- :return: Tests results
- """
- data = ConfigManager.get_test_config()
- self.assertEqual(len(data), 2, "test_Yaml: Wrong size of the List")
-
- def test_config_test_dict(self):
- """
- Tests the test config
- :return: Tests results
- """
- data = ConfigManager.get_test_dict()
- self.assertEqual(len(data), 2, "test_Yaml: Wrong size of the List")
-
- def test_config_test_list(self):
- """
- Tests the test config
- :return: Tests results
- """
- data = ConfigManager.get_test_list()
- self.assertEqual(len(data), 4, "test_Yaml: Wrong size of the List")
-
# firmware tests
def test_firmware_property(self):
"""
diff --git a/framework_unittests/test_A_cli_util.py b/framework_unittests/test_AP_cli_util.py
similarity index 100%
rename from framework_unittests/test_A_cli_util.py
rename to framework_unittests/test_AP_cli_util.py
diff --git a/framework_unittests/test_A_IPC.py b/framework_unittests/test_A_IPC.py
index 9f370dd..790d852 100644
--- a/framework_unittests/test_A_IPC.py
+++ b/framework_unittests/test_A_IPC.py
@@ -38,7 +38,7 @@ class TestIPC(TestCase):
ipc_client.connect(False)
server_proxy = ipc_client.get_server_proxy()
- rep = server_proxy.get_reports()
+ rep = server_proxy.get_test_results()
# print(rep)
assert rep[1].text == "test"
@@ -76,7 +76,7 @@ class DummyServer(ServerProxy):
return ["lol"]
@classmethod
- def get_reports(cls) -> []:
+ def get_test_results(cls) -> []:
d = DummyObject("test")
return [id(d), d]
@@ -127,3 +127,11 @@ class DummyServer(ServerProxy):
@classmethod
def get_routers_task_queue_size(cls, router_id: int) -> [str]:
pass
+
+ @classmethod
+ def start_test_set(cls, router_id: int, test_set_name: str) -> bool:
+ pass
+
+ @classmethod
+ def delete_test_results(cls) -> int:
+ pass
diff --git a/framework_unittests/test_A_Server_2.py b/framework_unittests/test_A_Server_2.py
index 870fccf..e507cc8 100644
--- a/framework_unittests/test_A_Server_2.py
+++ b/framework_unittests/test_A_Server_2.py
@@ -11,7 +11,7 @@ import socket
def block_until_server_is_online():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("wait", flush=True)
- while not not sock.connect_ex(('localhost', 5000)):
+ while sock.connect_ex(('localhost', 5000)):
time.sleep(3)
print('.', end="", flush=True)
sock.close()
@@ -32,7 +32,7 @@ class ServerCore(object):
for i in range(2): # do it two times to be sure
routers = server_proxy.get_routers()
for router in routers:
- while not not server_proxy.get_routers_task_queue_size(router.id):
+ while server_proxy.get_routers_task_queue_size(router.id):
time.sleep(2)
print('.', end="", flush=True)
@@ -47,7 +47,7 @@ class ServerCore(object):
for i in range(2): # do it two times to be sure
routers = self.server_proxy.get_routers()
for router in routers:
- while not not self.server_proxy.get_routers_task_queue_size(router.id):
+ while self.server_proxy.get_routers_task_queue_size(router.id):
time.sleep(2)
print('.', end="", flush=True)
@@ -56,38 +56,54 @@ class ServerCore(object):
assert len(routers) != 0
assert isinstance(routers[0], Router)
- def test_little_self_check(self):
- started = self.server_proxy.start_test(0, "ConnectionTest")
+ def test_test_set(self):
+ started = self.server_proxy.start_test_set(0, "set_2")
assert started
# wait until tests are done, assumes that exactly two tests are already finished
- while not self.server_proxy.get_reports():
+ while not len(self.server_proxy.get_test_results()) == 2:
time.sleep(2)
print('.', end="", flush=True)
- reports = self.server_proxy.get_reports()
- assert len(reports) != 0
- assert len(reports[-1].errors) == 0 # check last report
+ reports = self.server_proxy.get_test_results()
+ assert len(reports) == 2
+ assert len(reports[-1][2].errors) == 0 # check last report
+
+ started = self.server_proxy.start_test_set(0, "set_1")
- def test_long_self_check(self):
- started = self.server_proxy.start_test(0, "ConnectionTest")
assert started
- started2 = self.server_proxy.start_test(0, "VeryLongTest")
- assert not started2
- if started and not started2:
- while not len(self.server_proxy.get_reports()) == 3:
- time.sleep(2)
- print('.', end="", flush=True)
+ # wait until tests are done, assumes that exactly two tests are already finished
+ while not len(self.server_proxy.get_test_results()) == 3:
+ time.sleep(2)
+ print('.', end="", flush=True)
+
+ reports = self.server_proxy.get_test_results()
+ assert len(reports) == 3
+ assert len(reports[2][-1].errors) == 0 # check last report
+
+ def test_test_results(self):
+ self.server_proxy.delete_test_results()
+
+ started = self.server_proxy.start_test_set(0, "set_2")
+ assert started
+
+ while not len(self.server_proxy.get_test_results()) == 2:
+ time.sleep(2)
+ print('.', end="", flush=True)
- self.server_proxy.stop_all_tasks()
+ reports = self.server_proxy.get_test_results()
- reports = self.server_proxy.get_reports()
- assert reports[-1].wasSuccessful() # check last report
+ for report in reports:
+ assert report[0] == 0
+ assert report[1] != ""
+ assert len(report[2].errors) == 0
- # def test_jobs(self):
- # raise NotImplemented
+ removed_results = self.server_proxy.delete_test_results()
+ assert len(reports) == removed_results
+ time.sleep(0.5)
+ assert not len(self.server_proxy.get_test_results())
class ServerTestCase2(ServerCore, unittest.TestCase):
diff --git a/framework_unittests/test_A_cli.py b/framework_unittests/test_A_cli.py
index 2184b19..f00dceb 100644
--- a/framework_unittests/test_A_cli.py
+++ b/framework_unittests/test_A_cli.py
@@ -124,7 +124,7 @@ class TestCLItoServerConnection(unittest.TestCase):
@staticmethod
def serverStartWithParams():
base_dir = os.path.dirname(os.path.dirname(__file__)) # This is your Project Root
- config_path = os.path.join(base_dir, 'tests/configs/config_no_vlan') # Join Project Root with config
+ config_path = os.path.join(base_dir, 'framework_unittests/configs/config_no_vlan')
Server.start(config_path=config_path)
def setUp(self):
@@ -136,8 +136,49 @@ class TestCLItoServerConnection(unittest.TestCase):
response = os.system(self.path_cli)
assert response == 0
+ def test_cli_start_test_set(self):
+ response = os.system(self.path_cli + " start -s set_1 -r 0")
+ assert response == 0
+
+ # assumes that there is only one test in the set
+ while self.server_proxy.get_routers_task_queue_size(0):
+ time.sleep(2)
+ print('.', end="", flush=True)
+ assert len(self.server_proxy.get_test_results())
+
+ response = os.system(self.path_cli + " start -s set_1 -a")
+ assert response == 0
+
+ routers = self.server_proxy.get_routers()
+ for router in routers:
+ while self.server_proxy.get_routers_task_queue_size(router.id):
+ time.sleep(2)
+ print('.', end="", flush=True)
+ assert len(self.server_proxy.get_test_results()) == len(routers) + 1
+
+ def test_cli_test_results(self):
+ assert not os.system(self.path_cli + " results -rm -a")
+ os.system(self.path_cli + " start -s set_1 -a")
+
+ routers = self.server_proxy.get_routers()
+ for router in routers:
+ while self.server_proxy.get_routers_task_queue_size(router.id):
+ time.sleep(2)
+ print('.', end="", flush=True)
+
+ response = os.system(self.path_cli + " results -r 0")
+ assert response == 0
+ response = os.system(self.path_cli + " results -a")
+ assert response == 0
+
+ response = os.system(self.path_cli + " results -rm")
+ assert response == 0
+ response = os.system(self.path_cli + " results -rm -a")
+ assert response == 0
+ response = os.system(self.path_cli + " results -rm -r 0")
+ assert response == 0
+ assert not len(self.server_proxy.get_test_results())
+
def test_get_version(self):
version = self.server_proxy.get_server_version()
assert version == Server.VERSION
-
- # TODO compare Version with Version from Server.VERSION and ./cli version (exists?)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 8
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-cache",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
astroid==3.3.9
attrs==25.3.0
babel==2.17.0
bcrypt==4.3.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
codecov==2.1.13
commonmark==0.9.1
coverage==7.8.0
cryptography==44.0.2
dill==0.3.9
docutils==0.21.2
ecdsa==0.19.1
exceptiongroup==1.2.2
execnet==2.1.1
h11==0.14.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
ipaddress==1.0.23
isort==6.0.1
Jinja2==3.1.6
MarkupSafe==3.0.2
mccabe==0.7.0
outcome==1.3.0.post0
packaging==24.2
paramiko==3.5.1
pep8==1.7.1
platformdirs==4.3.7
pluggy==1.5.0
pycparser==2.22
pycrypto==2.6.1
pyflakes==3.3.2
Pygments==2.19.1
pylint==3.3.6
PyNaCl==1.5.0
pyroute2==0.8.1
PySocks==1.7.1
pytest==8.3.5
pytest-cache==1.0
pytest-cov==6.0.0
pytest-xdist==3.6.1
PyYAML==6.0.2
recommonmark==0.7.1
requests==2.32.3
selenium==4.30.0
six==1.17.0
sniffio==1.3.1
snowballstemmer==2.2.0
sortedcontainers==2.4.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
-e git+https://github.com/PumucklOnTheAir/TestFramework.git@8d1c52e7c2569834f12f7a0739748e7333ca0099#egg=TestFramework
tomli==2.2.1
tomlkit==0.13.2
trio==0.29.0
trio-websocket==0.12.2
typing==3.7.4.3
typing_extensions==4.13.0
urllib3==2.3.0
websocket-client==1.8.0
wsproto==1.2.0
zipp==3.21.0
| name: TestFramework
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- astroid==3.3.9
- attrs==25.3.0
- babel==2.17.0
- bcrypt==4.3.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- codecov==2.1.13
- commonmark==0.9.1
- coverage==7.8.0
- cryptography==44.0.2
- dill==0.3.9
- docutils==0.21.2
- ecdsa==0.19.1
- exceptiongroup==1.2.2
- execnet==2.1.1
- h11==0.14.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipaddress==1.0.23
- isort==6.0.1
- jinja2==3.1.6
- markupsafe==3.0.2
- mccabe==0.7.0
- outcome==1.3.0.post0
- packaging==24.2
- paramiko==3.5.1
- pep8==1.7.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycparser==2.22
- pycrypto==2.6.1
- pyflakes==3.3.2
- pygments==2.19.1
- pylint==3.3.6
- pynacl==1.5.0
- pyroute2==0.8.1
- pysocks==1.7.1
- pytest==8.3.5
- pytest-cache==1.0
- pytest-cov==6.0.0
- pytest-xdist==3.6.1
- pyyaml==6.0.2
- recommonmark==0.7.1
- requests==2.32.3
- selenium==4.30.0
- six==1.17.0
- sniffio==1.3.1
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tomlkit==0.13.2
- trio==0.29.0
- trio-websocket==0.12.2
- typing==3.7.4.3
- typing-extensions==4.13.0
- urllib3==2.3.0
- websocket-client==1.8.0
- wsproto==1.2.0
- zipp==3.21.0
prefix: /opt/conda/envs/TestFramework
| [
"framework_unittests/test_A_IPC.py::TestIPC::test_proxy_object"
]
| [
"framework_unittests/test_A_Server_2.py::ServerTestCase2::test_test_set",
"framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_get_version",
"firmware_tests/demo_1.py::ConnectionTest::test_ping_local",
"firmware_tests/demo_1.py::ConnectionTest::test_self_router",
"framework_unittests/test_A_Server_2.py::ServerTestCase2::test_get_routers",
"framework_unittests/test_A_Server_2.py::ServerTestCase2::test_test_results",
"framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_cli_connected",
"framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_cli_start_test_set",
"framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_cli_test_results"
]
| [
"firmware_tests/demo_2.py::StupidTest::test_buzz1",
"firmware_tests/demo_2.py::StupidTest::test_foo2",
"firmware_tests/demo_2.py::StupidTest::test_not_very_long_test",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_router_auto",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_router_auto_with_length",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_router_manual",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server_dict",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server_list",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server_prop",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_config",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_dict",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_list",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_property",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_get_config_router_auto",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_get_config_router_manual",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_web_interface_config",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_web_interface_dict",
"framework_unittests/test_AP_Yaml.py::MyTestCase::test_web_interface_list",
"framework_unittests/test_AP_cli_util.py::MyTestCase::test_create_util",
"framework_unittests/test_A_IPC.py::TestIPC::test_exist_stop_event",
"framework_unittests/test_A_cli.py::CLITestClass::test_no_args",
"framework_unittests/test_A_cli.py::CLITestClass::test_reboot",
"framework_unittests/test_A_cli.py::CLITestClass::test_status",
"framework_unittests/test_A_cli.py::CLITestClass::test_sysupdate",
"framework_unittests/test_A_cli.py::CLITestClass::test_sysupgrade",
"framework_unittests/test_A_cli.py::CLITestClass::test_update_info",
"framework_unittests/test_A_cli.py::CLITestClass::test_webconfig"
]
| []
| null | 466 | [
"cli.py",
"firmware/firmware_handler.py",
"config/configmanager.py",
"server/server.py",
".travis.yml",
".github/PULL_REQUEST_TEMPLATE.md",
"util/cli_util.py",
"server/serverproxy.py",
"requirements.txt"
]
| [
"cli.py",
"firmware/firmware_handler.py",
"config/configmanager.py",
"server/server.py",
".travis.yml",
".github/PULL_REQUEST_TEMPLATE.md",
"util/cli_util.py",
"server/serverproxy.py",
"requirements.txt"
]
|
|
juju-solutions__charms.reactive-58 | 1ae5c5b86dff4cecfb261ebccbca7780e3546fa3 | 2016-03-07 23:46:27 | 59b07bd9447d8a4cb027ea2515089216b8d20549 | diff --git a/charms/reactive/__init__.py b/charms/reactive/__init__.py
index 1cdcca0..15209ec 100644
--- a/charms/reactive/__init__.py
+++ b/charms/reactive/__init__.py
@@ -36,6 +36,7 @@ from .decorators import only_once # noqa
from .decorators import when_file_changed # noqa
from . import bus
+from . import relations
from charmhelpers.core import hookenv
from charmhelpers.core import unitdata
@@ -59,6 +60,9 @@ def main(relation_name=None):
if 'JUJU_HOOK_NAME' not in os.environ:
os.environ['JUJU_HOOK_NAME'] = os.path.basename(sys.argv[0])
+ # update data to be backwards compatible after fix for issue 28
+ relations._migrate_conversations()
+
def flush_kv():
if unitdata._KV:
unitdata._KV.flush()
diff --git a/charms/reactive/relations.py b/charms/reactive/relations.py
index fe513ac..efdfcc4 100644
--- a/charms/reactive/relations.py
+++ b/charms/reactive/relations.py
@@ -22,6 +22,7 @@ from six import with_metaclass
from charmhelpers.core import hookenv
from charmhelpers.core import unitdata
from charmhelpers.cli import cmdline
+from charms.reactive.bus import get_states
from charms.reactive.bus import get_state
from charms.reactive.bus import set_state
from charms.reactive.bus import remove_state
@@ -377,17 +378,25 @@ class Conversation(object):
Conversations use the idea of :class:`scope` to determine how units and
services are grouped together.
"""
- def __init__(self, relation_name=None, units=None, scope=None):
- self.relation_name = relation_name or hookenv.relation_type()
- self.units = set(units or [hookenv.remote_unit()])
- self.scope = scope or hookenv.remote_unit()
+ def __init__(self, namespace, units, scope):
+ self.namespace = namespace
+ self.units = set(units)
+ self.scope = scope
+
+ @classmethod
+ def _key(cls, namespace, scope):
+ return 'reactive.conversations.%s.%s' % (namespace, scope)
@property
def key(self):
"""
The key under which this conversation will be stored.
"""
- return 'reactive.conversations.%s.%s' % (self.relation_name, self.scope)
+ return self._key(self.namespace, self.scope)
+
+ @property
+ def relation_name(self):
+ return self.namespace.split(':')[0]
@property
def relation_ids(self):
@@ -395,12 +404,13 @@ class Conversation(object):
The set of IDs of the specific relation instances that this conversation
is communicating with.
"""
- relation_ids = []
- services = set(unit.split('/')[0] for unit in self.units)
- for relation_id in hookenv.relation_ids(self.relation_name):
- if hookenv.remote_service_name(relation_id) in services:
- relation_ids.append(relation_id)
- return relation_ids
+ if self.scope == scopes.GLOBAL:
+ # the namespace is the relation name and this conv speaks for all
+ # connected instances of that relation
+ return hookenv.relation_ids(self.namespace)
+ else:
+ # the namespace is the relation ID
+ return [self.namespace]
@classmethod
def join(cls, scope):
@@ -414,14 +424,20 @@ class Conversation(object):
:meth:`~charmhelpers.core.unitdata.Storage.flush` be called.
"""
relation_name = hookenv.relation_type()
+ relation_id = hookenv.relation_id()
unit = hookenv.remote_unit()
service = hookenv.remote_service_name()
if scope is scopes.UNIT:
scope = unit
+ namespace = relation_id
elif scope is scopes.SERVICE:
scope = service
- key = 'reactive.conversations.%s.%s' % (relation_name, scope)
- conversation = cls.deserialize(unitdata.kv().get(key, {'scope': scope}))
+ namespace = relation_id
+ else:
+ namespace = relation_name
+ key = cls._key(namespace, scope)
+ data = unitdata.kv().get(key, {'namespace': namespace, 'scope': scope, 'units': []})
+ conversation = cls.deserialize(data)
conversation.units.add(unit)
unitdata.kv().set(key, cls.serialize(conversation))
return conversation
@@ -454,8 +470,8 @@ class Conversation(object):
Serialize a conversation instance for storage.
"""
return {
- 'relation_name': conversation.relation_name,
- 'units': list(conversation.units),
+ 'namespace': conversation.namespace,
+ 'units': sorted(conversation.units),
'scope': conversation.scope,
}
@@ -643,6 +659,48 @@ class Conversation(object):
return unitdata.kv().get(key, default)
+def _migrate_conversations():
+ """
+ Due to issue #28 (https://github.com/juju-solutions/charms.reactive/issues/28),
+ conversations needed to be updated to be namespaced per relation ID for SERVICE
+ and UNIT scope. To ensure backwards compatibility, this updates all convs in
+ the old format to the new.
+
+ TODO: Remove in 2.0.0
+ """
+ for key, data in unitdata.kv().getrange('reactive.conversations.').items():
+ if 'namespace' in data:
+ continue
+ relation_name = data.pop('relation_name')
+ if data['scope'] == scopes.GLOBAL:
+ data['namespace'] = relation_name
+ unitdata.kv().set(key, data)
+ else:
+ # split the conv based on the relation ID
+ new_keys = []
+ for rel_id in hookenv.relation_ids(relation_name):
+ new_key = Conversation._key(rel_id, data['scope'])
+ new_units = set(hookenv.related_units(rel_id)) & set(data['units'])
+ if new_units:
+ unitdata.kv().set(new_key, {
+ 'namespace': rel_id,
+ 'scope': data['scope'],
+ 'units': sorted(new_units),
+ })
+ new_keys.append(new_key)
+ unitdata.kv().unset(key)
+ # update the states pointing to the old conv key to point to the
+ # (potentially multiple) new key(s)
+ for state, value in get_states().items():
+ if not value:
+ continue
+ if key not in value['conversations']:
+ continue
+ value['conversations'].remove(key)
+ value['conversations'].extend(new_keys)
+ set_state(state, value)
+
+
@cmdline.subcommand()
def relation_call(method, relation_name=None, state=None, *args):
"""Invoke a method on the class implementing a relation via the CLI"""
diff --git a/tox.ini b/tox.ini
index cabe5d9..04bcddf 100644
--- a/tox.ini
+++ b/tox.ini
@@ -19,8 +19,8 @@ commands = flake8 --ignore=E501 {toxinidir}/charms
[testenv:py2]
basepython = python2
-commands = nosetests
+commands = nosetests {posargs}
[testenv:py3]
basepython = python3
-commands = nosetests
+commands = nosetests {posargs}
| charms.reactive.relations assumes the relation name is a key
The RelationBase and Conversation model assumes that a relation name is a key, rather than using the relation id + unit name. This will cause a reactive charm to fail, depending on how the related service chooses to use it. For example, here is a metadata.yaml snippet from a client that stores its data in three separate silos:
```yaml
requires:
session:
interface: pgsql
public:
interface: pgsql
confidential:
interface: pgsql
```
On the client side, everything is fine. However, on the server side we have this:
```yaml
provides:
db:
interface: pgsql
```
If the client is related three times to the same server, which would be common for development and staging, then the reactive model fails as the relation name can not be used as a key to identify which of the three db interfaces is needed. The relation id must be used. | juju-solutions/charms.reactive | diff --git a/tests/test_relations.py b/tests/test_relations.py
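A minimal sketch of the collision described here, and of the relation-id keying that the patch to `Conversation._key` above adopts. Only the `db` relation name, the `reactive.conversations.*` key format and the id-based namespace come from the issue and diff; the concrete relation ids and scope value below are illustrative.
```python
# Three relations from the same client service arrive on the provider side
# under one relation name ("db"), each with its own relation id.
relations = [
    {"relation_name": "db", "relation_id": "db:1", "scope": "client"},
    {"relation_name": "db", "relation_id": "db:2", "scope": "client"},
    {"relation_name": "db", "relation_id": "db:3", "scope": "client"},
]

# Old scheme: the conversation key is built from the relation name, so all
# three relation instances collapse onto a single stored conversation.
old_keys = {"reactive.conversations.{relation_name}.{scope}".format(**r) for r in relations}

# New scheme (Conversation._key in the diff above): the key is built from the
# relation id, so each related instance keeps its own conversation and data.
new_keys = {"reactive.conversations.{relation_id}.{scope}".format(**r) for r in relations}

assert len(old_keys) == 1  # the collision the issue describes
assert len(new_keys) == 3
```
For GLOBAL scope the patch keeps the relation name as the namespace, since that single conversation is meant to speak for every connected instance of the relation.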
index 197f210..c4977da 100644
--- a/tests/test_relations.py
+++ b/tests/test_relations.py
@@ -31,6 +31,7 @@ class TestAutoAccessors(unittest.TestCase):
kv_p = mock.patch.object(relations.unitdata, 'kv')
self.kv = kv_p.start()
self.addCleanup(kv_p.stop)
+ self.kv.return_value.get.side_effect = lambda k, v=None: v
def test_accessor_doc(self):
self.assertEqual(DummyRelationSubclass.field_one.__doc__, 'Get the field-one, if available, or None.')
@@ -151,6 +152,25 @@ class TestRelationBase(unittest.TestCase):
rb.conversation.assert_called_once_with('scope')
conv.remove_state.assert_called_once_with('state')
+ def test_is_state(self):
+ conv = mock.Mock(name='conv')
+ rb = relations.RelationBase('relname', 'unit')
+ rb.conversation = mock.Mock(return_value=conv)
+ rb.conversation.return_value.is_state.return_value = False
+ assert not rb.is_state('state', 'scope')
+ rb.conversation.assert_called_once_with('scope')
+ conv.is_state.assert_called_once_with('state')
+ rb.conversation.return_value.is_state.return_value = True
+ assert rb.is_state('state', 'scope')
+
+ def test_toggle_state(self):
+ conv = mock.Mock(name='conv')
+ rb = relations.RelationBase('relname', 'unit')
+ rb.conversation = mock.Mock(return_value=conv)
+ rb.toggle_state('state', 'active', 'scope')
+ rb.conversation.assert_called_once_with('scope')
+ conv.toggle_state.assert_called_once_with('state', 'active')
+
def test_set_remote(self):
conv = mock.Mock(name='conv')
rb = relations.RelationBase('relname', 'unit')
@@ -190,82 +210,86 @@ class TestConversation(unittest.TestCase):
if not hasattr(cls, 'assertItemsEqual'):
cls.assertItemsEqual = cls.assertCountEqual
- @mock.patch.object(relations, 'hookenv')
- def test_init(self, hookenv):
- hookenv.relation_type.return_value = 'relation_type'
- hookenv.remote_unit.return_value = 'remote_unit'
-
- c1 = relations.Conversation()
- self.assertEqual(c1.relation_name, 'relation_type')
- self.assertEqual(c1.units, set(['remote_unit']))
- self.assertEqual(c1.scope, 'remote_unit')
-
- c2 = relations.Conversation('rel', ['unit'], 'scope')
- self.assertEqual(c2.relation_name, 'rel')
- self.assertEqual(c2.units, set(['unit']))
- self.assertEqual(c2.scope, 'scope')
-
def test_key(self):
c1 = relations.Conversation('rel', ['unit'], 'scope')
self.assertEqual(c1.key, 'reactive.conversations.rel.scope')
- @mock.patch.object(relations.hookenv, 'remote_service_name')
@mock.patch.object(relations.hookenv, 'relation_ids')
- def test_relation_ids(self, relation_ids, remote_service_name):
- relation_ids.return_value = ['rel:1', 'rel:2', 'rel:3']
- remote_service_name.side_effect = ['foo', 'bar', 'foo']
- c1 = relations.Conversation('rel', ['foo/1', 'qux/1', 'foo/2'], 'scope')
- self.assertEqual(c1.relation_ids, ['rel:1', 'rel:3'])
- self.assertEqual(remote_service_name.call_args_list, [
- mock.call('rel:1'),
- mock.call('rel:2'),
- mock.call('rel:3'),
- ])
+ def test_relation_ids(self, relation_ids):
+ relation_ids.return_value = ['rel:1', 'rel:2']
+ c1 = relations.Conversation('rel:0', [], 'scope')
+ self.assertEqual(c1.relation_ids, ['rel:0'])
+ assert not relation_ids.called
+
+ c2 = relations.Conversation('rel', [], relations.scopes.GLOBAL)
+ self.assertEqual(c2.relation_ids, ['rel:1', 'rel:2'])
relation_ids.assert_called_once_with('rel')
@mock.patch.object(relations, 'unitdata')
@mock.patch.object(relations, 'hookenv')
def test_join(self, hookenv, unitdata):
hookenv.relation_type.return_value = 'relation_type'
+ hookenv.relation_id.return_value = 'relation_type:0'
hookenv.remote_unit.return_value = 'service/0'
hookenv.remote_service_name.return_value = 'service'
unitdata.kv().get.side_effect = [
- {'scope': 'scope'},
{
- 'relation_name': 'relation_type',
- 'units': {'service/1'},
+ 'namespace': 'relation_type',
+ 'units': [],
+ 'scope': 'my-global',
+ },
+ {
+ 'namespace': 'relation_type:0',
+ 'units': ['service/1'],
'scope': 'service',
},
- {'scope': 'service/0'},
+ {
+ 'namespace': 'relation_type:0',
+ 'units': [],
+ 'scope': 'service/0',
+ },
]
- conv = relations.Conversation.join('scope')
- self.assertEqual(conv.relation_name, 'relation_type')
+ conv = relations.Conversation.join('my-global')
+ self.assertEqual(conv.namespace, 'relation_type')
self.assertEqual(conv.units, {'service/0'})
- self.assertEqual(conv.scope, 'scope')
- unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.scope', {'scope': 'scope'})
- unitdata.kv().set.assert_called_with('reactive.conversations.relation_type.scope', {
- 'relation_name': 'relation_type',
+ self.assertEqual(conv.scope, 'my-global')
+ unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.my-global', {
+ 'namespace': 'relation_type',
+ 'scope': 'my-global',
+ 'units': [],
+ })
+ unitdata.kv().set.assert_called_with('reactive.conversations.relation_type.my-global', {
+ 'namespace': 'relation_type',
'units': ['service/0'],
- 'scope': 'scope',
+ 'scope': 'my-global',
})
conv = relations.Conversation.join(relations.scopes.SERVICE)
- self.assertEqual(conv.relation_name, 'relation_type')
+ self.assertEqual(conv.namespace, 'relation_type:0')
self.assertEqual(conv.units, {'service/0', 'service/1'})
self.assertEqual(conv.scope, 'service')
- unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.service', {'scope': 'service'})
- self.assertEqual(unitdata.kv().set.call_args[0][0], 'reactive.conversations.relation_type.service')
- self.assertEqual(unitdata.kv().set.call_args[0][1]['relation_name'], 'relation_type')
- self.assertItemsEqual(unitdata.kv().set.call_args[0][1]['units'], ['service/0', 'service/1'])
- self.assertEqual(unitdata.kv().set.call_args[0][1]['scope'], 'service')
+ unitdata.kv().get.assert_called_with('reactive.conversations.relation_type:0.service', {
+ 'namespace': 'relation_type:0',
+ 'scope': 'service',
+ 'units': [],
+ })
+ unitdata.kv().set.assert_called_with('reactive.conversations.relation_type:0.service', {
+ 'namespace': 'relation_type:0',
+ 'units': ['service/0', 'service/1'],
+ 'scope': 'service',
+ })
conv = relations.Conversation.join(relations.scopes.UNIT)
self.assertEqual(conv.relation_name, 'relation_type')
self.assertEqual(conv.units, {'service/0'})
self.assertEqual(conv.scope, 'service/0')
- unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.service/0', {'scope': 'service/0'})
- unitdata.kv().set.assert_called_with('reactive.conversations.relation_type.service/0', {
- 'relation_name': 'relation_type',
+ unitdata.kv().get.assert_called_with('reactive.conversations.relation_type:0.service/0', {
+ 'namespace': 'relation_type:0',
+ 'scope': 'service/0',
+ 'units': [],
+ })
+ unitdata.kv().set.assert_called_with('reactive.conversations.relation_type:0.service/0', {
+ 'namespace': 'relation_type:0',
'units': ['service/0'],
'scope': 'service/0',
})
@@ -278,7 +302,7 @@ class TestConversation(unittest.TestCase):
conv.depart()
self.assertEqual(conv.units, {'service/1'}, 'scope')
unitdata.kv().set.assert_called_with(conv.key, {
- 'relation_name': 'rel',
+ 'namespace': 'rel',
'units': ['service/1'],
'scope': 'scope',
})
@@ -293,16 +317,16 @@ class TestConversation(unittest.TestCase):
@mock.patch.object(relations, 'unitdata')
def test_load(self, unitdata):
unitdata.kv().get.side_effect = [
- {'relation_name': 'rel1', 'units': ['service/0'], 'scope': 'scope'},
+ {'namespace': 'rel:1', 'units': ['service/0'], 'scope': 'scope'},
None,
- {'relation_name': 'rel2', 'units': ['service/1'], 'scope': 'service'},
+ {'namespace': 'rel:2', 'units': ['service/1'], 'scope': 'service'},
]
convs = relations.Conversation.load(['key1', 'key2', 'key3'])
self.assertEqual(len(convs), 2)
- self.assertEqual(convs[0].relation_name, 'rel1')
+ self.assertEqual(convs[0].relation_name, 'rel')
self.assertEqual(convs[0].units, {'service/0'})
self.assertEqual(convs[0].scope, 'scope')
- self.assertEqual(convs[1].relation_name, 'rel2')
+ self.assertEqual(convs[1].relation_name, 'rel')
self.assertEqual(convs[1].units, {'service/1'})
self.assertEqual(convs[1].scope, 'service')
self.assertEqual(unitdata.kv().get.call_args_list, [
@@ -460,6 +484,91 @@ class TestConversation(unittest.TestCase):
kv().get.assert_called_once_with('reactive.conversations.rel.scope.local-data.foo', 'default')
+class TestMigrateConvs(unittest.TestCase):
+ @mock.patch.object(relations, 'set_state')
+ @mock.patch.object(relations, 'get_states')
+ @mock.patch.object(relations, 'hookenv')
+ @mock.patch.object(relations.unitdata, 'kv')
+ def test_migrate(self, kv, mhookenv, get_states, set_state):
+ kv().getrange.side_effect = [
+ {'reactive.conversations.rel:0.service': {
+ 'namespace': 'rel:0',
+ }},
+ {'reactive.conversations.rel.global': {
+ 'relation_name': 'rel',
+ 'scope': 'global',
+ 'units': ['service/0', 'service/1', 'service/3'],
+ }},
+ {'reactive.conversations.rel.service': {
+ 'relation_name': 'rel',
+ 'scope': 'service',
+ 'units': ['service/0', 'service/1', 'service/3'],
+ }},
+ {'reactive.conversations.rel.service/3': {
+ 'relation_name': 'rel',
+ 'scope': 'service/3',
+ 'units': ['service/3'],
+ }},
+ ]
+ mhookenv.relation_ids.return_value = ['rel:1', 'rel:2']
+ mhookenv.related_units.side_effect = [
+ ['service/0', 'service/2'], ['service/3'],
+ ['service/0', 'service/2'], ['service/3'],
+ ]
+ get_states.side_effect = [
+ {
+ 'rel.joined': {'conversations': ['reactive.conversations.rel.service']},
+ 'foo': None,
+ },
+ {
+ 'rel.joined': {'conversations': ['reactive.conversations.rel.service/3']},
+ 'foo': {'conversations': []},
+ },
+ ]
+ relations._migrate_conversations()
+ assert not kv().set.called
+
+ kv().set.reset_mock()
+ relations._migrate_conversations()
+ kv().set.assert_called_with('reactive.conversations.rel.global', {
+ 'namespace': 'rel',
+ 'scope': 'global',
+ 'units': ['service/0', 'service/1', 'service/3'],
+ })
+ assert not kv().unset.called
+ assert not set_state.called
+
+ kv().set.reset_mock()
+ relations._migrate_conversations()
+ kv().set.assert_any_call('reactive.conversations.rel:1.service', {
+ 'namespace': 'rel:1',
+ 'scope': 'service',
+ 'units': ['service/0'],
+ })
+ kv().set.assert_called_with('reactive.conversations.rel:2.service', {
+ 'namespace': 'rel:2',
+ 'scope': 'service',
+ 'units': ['service/3'],
+ })
+ kv().unset.assert_called_with('reactive.conversations.rel.service')
+ set_state.assert_called_with('rel.joined', {'conversations': [
+ 'reactive.conversations.rel:1.service',
+ 'reactive.conversations.rel:2.service',
+ ]})
+
+ kv().set.reset_mock()
+ relations._migrate_conversations()
+ kv().set.assert_called_with('reactive.conversations.rel:2.service/3', {
+ 'namespace': 'rel:2',
+ 'scope': 'service/3',
+ 'units': ['service/3'],
+ })
+ kv().unset.assert_called_with('reactive.conversations.rel.service/3')
+ set_state.assert_called_with('rel.joined', {'conversations': [
+ 'reactive.conversations.rel:2.service/3',
+ ]})
+
+
class TestRelationCall(unittest.TestCase):
def setUp(self):
self.r1 = mock.Mock(name='r1')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"nose",
"flake8",
"ipython",
"ipdb",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backcall==0.2.0
certifi==2021.5.30
charmhelpers==1.2.1
-e git+https://github.com/juju-solutions/charms.reactive.git@1ae5c5b86dff4cecfb261ebccbca7780e3546fa3#egg=charms.reactive
coverage==6.2
decorator==5.1.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
ipdb==0.13.13
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.7.0
mock==5.2.0
netaddr==0.10.1
nose==1.3.7
packaging==21.3
parso==0.7.1
pbr==6.1.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pyaml==23.5.8
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
six==1.17.0
tomli==1.2.3
traitlets==4.3.3
typing_extensions==4.1.1
wcwidth==0.2.13
zipp==3.6.0
| name: charms.reactive
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backcall==0.2.0
- charmhelpers==1.2.1
- coverage==6.2
- decorator==5.1.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipdb==0.13.13
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==5.2.0
- netaddr==0.10.1
- nose==1.3.7
- packaging==21.3
- parso==0.7.1
- pbr==6.1.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pyaml==23.5.8
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- six==1.17.0
- tomli==1.2.3
- traitlets==4.3.3
- typing-extensions==4.1.1
- wcwidth==0.2.13
- zipp==3.6.0
prefix: /opt/conda/envs/charms.reactive
| [
"tests/test_relations.py::TestConversation::test_depart",
"tests/test_relations.py::TestConversation::test_join",
"tests/test_relations.py::TestConversation::test_load",
"tests/test_relations.py::TestConversation::test_relation_ids",
"tests/test_relations.py::TestMigrateConvs::test_migrate"
]
| []
| [
"tests/test_relations.py::TestAutoAccessors::test_accessor",
"tests/test_relations.py::TestAutoAccessors::test_accessor_doc",
"tests/test_relations.py::TestRelationBase::test_conversation",
"tests/test_relations.py::TestRelationBase::test_find_impl",
"tests/test_relations.py::TestRelationBase::test_find_subclass",
"tests/test_relations.py::TestRelationBase::test_from_name",
"tests/test_relations.py::TestRelationBase::test_from_state",
"tests/test_relations.py::TestRelationBase::test_get_local",
"tests/test_relations.py::TestRelationBase::test_get_remote",
"tests/test_relations.py::TestRelationBase::test_is_state",
"tests/test_relations.py::TestRelationBase::test_remove_state",
"tests/test_relations.py::TestRelationBase::test_set_local",
"tests/test_relations.py::TestRelationBase::test_set_remote",
"tests/test_relations.py::TestRelationBase::test_set_state",
"tests/test_relations.py::TestRelationBase::test_toggle_state",
"tests/test_relations.py::TestConversation::test_get_local",
"tests/test_relations.py::TestConversation::test_get_remote",
"tests/test_relations.py::TestConversation::test_is_state",
"tests/test_relations.py::TestConversation::test_key",
"tests/test_relations.py::TestConversation::test_remove_state",
"tests/test_relations.py::TestConversation::test_set_local",
"tests/test_relations.py::TestConversation::test_set_remote",
"tests/test_relations.py::TestConversation::test_set_state",
"tests/test_relations.py::TestConversation::test_toggle_state",
"tests/test_relations.py::TestRelationCall::test_call_conversations",
"tests/test_relations.py::TestRelationCall::test_call_name",
"tests/test_relations.py::TestRelationCall::test_call_state",
"tests/test_relations.py::TestRelationCall::test_no_impl"
]
| []
| Apache License 2.0 | 468 | [
"charms/reactive/relations.py",
"charms/reactive/__init__.py",
"tox.ini"
]
| [
"charms/reactive/relations.py",
"charms/reactive/__init__.py",
"tox.ini"
]
|
|
BrandonLMorris__auacm-cli-11 | 5c13a4843e281aa1470d2bd28fe39c07f4e39e92 | 2016-03-11 15:15:59 | 5c13a4843e281aa1470d2bd28fe39c07f4e39e92 | diff --git a/src/auacm/competition.py b/src/auacm/competition.py
index f2d9561..794f8a4 100644
--- a/src/auacm/competition.py
+++ b/src/auacm/competition.py
@@ -1,6 +1,6 @@
"""Subcommands related to competitions"""
-import auacm, requests, textwrap
+import auacm, requests, textwrap, argparse
from datetime import datetime
from auacm.utils import subcommand
from auacm.exceptions import CompetitionNotFoundError
@@ -10,7 +10,7 @@ from auacm.exceptions import CompetitionNotFoundError
def get_comps(args=None):
"""Retrieve one or more competitions from the server"""
if args:
- return _get_one_comp(args)
+ return get_one_comp(args)
response = requests.get(auacm.BASE_URL + 'competitions')
@@ -35,13 +35,30 @@ def get_comps(args=None):
{}
''').format(current, upcoming, past).strip()
-def _get_one_comp(args):
+def get_one_comp(args):
"""Retrieve info on one specific competition"""
- response = requests.get(auacm.BASE_URL + 'competitions/' + str(args[0]))
+ parser = argparse.ArgumentParser(
+ add_help=False,
+ usage='competition [-i/--id] <competition>'
+ )
+ parser.add_argument('-i', '--id', action='store_true')
+ parser.add_argument('competition')
+ args = parser.parse_args(args)
+
+ if not args.id:
+ cid = _cid_from_name(args.competition)
+ if cid == -1:
+ raise CompetitionNotFoundError(
+ 'Could not find a competition with the name ' +
+ args.competition)
+ else:
+ cid = args.competition
+
+ response = requests.get(auacm.BASE_URL + 'competitions/' + str(cid))
if not response.ok or response.status_code == 404:
raise CompetitionNotFoundError(
- 'Could not find competition with id: ' + str(args[0]))
+ 'Could not find competition with id: ' + str(args.competition))
comp = response.json()['data']
@@ -62,6 +79,21 @@ def _get_one_comp(args):
{}
''').format(comp_str, teams, problems)
+def _cid_from_name(comp_name):
+ """Return the competition of an id based on it's name"""
+ comps = requests.get(auacm.BASE_URL + 'competitions').json()['data']
+ for comp in comps['upcoming']:
+ if comp_name.lower() in comp['name'].lower():
+ return int(comp['cid'])
+ for comp in comps['ongoing']:
+ if comp_name.lower() in comp['name'].lower():
+ return int(comp['cid'])
+ for comp in comps['past']:
+ if comp_name.lower() in comp['name'].lower():
+ return int(comp['cid'])
+
+ return -1
+
def _format_comps(comps):
"""Return a formatted string for a list of competitions"""
result = list()
@@ -85,7 +117,7 @@ def _format_teams(teams):
def _format_problems(probs):
"""Return a formatted string of the problems passed in"""
result = ''
- for label, prob in probs.items():
+ for label, prob in sorted(probs.items()):
result += '{}\t{} ({})\n'.format(label, prob['name'], prob['pid'])
return result.strip()
diff --git a/src/auacm/main.py b/src/auacm/main.py
index 32d55e0..35e463b 100644
--- a/src/auacm/main.py
+++ b/src/auacm/main.py
@@ -7,7 +7,7 @@ The central entry point of the auacm app.
import requests, sys, textwrap
import auacm
import auacm.utils as utils
-from auacm.exceptions import ConnectionError, ProblemNotFoundError, UnauthorizedException, InvalidSubmission
+from auacm.exceptions import ConnectionError, ProblemNotFoundError, UnauthorizedException, InvalidSubmission, CompetitionNotFoundError
def main(args):
"""
@@ -44,7 +44,8 @@ def main(args):
print(utils.callbacks[args[0]](args[1:]) or '')
except (ProblemNotFoundError,
UnauthorizedException,
- InvalidSubmission) as exp:
+ InvalidSubmission,
+ CompetitionNotFoundError) as exp:
print(exp.message)
exit(1)
except (requests.exceptions.ConnectionError, ConnectionError):
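A short usage sketch of the `get_comps` entry point as changed in the diff above. The `-i/--id` flag and the case-insensitive name matching come from the argparse setup in `get_one_comp` and `_cid_from_name`; the id `2` and the search string are placeholders, and the calls assume the AUACM API at `auacm.BASE_URL` is reachable.
```python
from auacm.competition import get_comps
from auacm.exceptions import CompetitionNotFoundError

# Fetch a single competition by its numeric id (the new -i/--id flag).
print(get_comps(['-i', '2']))

# Or match one by name; _cid_from_name compares case-insensitively, and a
# miss raises CompetitionNotFoundError, which main() now catches and prints.
try:
    print(get_comps(['ongoing']))
except CompetitionNotFoundError as exc:
    print(exc.message)
```
Called with no argument at all, `get_comps()` still returns the full current/upcoming/past listing, as in the unchanged branch of the function.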
| List recent and ongoing competitions
A `comp[etition]` command that will simply list (chronologically) the recent and ongoing competitions. | BrandonLMorris/auacm-cli | diff --git a/tests/competition_tests.py b/tests/competition_tests.py
index 7f7c381..b89e62e 100644
--- a/tests/competition_tests.py
+++ b/tests/competition_tests.py
@@ -21,16 +21,38 @@ class CompetitionTests(unittest.TestCase):
self.assertTrue('past fake mock' in result.lower())
@patch('requests.get')
- def testGetOneCompetition(self, mock_get):
- """Successfully get one competition by it's id"""
+ def testGetOneCompetitionById(self, mock_get):
+ """Successfully get one competition by its id"""
mock_get.return_value = MockResponse(json=COMPETITION_DETAIL)
- result = auacm.competition.get_comps(['2'])
+ result = auacm.competition.get_comps(['-i', '2'])
self.assertTrue('ongoing fake mock' in result.lower())
self.assertTrue('fake problem a' in result.lower())
self.assertTrue('brando the mando' in result.lower())
+ @patch('requests.get')
+ def testGetOneCompetitionByName(self, mock_get):
+ """Successfully get one competition by its name"""
+ mock_get.side_effect = [
+ MockResponse(json=COMPETITIONS_RESPONSE),
+ MockResponse(json=COMPETITION_DETAIL)]
+
+ result = auacm.competition.get_comps(['ongoing'])
+
+ self.assertTrue('ongoing fake mock' in result.lower())
+ self.assertTrue('fake problem a' in result.lower())
+ self.assertTrue('brando the mando' in result.lower())
+
+ @patch('requests.get')
+ def testGetOneCompetitionBadName(self, mock_get):
+ """Attempt to get a competition that doesn't exist by name"""
+ mock_get.side_effect = [
+ MockResponse(json=COMPETITIONS_RESPONSE)]
+
+ self.assertRaises(
+ auacm.exceptions.CompetitionNotFoundError,
+ auacm.competition.get_comps, ['not real'])
@patch('requests.get')
def testGetOneCompetitionBad(self, mock_get):
@@ -39,7 +61,7 @@ class CompetitionTests(unittest.TestCase):
self.assertRaises(
auacm.exceptions.CompetitionNotFoundError,
- auacm.competition.get_comps, ['99999999'])
+ auacm.competition.get_comps, ['-i', '99999999'])
if __name__ == '__main__':
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requests",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/BrandonLMorris/auacm-cli.git@5c13a4843e281aa1470d2bd28fe39c07f4e39e92#egg=auacm
Brotli @ file:///croot/brotli-split_1736182456865/work
certifi @ file:///croot/certifi_1738623731865/work/certifi
charset-normalizer @ file:///croot/charset-normalizer_1721748349566/work
exceptiongroup==1.2.2
idna @ file:///croot/idna_1714398848350/work
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
PySocks @ file:///tmp/build/80754af9/pysocks_1605305812635/work
pytest==8.3.5
requests @ file:///croot/requests_1730999120400/work
tomli==2.2.1
urllib3 @ file:///croot/urllib3_1737133630106/work
| name: auacm-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- brotli-python=1.0.9=py39h6a678d5_9
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2025.1.31=py39h06a4308_0
- charset-normalizer=3.3.2=pyhd3eb1b0_0
- idna=3.7=py39h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- pysocks=1.7.1=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- requests=2.32.3=py39h06a4308_1
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- urllib3=2.3.0=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/auacm-cli
| [
"tests/competition_tests.py::CompetitionTests::testGetOneCompetitionBadName",
"tests/competition_tests.py::CompetitionTests::testGetOneCompetitionByName"
]
| []
| [
"tests/competition_tests.py::CompetitionTests::testGetAllCompetitons",
"tests/competition_tests.py::CompetitionTests::testGetOneCompetitionBad",
"tests/competition_tests.py::CompetitionTests::testGetOneCompetitionById"
]
| []
| MIT License | 471 | [
"src/auacm/competition.py",
"src/auacm/main.py"
]
| [
"src/auacm/competition.py",
"src/auacm/main.py"
]
|
|
docker__docker-py-988 | fa7068cb7cf2ae1efcc2b3b99f24f4c7aa29e989 | 2016-03-11 20:04:00 | 4c34be5d4ab8a5a017950712e9c96b56d78d1c58 | diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index bc26ce82..d4393d58 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -460,16 +460,16 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None):
tls_verify = os.environ.get('DOCKER_TLS_VERIFY')
if tls_verify == '':
tls_verify = False
- enable_tls = True
else:
tls_verify = tls_verify is not None
- enable_tls = cert_path or tls_verify
+ enable_tls = cert_path or tls_verify
params = {}
if host:
- params['base_url'] = (host.replace('tcp://', 'https://')
- if enable_tls else host)
+ params['base_url'] = (
+ host.replace('tcp://', 'https://') if enable_tls else host
+ )
if not enable_tls:
return params
| Certificate error in docker ci for test-docker-py
https://jenkins.dockerproject.org/job/Docker-PRs/24848/console for detail.
in docker-py, when checkout to the commit of 387db11009f4b4f64a4f2c6fd64d3eeb01828585,the error appears,if I remove the commit ,we will not have the error.
```
==================================== ERRORS ====================================
_________________ ERROR at setup of InformationTest.test_info __________________
/docker-py/tests/integration/conftest.py:17: in setup_test_session
c = docker_client()
/docker-py/tests/helpers.py:61: in docker_client
return docker.Client(**docker_client_kwargs(**kwargs))
/docker-py/tests/helpers.py:65: in docker_client_kwargs
client_kwargs = docker.utils.kwargs_from_env(assert_hostname=False)
/docker-py/docker/utils/utils.py:486: in kwargs_from_env
assert_fingerprint=tls_verify)
/docker-py/docker/tls.py:47: in __init__
'Path to a certificate and key files must be provided'
E TLSParameterError: Path to a certificate and key files must be provided through the client_config
param. TLS configurations should map the Docker CLI client configurations.
See https://docs.docker.com/engine/articles/https/ for API details.
________________ ERROR at setup of InformationTest.test_search _________________
...
``` | docker/docker-py | diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 87796d11..65b7cf8a 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -228,19 +228,7 @@ class KwargsFromEnvTest(base.BaseTestCase):
DOCKER_TLS_VERIFY='')
os.environ.pop('DOCKER_CERT_PATH', None)
kwargs = kwargs_from_env(assert_hostname=True)
- self.assertEqual('https://192.168.59.103:2376', kwargs['base_url'])
- self.assertTrue('ca.pem' in kwargs['tls'].ca_cert)
- self.assertTrue('cert.pem' in kwargs['tls'].cert[0])
- self.assertTrue('key.pem' in kwargs['tls'].cert[1])
- self.assertEqual(True, kwargs['tls'].assert_hostname)
- self.assertEqual(False, kwargs['tls'].verify)
- try:
- client = Client(**kwargs)
- self.assertEqual(kwargs['base_url'], client.base_url)
- self.assertEqual(kwargs['tls'].cert, client.cert)
- self.assertFalse(kwargs['tls'].verify)
- except TypeError as e:
- self.fail(e)
+ self.assertEqual('tcp://192.168.59.103:2376', kwargs['base_url'])
def test_kwargs_from_env_no_cert_path(self):
try:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_git_commit_hash"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
-e git+https://github.com/docker/docker-py.git@fa7068cb7cf2ae1efcc2b3b99f24f4c7aa29e989#egg=docker_py
exceptiongroup==1.2.2
importlib-metadata==6.7.0
iniconfig==2.0.0
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
requests==2.5.3
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
websocket-client==0.32.0
zipp==3.15.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- requests==2.5.3
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- websocket-client==0.32.0
- zipp==3.15.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert"
]
| [
"tests/unit/utils_test.py::SSLAdapterTest::test_only_uses_tls"
]
| [
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 472 | [
"docker/utils/utils.py"
]
| [
"docker/utils/utils.py"
]
|
|
cdent__gabbi-119 | 040821d39bd4fa067ea947bbf37687f76f4f325b | 2016-03-14 11:56:49 | 079743e562b576956841bf7c3ee49e35123e1d69 | diff --git a/gabbi/handlers.py b/gabbi/handlers.py
index 33a4175..6de7f5b 100644
--- a/gabbi/handlers.py
+++ b/gabbi/handlers.py
@@ -128,6 +128,8 @@ class HeadersResponseHandler(ResponseHandler):
If a header value is wrapped in ``/`` it is treated as a raw
regular expression.
+
+ Headers values are always treated as strings.
"""
test_key_suffix = 'headers'
@@ -137,10 +139,10 @@ class HeadersResponseHandler(ResponseHandler):
header = header.lower() # case-insensitive comparison
response = test.response
- header_value = test.replace_template(value)
+ header_value = test.replace_template(str(value))
try:
- response_value = response[header]
+ response_value = str(response[header])
except KeyError:
raise AssertionError(
"'%s' header not present in response: %s" % (
@@ -153,6 +155,6 @@ class HeadersResponseHandler(ResponseHandler):
'Expect header %s to match /%s/, got %s' %
(header, header_value, response_value))
else:
- test.assertEqual(header_value, response[header],
+ test.assertEqual(header_value, response_value,
'Expect header %s with value %s, got %s' %
(header, header_value, response[header]))
| If the test value for a response header is not treated by yaml as a string there is an error
```
File "/Users/cdent/src/enamel/.tox/functional/lib/python2.7/site-packages/gabbi/handlers.py", line 149, in action
if header_value.startswith('/') and header_value.endswith('/'):
AttributeError: 'float' object has no attribute 'startswith'
```
when:
```yaml
- name: default version
desc: no header sent
GET: /
request_headers:
content-type: application/json
response_headers:
vary: /openstack-enamel-api-version/
openstack-enamel-api-version: 0.1
``` | cdent/gabbi | diff --git a/gabbi/tests/test_handlers.py b/gabbi/tests/test_handlers.py
index a92d53e..d5688b8 100644
--- a/gabbi/tests/test_handlers.py
+++ b/gabbi/tests/test_handlers.py
@@ -173,6 +173,17 @@ class HandlersTest(unittest.TestCase):
self.assertIn("'location' header not present in response:",
str(failure.exception))
+ def test_resonse_headers_stringify(self):
+ handler = handlers.HeadersResponseHandler(self.test_class)
+ self.test.test_data = {'response_headers': {
+ 'x-alpha-beta': 2.0,
+ }}
+ self.test.response = {'x-alpha-beta': '2.0'}
+ self._assert_handler(handler)
+
+ self.test.response = {'x-alpha-beta': 2.0}
+ self._assert_handler(handler)
+
def _assert_handler(self, handler):
# Instantiate our contained test class by naming its test
# method and then run its tests to confirm.
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"mock",
"testrepository",
"coverage",
"hacking",
"sphinx",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
colorama==0.4.5
coverage==6.2
decorator==5.1.1
docutils==0.18.1
extras==1.0.0
fixtures==4.0.1
flake8==3.8.4
-e git+https://github.com/cdent/gabbi.git@040821d39bd4fa067ea947bbf37687f76f4f325b#egg=gabbi
hacking==4.1.0
httplib2==0.22.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
iso8601==1.1.0
Jinja2==3.0.3
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
MarkupSafe==2.0.1
mccabe==0.6.1
mock==5.2.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pycodestyle==2.6.0
pyflakes==2.2.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-subunit==1.4.2
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
testrepository==0.0.21
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- colorama==0.4.5
- coverage==6.2
- decorator==5.1.1
- docutils==0.18.1
- extras==1.0.0
- fixtures==4.0.1
- flake8==3.8.4
- hacking==4.1.0
- httplib2==0.22.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso8601==1.1.0
- jinja2==3.0.3
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- markupsafe==2.0.1
- mccabe==0.6.1
- mock==5.2.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pycodestyle==2.6.0
- pyflakes==2.2.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-subunit==1.4.2
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testrepository==0.0.21
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_handlers.py::HandlersTest::test_resonse_headers_stringify"
]
| []
| [
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_data",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_header",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_regex",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_data",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_path",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_output",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_payload"
]
| []
| Apache License 2.0 | 474 | [
"gabbi/handlers.py"
]
| [
"gabbi/handlers.py"
]
|
|
keenlabs__KeenClient-Python-91 | add5f7f54acdc32577db690b682c4708a783c735 | 2016-03-14 23:31:52 | 142a9aad4e402985163dc472c6ae76b7e3e60bc4 | diff --git a/README.md b/README.md
index 47f5fcb..25f9da7 100644
--- a/README.md
+++ b/README.md
@@ -267,6 +267,9 @@ python setup.py tests
### Changelog
+##### 0.3.21
++ Fix bug with scoped key generation not working with newer Keen projects.
+
##### 0.3.20
+ Add `saved_queries` support
+ Add Python 3.4 support
diff --git a/keen/Padding.py b/keen/Padding.py
index 5615340..b213370 100644
--- a/keen/Padding.py
+++ b/keen/Padding.py
@@ -103,7 +103,10 @@ def appendCMSPadding(str, blocksize=AES_blocksize):
def removeCMSPadding(str, blocksize=AES_blocksize):
'''CMS padding: Remove padding with bytes containing the number of padding bytes '''
- pad_len = ord(str[-1]) # last byte contains number of padding bytes
+ try:
+ pad_len = ord(str[-1]) # last byte contains number of padding bytes
+ except TypeError:
+ pad_len = str[-1]
assert pad_len <= blocksize, 'padding error'
assert pad_len <= len(str), 'padding error'
@@ -152,7 +155,10 @@ def appendZeroLenPadding(str, blocksize=AES_blocksize):
def removeZeroLenPadding(str, blocksize=AES_blocksize):
'Remove Padding with zeroes + last byte equal to the number of padding bytes'
- pad_len = ord(str[-1]) # last byte contains number of padding bytes
+ try:
+ pad_len = ord(str[-1]) # last byte contains number of padding bytes
+ except TypeError:
+ pad_len = str[-1]
assert pad_len < blocksize, 'padding error'
assert pad_len < len(str), 'padding error'
diff --git a/keen/scoped_keys.py b/keen/scoped_keys.py
index 4697c3b..e064c9b 100644
--- a/keen/scoped_keys.py
+++ b/keen/scoped_keys.py
@@ -8,22 +8,22 @@ from keen import Padding
__author__ = 'dkador'
# the block size for the cipher object; must be 16, 24, or 32 for AES
-BLOCK_SIZE = 32
+OLD_BLOCK_SIZE = 32
-def _pad(s):
+def pad_aes256(s):
"""
Pads an input string to a given block size.
:param s: string
:returns: The padded string.
"""
- if len(s) % BLOCK_SIZE == 0:
+ if len(s) % AES.block_size == 0:
return s
- return Padding.appendPadding(s, blocksize=BLOCK_SIZE)
+ return Padding.appendPadding(s, blocksize=AES.block_size)
-def _unpad(s):
+def unpad_aes256(s):
"""
Removes padding from an input string based on a given block size.
:param s: string
@@ -33,14 +33,96 @@ def _unpad(s):
return s
try:
- return Padding.removePadding(s, blocksize=BLOCK_SIZE)
+ return Padding.removePadding(s, blocksize=AES.block_size)
except AssertionError:
# if there's an error while removing padding, just return s.
return s
-# encrypt with AES, encode with hex
-def _encode_aes(key, plaintext):
+def old_pad(s):
+ """
+ Pads an input string to a given block size.
+ :param s: string
+ :returns: The padded string.
+ """
+ if len(s) % OLD_BLOCK_SIZE == 0:
+ return s
+
+ return Padding.appendPadding(s, blocksize=OLD_BLOCK_SIZE)
+
+
+def old_unpad(s):
+ """
+ Removes padding from an input string based on a given block size.
+ :param s: string
+ :returns: The unpadded string.
+ """
+ if not s:
+ return s
+
+ try:
+ return Padding.removePadding(s, blocksize=OLD_BLOCK_SIZE)
+ except AssertionError:
+ # if there's an error while removing padding, just return s.
+ return s
+
+
+# encrypt with AES-256-CBC, encode with hex
+def encode_aes256(key, plaintext):
+ """
+ Utility method to encode some given plaintext with the given key. Important thing to note:
+
+ This is not a general purpose encryption method - it has specific semantics (see below for
+ details).
+
+ Takes the given hex string key and converts it to a 256 bit binary blob. Then pads the given
+ plaintext to AES block size which is always 16 bytes, regardless of AES key size. Then
+ encrypts using AES-256-CBC using a random IV. Then converts both the IV and the ciphertext
+ to hex. Finally returns the IV appended by the ciphertext.
+
+ :param key: string, 64 hex chars long
+ :param plaintext: string, any amount of data
+ """
+ if len(key) != 64:
+ raise TypeError("encode_aes256() expects a 256 bit key encoded as a 64 hex character string")
+
+ # generate AES.block_size cryptographically secure random bytes for our IV (initial value)
+ iv = os.urandom(AES.block_size)
+ # set up an AES cipher object
+ cipher = AES.new(binascii.unhexlify(key.encode('ascii')), mode=AES.MODE_CBC, IV=iv)
+ # encrypt the plaintext after padding it
+ ciphertext = cipher.encrypt(pad_aes256(plaintext))
+ # append the hexed IV and the hexed ciphertext
+ iv_plus_encrypted = binascii.hexlify(iv) + binascii.hexlify(ciphertext)
+ # return that
+ return iv_plus_encrypted
+
+
+def decode_aes256(key, iv_plus_encrypted):
+ """
+ Utility method to decode a payload consisting of the hexed IV + the hexed ciphertext using
+ the given key. See above for more details.
+
+ :param key: string, 64 hex characters long
+ :param iv_plus_encrypted: string, a hexed IV + hexed ciphertext
+ """
+ # grab first AES.block_size bytes (aka 2 * AES.block_size characters of hex) - that's the IV
+ iv_size = 2 * AES.block_size
+ hexed_iv = iv_plus_encrypted[:iv_size]
+ # grab everything else - that's the ciphertext (aka encrypted message)
+ hexed_ciphertext = iv_plus_encrypted[iv_size:]
+ # unhex the iv and ciphertext
+ iv = binascii.unhexlify(hexed_iv)
+ ciphertext = binascii.unhexlify(hexed_ciphertext)
+ # set up the correct AES cipher object
+ cipher = AES.new(binascii.unhexlify(key.encode('ascii')), mode=AES.MODE_CBC, IV=iv)
+ # decrypt!
+ plaintext = cipher.decrypt(ciphertext)
+ # return the unpadded version of this
+ return unpad_aes256(plaintext)
+
+
+def old_encode_aes(key, plaintext):
"""
Utility method to encode some given plaintext with the given key. Important thing to note:
@@ -57,16 +139,16 @@ def _encode_aes(key, plaintext):
# generate 16 cryptographically secure random bytes for our IV (initial value)
iv = os.urandom(16)
# set up an AES cipher object
- cipher = AES.new(_pad(key), mode=AES.MODE_CBC, IV=iv)
+ cipher = AES.new(old_pad(key), mode=AES.MODE_CBC, IV=iv)
# encrypte the plaintext after padding it
- ciphertext = cipher.encrypt(_pad(plaintext))
+ ciphertext = cipher.encrypt(old_pad(plaintext))
# append the hexed IV and the hexed ciphertext
iv_plus_encrypted = binascii.hexlify(iv) + binascii.hexlify(ciphertext)
# return that
return iv_plus_encrypted
-def _decode_aes(key, iv_plus_encrypted):
+def old_decode_aes(key, iv_plus_encrypted):
"""
Utility method to decode a payload consisting of the hexed IV + the hexed ciphertext using
the given key. See above for more details.
@@ -82,18 +164,27 @@ def _decode_aes(key, iv_plus_encrypted):
iv = binascii.unhexlify(hexed_iv)
ciphertext = binascii.unhexlify(hexed_ciphertext)
# set up the correct AES cipher object
- cipher = AES.new(_pad(key), mode=AES.MODE_CBC, IV=iv)
+ cipher = AES.new(old_pad(key), mode=AES.MODE_CBC, IV=iv)
# decrypt!
plaintext = cipher.decrypt(ciphertext)
# return the unpadded version of this
- return _unpad(plaintext)
+ return old_unpad(plaintext)
def encrypt(api_key, options):
options_string = json.dumps(options)
- return _encode_aes(api_key, options_string)
+ if len(api_key) == 64:
+ return encode_aes256(api_key, options_string)
+ else:
+ return old_encode_aes(api_key, options_string)
def decrypt(api_key, scoped_key):
- json_string = _decode_aes(api_key, scoped_key)
- return json.loads(json_string)
\ No newline at end of file
+ if len(api_key) == 64:
+ json_string = decode_aes256(api_key, scoped_key)
+ else:
+ json_string = old_decode_aes(api_key, scoped_key)
+ try:
+ return json.loads(json_string)
+ except TypeError:
+ return json.loads(json_string.decode())
diff --git a/setup.py b/setup.py
index 1aa6a03..79eeb52 100644
--- a/setup.py
+++ b/setup.py
@@ -28,7 +28,7 @@ if sys.version_info < (2, 7):
setup(
name="keen",
- version="0.3.20",
+ version="0.3.21",
description="Python Client for Keen IO",
author="Keen IO",
author_email="[email protected]",
| New projects have 64-byte master keys, pycrypto supports only 16/24/32-byte AES
Generating scoped keys is breaking on newer projects, as the master keys are now 64-byte. They seem to have changed from 32-byte keys in the past few months. I created a project a month or two ago that has a 32-byte key, and year+ old projects have 32-byte keys. Only the newer one from a week or two ago has a 64-byte key.
``` python
scoped_key = scoped_keys.encrypt(master_key, {
"allowed_operations": ["read"],
"filters": filters
})
```
Exception being thrown:
`ValueError: AES key must be either 16, 24, or 32 bytes long` from pycrypto.
My `keen` module is the latest `0.3.20`, and `pycrypto` is the latest `2.6.1`. | keenlabs/KeenClient-Python | diff --git a/keen/tests/scoped_key_tests.py b/keen/tests/scoped_key_tests.py
new file mode 100644
index 0000000..ff65c31
--- /dev/null
+++ b/keen/tests/scoped_key_tests.py
@@ -0,0 +1,42 @@
+from keen import scoped_keys
+from keen.tests.base_test_case import BaseTestCase
+
+
+class ScopedKeyTests(BaseTestCase):
+ api_key = "24077ACBCB198BAAA2110EDDB673282F8E34909FD823A15C55A6253A664BE368"
+ bad_api_key = "24077ACBCB198BAAA2110EDDB673282F8E34909FD823A15C55A6253A664BE369"
+ old_api_key = "ab428324dbdbcfe744"
+ old_bad_api_key = "badbadbadbad"
+ options = {
+ "filters": [{
+ "property_name": "accountId",
+ "operator": "eq",
+ "property_value": "123456"
+ }]
+ }
+
+ def test_scoped_key_encrypts_and_decrypts(self):
+ encrypted = scoped_keys.encrypt(self.api_key, self.options)
+ decrypted = scoped_keys.decrypt(self.api_key, encrypted)
+ self.assert_equal(decrypted, self.options)
+
+ def test_scoped_key_fails_decryption_bad_key(self):
+ encrypted = scoped_keys.encrypt(self.api_key, self.options)
+ try:
+ scoped_keys.decrypt(self.bad_api_key, encrypted)
+ self.fail("shouldn't get here")
+ except ValueError as e:
+ self.assert_not_equal(e, None)
+
+ def test_old_scoped_key_encrypts_and_decrypts(self):
+ encrypted = scoped_keys.encrypt(self.old_api_key, self.options)
+ decrypted = scoped_keys.decrypt(self.old_api_key, encrypted)
+ self.assert_equal(decrypted, self.options)
+
+ def test_old_scoped_key_fails_decryption_on_bad_key(self):
+ encrypted = scoped_keys.encrypt(self.old_api_key, self.options)
+ try:
+ scoped_keys.decrypt(self.old_bad_api_key, encrypted)
+ self.fail("shouldn't get here")
+ except ValueError as e:
+ self.assert_not_equal(e, None)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 4
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"responses",
"unittest2",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
-e git+https://github.com/keenlabs/KeenClient-Python.git@add5f7f54acdc32577db690b682c4708a783c735#egg=keen
linecache2==1.0.0
mock==5.2.0
nose==1.3.7
packaging==24.2
Padding==0.5
pluggy==1.5.0
pycrypto==2.6.1
pytest==8.3.5
PyYAML==6.0.2
requests==2.32.3
responses==0.25.7
six==1.17.0
tomli==2.2.1
traceback2==1.4.0
unittest2==1.1.0
urllib3==2.3.0
| name: KeenClient-Python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- linecache2==1.0.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- padding==0.5
- pluggy==1.5.0
- pycrypto==2.6.1
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.32.3
- responses==0.25.7
- six==1.17.0
- tomli==2.2.1
- traceback2==1.4.0
- unittest2==1.1.0
- urllib3==2.3.0
prefix: /opt/conda/envs/KeenClient-Python
| [
"keen/tests/scoped_key_tests.py::ScopedKeyTests::test_old_scoped_key_encrypts_and_decrypts",
"keen/tests/scoped_key_tests.py::ScopedKeyTests::test_old_scoped_key_fails_decryption_on_bad_key",
"keen/tests/scoped_key_tests.py::ScopedKeyTests::test_scoped_key_encrypts_and_decrypts",
"keen/tests/scoped_key_tests.py::ScopedKeyTests::test_scoped_key_fails_decryption_bad_key"
]
| []
| []
| []
| MIT License | 476 | [
"setup.py",
"keen/scoped_keys.py",
"README.md",
"keen/Padding.py"
]
| [
"setup.py",
"keen/scoped_keys.py",
"README.md",
"keen/Padding.py"
]
|
|
dask__dask-1051 | cf5a6599fbd6269b67792842fc38e0b4dbbb778f | 2016-03-15 02:57:04 | 7f398f2a00e23b105790b6aca4edc6816379089b | diff --git a/dask/array/core.py b/dask/array/core.py
index d621540a0..d277652e7 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -1045,6 +1045,8 @@ class Array(Base):
def astype(self, dtype, **kwargs):
""" Copy of the array, cast to a specified type """
+ if dtype == self._dtype:
+ return self
name = tokenize('astype', self, dtype, kwargs)
return elemwise(lambda x: x.astype(dtype, **kwargs), self,
dtype=dtype, name=name)
@@ -1798,6 +1800,13 @@ def concatenate(seq, axis=0):
+ seq[0].chunks[axis + 1:])
cum_dims = [0] + list(accumulate(add, [len(a.chunks[axis]) for a in seq]))
+
+ if all(a._dtype is not None for a in seq):
+ dt = reduce(np.promote_types, [a._dtype for a in seq])
+ seq = [x.astype(dt) for x in seq]
+ else:
+ dt = None
+
names = [a.name for a in seq]
name = 'concatenate-' + tokenize(names, axis)
@@ -1812,10 +1821,7 @@ def concatenate(seq, axis=0):
dsk = dict(zip(keys, values))
dsk2 = merge(dsk, *[a.dask for a in seq])
- if all(a._dtype is not None for a in seq):
- dt = reduce(np.promote_types, [a._dtype for a in seq])
- else:
- dt = None
+
return Array(dsk2, name, chunks, dtype=dt)
diff --git a/dask/dataframe/io.py b/dask/dataframe/io.py
index f90419c38..3d2d37c4c 100644
--- a/dask/dataframe/io.py
+++ b/dask/dataframe/io.py
@@ -565,11 +565,11 @@ def from_dask_array(x, columns=None):
>>> x = da.ones((4, 2), chunks=(2, 2))
>>> df = dd.io.from_dask_array(x, columns=['a', 'b'])
>>> df.compute()
- a b
- 0 1 1
- 1 1 1
- 2 1 1
- 3 1 1
+ a b
+ 0 1.0 1.0
+ 1 1.0 1.0
+ 2 1.0 1.0
+ 3 1.0 1.0
"""
dummy = _dummy_from_array(x, columns)
| Bug (?) concatenating arrays of strings
Hi,
I've run across what I think is a small bug with concatenating dask arrays of strings in which the dtypes of the arrays to be concatenated are different:
```
In [106]: a = np.array(['CA-0', 'CA-1'])
In [107]: b = np.array(['TX-0', 'TX-10', 'TX-101', 'TX-102'])
In [108]: a = da.from_array(a, chunks=2)
In [109]: b = da.from_array(b, chunks=4)
In [110]: da.concatenate([a, b]).compute()
Out[110]:
array(['CA-0', 'CA-1', 'TX-0', 'TX-1', 'TX-1', 'TX-1'],
dtype='|S4')
In [111]: da.concatenate([b, a]).compute()
Out[111]:
array(['TX-0', 'TX-10', 'TX-101', 'TX-102', 'CA-0', 'CA-1'],
dtype='|S6')
```
If the array with the "smaller" dtype (in this case, S4) is the first array in the sequence to be concatenated, then this "smaller" dtype is used for the end result, truncating the entries in the array with the "larger" dtype (in this case, S6). If the order of the arrays is swapped so that the array with the "larger" dtype comes first, then the concatenation works properly.
It looks to me like the error occurs in the [dask.array.core.concatenate3](https://github.com/dask/dask/blob/master/dask/array/core.py#L2952) function where the dtype of the result is inferred from the first array in the sequence, rather than using the dtype computed in the [concatenate](https://github.com/dask/dask/blob/master/dask/array/core.py#L1748) function itself.
Todd | dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index f0b15ea2b..447304c49 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -285,6 +285,17 @@ def test_concatenate():
assert raises(ValueError, lambda: concatenate([a, b, c], axis=2))
+def test_concatenate_fixlen_strings():
+ x = np.array(['a', 'b', 'c'])
+ y = np.array(['aa', 'bb', 'cc'])
+
+ a = da.from_array(x, chunks=(2,))
+ b = da.from_array(y, chunks=(2,))
+
+ assert_eq(np.concatenate([x, y]),
+ da.concatenate([a, b]))
+
+
def test_vstack():
x = np.arange(5)
y = np.ones(5)
@@ -967,6 +978,8 @@ def test_astype():
assert_eq(d.astype('i8'), x.astype('i8'))
assert same_keys(d.astype('i8'), d.astype('i8'))
+ assert d.astype(d.dtype) is d
+
def test_arithmetic():
x = np.arange(5).astype('f4') + 2
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@cf5a6599fbd6269b67792842fc38e0b4dbbb778f#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- botocore=1.23.24=pyhd3eb1b0_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- distributed=2021.3.0=py36h06a4308_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- msgpack-python=1.0.2=py36hff7bd54_1
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- s3fs=2022.1.0=pyhd3eb1b0_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_astype"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_unravel",
"dask/array/tests/test_array_core.py::test_reshape",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_to_hdf5",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_h5py_newaxis",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_h5py_tokenize",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_imperative",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors"
]
| []
| BSD 3-Clause "New" or "Revised" License | 477 | [
"dask/array/core.py",
"dask/dataframe/io.py"
]
| [
"dask/array/core.py",
"dask/dataframe/io.py"
]
|
|
rackerlabs__lambda-uploader-69 | d4b3a4a582baf41325e0551e987a9d6ab6a0b02d | 2016-03-15 18:40:55 | a03a2743a7b668930f12d63da81a944af7e6cff3 | diff --git a/lambda_uploader/package.py b/lambda_uploader/package.py
index 5e22d83..6a0fe87 100644
--- a/lambda_uploader/package.py
+++ b/lambda_uploader/package.py
@@ -228,12 +228,11 @@ class Package(object):
for p in self._extra_files:
LOG.info('Copying extra %s into package' % p)
+ ignore += ["%s" % p]
if os.path.isdir(p):
- utils.copy_tree(p, package)
- ignore += ["^%s/*" % p]
+ utils.copy_tree(p, package, include_parent=True)
else:
shutil.copy(p, package)
- ignore += ["%s" % p]
# Append the temp workspace to the ignore list:
ignore += ["^%s/*" % TEMP_WORKSPACE_NAME]
diff --git a/lambda_uploader/utils.py b/lambda_uploader/utils.py
index 2a3c841..e7664b6 100644
--- a/lambda_uploader/utils.py
+++ b/lambda_uploader/utils.py
@@ -21,8 +21,18 @@ import re
LOG = logging.getLogger(__name__)
-def copy_tree(src, dest, ignore=[]):
+def copy_tree(src, dest, ignore=[], include_parent=False):
+ if os.path.isfile(src):
+ raise Exception('Cannot use copy_tree with a file as the src')
+
LOG.info('Copying source files')
+ if include_parent:
+ # if src is foo, make dest/foo and copy files there
+ nested_dest = os.path.join(dest, os.path.basename(src))
+ os.makedirs(nested_dest)
+ else:
+ nested_dest = dest
+
# Re-create directory structure
for root, _, files in os.walk(src):
for filename in files:
@@ -33,7 +43,7 @@ def copy_tree(src, dest, ignore=[]):
sub_dirs = os.path.dirname(os.path.relpath(path,
start=src))
- pkg_path = os.path.join(dest, sub_dirs)
+ pkg_path = os.path.join(nested_dest, sub_dirs)
if not os.path.isdir(pkg_path):
os.makedirs(pkg_path)
| Easy sharing of code between lambda functions
Is there a way to share Python code between lambdas without having to create an additional package to install via requirements?
For example, in the Serverless framework, there's the concept of a "lib" folder which can contain modules shared between multiple lambdas.
| rackerlabs/lambda-uploader | diff --git a/test/test_package.py b/test/test_package.py
index 5de87eb..104bed9 100644
--- a/test/test_package.py
+++ b/test/test_package.py
@@ -120,7 +120,7 @@ def test_package_with_extras():
assert path.isfile(expected_extra_file1)
# test a recursive directory
- expected_extra_file2 = path.join(PACKAGE_TEMP_DIR, 'foo/__init__.py')
+ expected_extra_file2 = path.join(PACKAGE_TEMP_DIR, 'extra/foo/__init__.py')
assert path.isfile(expected_extra_file2)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"cython",
"distro",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"pytest-bdd",
"pytest-benchmark",
"pytest-randomly",
"responses",
"mock",
"hypothesis",
"freezegun",
"trustme",
"requests-mock",
"requests",
"tomlkit"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
boto3==1.2.2
botocore==1.3.30
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
coverage==7.8.0
cryptography==44.0.2
Cython==3.0.12
distlib==0.3.9
distro==1.9.0
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
freezegun==1.5.1
gherkin-official==29.0.0
hypothesis==6.130.5
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==0.10.0
-e git+https://github.com/rackerlabs/lambda-uploader.git@d4b3a4a582baf41325e0551e987a9d6ab6a0b02d#egg=lambda_uploader
Mako==1.3.9
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
parse==1.20.2
parse_type==0.6.4
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
pycparser==2.22
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-bdd==8.1.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-randomly==3.16.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
responses==0.25.7
six==1.17.0
sortedcontainers==2.4.0
tomli==2.2.1
tomlkit==0.13.2
trustme==1.2.1
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: lambda-uploader
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- boto3==1.2.2
- botocore==1.3.30
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- coverage==7.8.0
- cryptography==44.0.2
- cython==3.0.12
- distlib==0.3.9
- distro==1.9.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- freezegun==1.5.1
- gherkin-official==29.0.0
- hypothesis==6.130.5
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==0.10.0
- mako==1.3.9
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- parse==1.20.2
- parse-type==0.6.4
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pycparser==2.22
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-bdd==8.1.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-randomly==3.16.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- responses==0.25.7
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==2.2.1
- tomlkit==0.13.2
- trustme==1.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/lambda-uploader
| [
"test/test_package.py::test_package_with_extras"
]
| [
"test/test_package.py::test_install_requirements"
]
| [
"test/test_package.py::test_bad_existing_virtualenv",
"test/test_package.py::test_package",
"test/test_package.py::test_default_virtualenv",
"test/test_package.py::test_prepare_workspace",
"test/test_package.py::test_package_zip_location",
"test/test_package.py::test_omit_virtualenv",
"test/test_package.py::test_package_name",
"test/test_package.py::test_existing_virtualenv"
]
| [
"test/test_package.py::test_package_clean_workspace"
]
| Apache License 2.0 | 478 | [
"lambda_uploader/package.py",
"lambda_uploader/utils.py"
]
| [
"lambda_uploader/package.py",
"lambda_uploader/utils.py"
]
|
|
moogar0880__PyTrakt-54 | f574c1c1dfc6f65f21296184659aadc2879f2be6 | 2016-03-20 01:02:25 | f574c1c1dfc6f65f21296184659aadc2879f2be6 | diff --git a/.landscape.yaml b/.landscape.yaml
new file mode 100644
index 0000000..95d8d32
--- /dev/null
+++ b/.landscape.yaml
@@ -0,0 +1,2 @@
+doc-warnings: yes
+strictness: veryhigh
diff --git a/.travis.yml b/.travis.yml
index 0b1b7a0..18823c2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,5 +5,4 @@ python:
- 3.4
- 3.5
script:
- - make travis
-after_success: coveralls
+ - make ci
diff --git a/HISTORY.rst b/HISTORY.rst
index 9ff86a6..0295563 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -1,5 +1,10 @@
Release History
^^^^^^^^^^^^^^^
+2.4.4 (2016-03-19)
+++++++++++++++++++
+
+* Update `slugify` function to better match trakt slugs (#51)
+
2.4.3 (2016-03-12)
++++++++++++++++++
diff --git a/Makefile b/Makefile
index 5ab43b9..8379202 100644
--- a/Makefile
+++ b/Makefile
@@ -14,11 +14,6 @@ coverage:
ci: init style test
-coveralls:
- pip install coveralls
-
-travis: coveralls ci
-
publish:
python setup.py register
python setup.py sdist upload
diff --git a/README.rst b/README.rst
index 6b953ad..598a0ad 100644
--- a/README.rst
+++ b/README.rst
@@ -4,9 +4,9 @@ PyTrakt
:target: https://travis-ci.org/moogar0880/PyTrakt
:alt: Travis CI Status
-.. image:: https://coveralls.io/repos/moogar0880/PyTrakt/badge.svg
- :target: https://coveralls.io/r/moogar0880/PyTrakt
- :alt: Coverage
+.. image:: https://landscape.io/github/moogar0880/PyTrakt/master/landscape.svg?style=flat
+ :target: https://landscape.io/github/moogar0880/PyTrakt/master
+ :alt: Code Health
.. image:: https://pypip.in/d/trakt/badge.svg?style=flat
:target: https://pypi.python.org/pypi/trakt/
diff --git a/trakt/__init__.py b/trakt/__init__.py
index c53769b..4980572 100644
--- a/trakt/__init__.py
+++ b/trakt/__init__.py
@@ -5,6 +5,6 @@ try:
except ImportError:
pass
-version_info = (2, 4, 3)
+version_info = (2, 4, 4)
__author__ = 'Jon Nappi'
__version__ = '.'.join([str(i) for i in version_info])
diff --git a/trakt/utils.py b/trakt/utils.py
index d05b755..6b79fec 100644
--- a/trakt/utils.py
+++ b/trakt/utils.py
@@ -14,12 +14,11 @@ def slugify(value):
Adapted from django.utils.text.slugify
"""
- if sys.version_info[0] == 2:
- value = unicode(value) # NOQA
- value = unicodedata.normalize('NFKD',
- value).encode('ascii',
- 'ignore').decode('ascii')
- value = re.sub('[^\w\s-]', '', value).strip().lower()
+ if sys.version_info[0] == 2 and isinstance(value, str):
+ value = unicode(value, 'utf-8') # NOQA
+ nfkd_form = unicodedata.normalize('NFKD', value)
+ decoded = nfkd_form.encode('ascii', 'ignore').decode('utf-8')
+ value = re.sub('[^\w\s-]', ' ', decoded).strip().lower()
return re.sub('[-\s]+', '-', value)
| Slugify bug?
There appear to be some differences in how your slugify function works in utils.py compared to the trakt slugs. Are they supposed to match? Without them matching, the _get functions in movies.py and tv.py class inits are prone to error, as these rely on the slug to pull the information. Here's an example.
title: Marvel's Agents of S.H.I.E.L.D.
pytrakt slug: marvels-agents-of-shield
trakt slug: marvel-s-agents-of-s-h-i-e-l-d
Also, I noticed that in movie.py you're joining the year (if passed) to the slug, but you don't have the same logic in tv.py. Is there a reason not to include that? TV shows with the same title also use the year in the slug to differentiate the shows. | moogar0880/PyTrakt | diff --git a/testing-requirements.txt b/testing-requirements.txt
index 35d8447..4c50c00 100644
--- a/testing-requirements.txt
+++ b/testing-requirements.txt
@@ -2,4 +2,3 @@
flake8
pytest
pytest-cov
-coveralls
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 6b75c21..78b1af4 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -11,6 +11,8 @@ def test_slugify():
('IM AN ALL CAPS STRING', 'im-an-all-caps-string'),
('IM A BAD A$$ STRING!@', 'im-a-bad-a-string'),
(' LOOK AT MY WHITESPACE ', 'look-at-my-whitespace'),
+ ("Marvel's Agents of S.H.I.E.L.D.", 'marvel-s-agents-of-s-h-i-e-l-d'),
+ ('Naruto Shippūden', 'naruto-shippuden'),
]
for inp, expected in test_data:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 6
} | 2.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"coveralls"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
oauthlib==3.2.2
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
requests-oauthlib==2.0.0
tomli==2.2.1
-e git+https://github.com/moogar0880/PyTrakt.git@f574c1c1dfc6f65f21296184659aadc2879f2be6#egg=trakt
urllib3==2.3.0
| name: PyTrakt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- oauthlib==3.2.2
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- requests-oauthlib==2.0.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/PyTrakt
| [
"tests/test_utils.py::test_slugify"
]
| []
| [
"tests/test_utils.py::test_airs_date",
"tests/test_utils.py::test_now",
"tests/test_utils.py::test_timestamp",
"tests/test_utils.py::test_extract_ids"
]
| []
| Apache License 2.0 | 479 | [
"README.rst",
"Makefile",
"trakt/utils.py",
".landscape.yaml",
"HISTORY.rst",
".travis.yml",
"trakt/__init__.py"
]
| [
"README.rst",
"Makefile",
"trakt/utils.py",
".landscape.yaml",
"HISTORY.rst",
".travis.yml",
"trakt/__init__.py"
]
|
|
box__box-python-sdk-125 | 162fc82c044a044284e01a403ecb8bf43da2d23d | 2016-03-21 21:26:37 | ded623f4b6de0530d8f983d3c3d2cafe646c126b | diff --git a/HISTORY.rst b/HISTORY.rst
index 5ca5148..ec26cb8 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -6,6 +6,10 @@ Release History
Upcoming
++++++++
+1.5.1 (2016-03-23)
+++++++++++++++++++
+
+- Added a ``revoke()`` method to the ``OAuth2`` class. Calling it will revoke the current access/refresh token pair.
1.5.0 (2016-03-17)
diff --git a/boxsdk/auth/developer_token_auth.py b/boxsdk/auth/developer_token_auth.py
index 33c7446..9fa9c4e 100644
--- a/boxsdk/auth/developer_token_auth.py
+++ b/boxsdk/auth/developer_token_auth.py
@@ -32,3 +32,9 @@ def _refresh(self, access_token):
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
+
+ def revoke(self):
+ """
+ Base class override.
+ Do nothing; developer tokens can't be revoked without client ID and secret.
+ """
diff --git a/boxsdk/auth/oauth2.py b/boxsdk/auth/oauth2.py
index 9daf9ad..a810e79 100644
--- a/boxsdk/auth/oauth2.py
+++ b/boxsdk/auth/oauth2.py
@@ -213,6 +213,8 @@ def _get_state_csrf_token():
return 'box_csrf_token_' + ''.join(ascii_alphabet[int(system_random.random() * ascii_len)] for _ in range(16))
def _store_tokens(self, access_token, refresh_token):
+ self._access_token = access_token
+ self._refresh_token = refresh_token
if self._store_tokens_callback is not None:
self._store_tokens_callback(access_token, refresh_token)
@@ -240,17 +242,41 @@ def send_token_request(self, data, access_token, expect_refresh_token=True):
url,
data=data,
headers=headers,
- access_token=access_token
+ access_token=access_token,
)
if not network_response.ok:
raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST')
try:
response = network_response.json()
- self._access_token = response['access_token']
- self._refresh_token = response.get('refresh_token', None)
- if self._refresh_token is None and expect_refresh_token:
+ access_token = response['access_token']
+ refresh_token = response.get('refresh_token', None)
+ if refresh_token is None and expect_refresh_token:
raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST')
except (ValueError, KeyError):
raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST')
- self._store_tokens(self._access_token, self._refresh_token)
+ self._store_tokens(access_token, refresh_token)
return self._access_token, self._refresh_token
+
+ def revoke(self):
+ """
+ Revoke the authorization for the current access/refresh token pair.
+ """
+ with self._refresh_lock:
+ access_token, refresh_token = self._get_tokens()
+ token_to_revoke = access_token or refresh_token
+ if token_to_revoke is None:
+ return
+ url = '{base_auth_url}/revoke'.format(base_auth_url=API.OAUTH2_API_URL)
+ network_response = self._network_layer.request(
+ 'POST',
+ url,
+ data={
+ 'client_id': self._client_id,
+ 'client_secret': self._client_secret,
+ 'token': token_to_revoke,
+ },
+ access_token=access_token,
+ )
+ if not network_response.ok:
+ raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST')
+ self._store_tokens(None, None)
diff --git a/boxsdk/version.py b/boxsdk/version.py
index cfa1704..c64a173 100644
--- a/boxsdk/version.py
+++ b/boxsdk/version.py
@@ -3,4 +3,4 @@
from __future__ import unicode_literals, absolute_import
-__version__ = '1.5.0'
+__version__ = '1.5.1'
| Add a way to revoke OAuth tokens
Using the OAuth2 class you can easily authenticate and refresh tokens, but there is no easy way to revoke the token.
Right now, to revoke the token, we would have to do something like:
```python
data = {
'client_id': CLIENT_ID,
'client_secret': CLIENT_SECRET,
'token': refresh_token
}
client.make_request(
'POST',
'{0}/revoke'.format(API.OAUTH2_API_URL),
data=data
)
```
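For comparison, here is a rough sketch of what calling such a helper could look like (the `revoke()` call anticipates the request below and mirrors the patch above; `CLIENT_ID`, `CLIENT_SECRET`, and the token variables are the same placeholders as in the snippet):
```python
from boxsdk import OAuth2

oauth = OAuth2(
    client_id=CLIENT_ID,            # placeholders, as in the snippet above
    client_secret=CLIENT_SECRET,
    access_token=access_token,
    refresh_token=refresh_token,
)
oauth.revoke()  # POSTs client id/secret and the current token to /revoke, then clears both tokens
```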
It would be nice if I could do something like `oauth.revoke()` to revoke and clear the access token and the refresh token. | box/box-python-sdk | diff --git a/test/unit/auth/test_oauth2.py b/test/unit/auth/test_oauth2.py
index 4e5b28b..af4d6ed 100644
--- a/test/unit/auth/test_oauth2.py
+++ b/test/unit/auth/test_oauth2.py
@@ -275,3 +275,42 @@ def test_token_request_allows_missing_refresh_token(mock_network_layer):
network_layer=mock_network_layer,
)
oauth.send_token_request({}, access_token=None, expect_refresh_token=False)
+
+
[email protected](
+ 'access_token,refresh_token,expected_token_to_revoke',
+ (
+ ('fake_access_token', 'fake_refresh_token', 'fake_access_token'),
+ (None, 'fake_refresh_token', 'fake_refresh_token')
+ )
+)
+def test_revoke_sends_revoke_request(
+ client_id,
+ client_secret,
+ mock_network_layer,
+ access_token,
+ refresh_token,
+ expected_token_to_revoke,
+):
+ mock_network_response = Mock()
+ mock_network_response.ok = True
+ mock_network_layer.request.return_value = mock_network_response
+ oauth = OAuth2(
+ client_id=client_id,
+ client_secret=client_secret,
+ access_token=access_token,
+ refresh_token=refresh_token,
+ network_layer=mock_network_layer,
+ )
+ oauth.revoke()
+ mock_network_layer.request.assert_called_once_with(
+ 'POST',
+ '{0}/revoke'.format(API.OAUTH2_API_URL),
+ data={
+ 'client_id': client_id,
+ 'client_secret': client_secret,
+ 'token': expected_token_to_revoke,
+ },
+ access_token=access_token,
+ )
+ assert oauth.access_token is None
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 4
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-xdist",
"mock",
"sqlalchemy",
"bottle"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | async-timeout==4.0.3
bottle==0.13.2
-e git+https://github.com/box/box-python-sdk.git@162fc82c044a044284e01a403ecb8bf43da2d23d#egg=boxsdk
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
cryptography==44.0.2
exceptiongroup==1.2.2
execnet==2.0.2
greenlet==3.1.1
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
mock==5.2.0
packaging==24.0
pluggy==1.2.0
pycparser==2.21
PyJWT==2.8.0
pytest==7.4.4
pytest-xdist==3.5.0
redis==5.0.8
requests==2.31.0
requests-toolbelt==1.0.0
six==1.17.0
SQLAlchemy==2.0.40
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: box-python-sdk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- async-timeout==4.0.3
- bottle==0.13.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- execnet==2.0.2
- greenlet==3.1.1
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- mock==5.2.0
- packaging==24.0
- pluggy==1.2.0
- pycparser==2.21
- pyjwt==2.8.0
- pytest==7.4.4
- pytest-xdist==3.5.0
- redis==5.0.8
- requests==2.31.0
- requests-toolbelt==1.0.0
- six==1.17.0
- sqlalchemy==2.0.40
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/box-python-sdk
| [
"test/unit/auth/test_oauth2.py::test_revoke_sends_revoke_request[fake_access_token-fake_refresh_token-fake_access_token]",
"test/unit/auth/test_oauth2.py::test_revoke_sends_revoke_request[None-fake_refresh_token-fake_refresh_token]"
]
| []
| [
"test/unit/auth/test_oauth2.py::test_get_correct_authorization_url[https://url.com/foo?bar=baz]",
"test/unit/auth/test_oauth2.py::test_get_correct_authorization_url[https://\\u0215\\u0155\\u013e.com/\\u0192\\u0151\\u0151?\\u0184\\u0201\\u0155=\\u0184\\u0201\\u017c]",
"test/unit/auth/test_oauth2.py::test_get_correct_authorization_url[None]",
"test/unit/auth/test_oauth2.py::test_authenticate_send_post_request_with_correct_params",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[0]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[1]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[2]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[3]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[4]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[5]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[6]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[7]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[8]",
"test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[9]",
"test/unit/auth/test_oauth2.py::test_authenticate_stores_tokens_correctly",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-0]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-1]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-2]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-3]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-4]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-5]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-6]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-7]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-8]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-9]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-0]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-1]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-2]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-3]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-4]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-5]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-6]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-7]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-8]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-9]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-0]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-1]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-2]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-3]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-4]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-5]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-6]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-7]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-8]",
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-9]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_getting_bad_network_response[test_method0]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_getting_bad_network_response[test_method1]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_no_json_object_can_be_decoded[test_method0]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_no_json_object_can_be_decoded[test_method1]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens0-test_method0]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens0-test_method1]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens1-test_method0]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens1-test_method1]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens2-test_method0]",
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens2-test_method1]",
"test/unit/auth/test_oauth2.py::test_token_request_allows_missing_refresh_token"
]
| []
| Apache License 2.0 | 481 | [
"HISTORY.rst",
"boxsdk/auth/oauth2.py",
"boxsdk/version.py",
"boxsdk/auth/developer_token_auth.py"
]
| [
"HISTORY.rst",
"boxsdk/auth/oauth2.py",
"boxsdk/version.py",
"boxsdk/auth/developer_token_auth.py"
]
|
|
pystorm__pystorm-21 | ee14ec810de20d62929a418fff88285833b6afe5 | 2016-03-22 02:07:10 | 7f0d6b320e9943082bcdfd6de93d161a3b174e12 | diff --git a/pystorm/serializers/serializer.py b/pystorm/serializers/serializer.py
index 03a15de..58f6a0e 100644
--- a/pystorm/serializers/serializer.py
+++ b/pystorm/serializers/serializer.py
@@ -4,6 +4,8 @@ each serializer a Java counterpart needs to exist.
from __future__ import absolute_import, print_function, unicode_literals
+from ..exceptions import StormWentAwayError
+
class Serializer(object):
@@ -21,9 +23,12 @@ class Serializer(object):
def send_message(self, msg_dict):
"""Serialize a message dictionary and write it to the output stream."""
with self._writer_lock:
- self.output_stream.flush()
- self.output_stream.write(self.serialize_dict(msg_dict))
- self.output_stream.flush()
+ try:
+ self.output_stream.flush()
+ self.output_stream.write(self.serialize_dict(msg_dict))
+ self.output_stream.flush()
+ except IOError:
+ raise StormWentAwayError()
def serialize_dict(self, msg_dict):
"""Convert a message dictionary to bytes. Used by send_message"""
| IOError is raised instead of StormWentAwayError if pipe breaks during send_message
```
IOError: [Errno 32] Broken pipe
(2 additional frame(s) were not displayed)
...
File "pystorm/component.py", line 471, in run
self._handle_run_exception(e)
File "pystorm/component.py", line 483, in _handle_run_exception
self.raise_exception(exc)
File "pystorm/component.py", line 321, in raise_exception
self.send_message({'command': 'error', 'msg': str(message)})
File "pystorm/component.py", line 305, in send_message
self.serializer.send_message(message)
File "pystorm/serializers/serializer.py", line 26, in send_message
self.output_stream.flush()
```
This should be a `StormWentAwayError`. Will fix ASAP. | pystorm/pystorm | diff --git a/test/pystorm/serializers/test_json_serializer.py b/test/pystorm/serializers/test_json_serializer.py
index a18a9e8..f460f6e 100644
--- a/test/pystorm/serializers/test_json_serializer.py
+++ b/test/pystorm/serializers/test_json_serializer.py
@@ -7,7 +7,9 @@ except ImportError:
import mock
import simplejson as json
+import pytest
+from pystorm.exceptions import StormWentAwayError
from pystorm.serializers.json_serializer import JSONSerializer
from .serializer import SerializerTestCase
@@ -33,3 +35,12 @@ class TestJSONSerializer(SerializerTestCase):
self.instance.output_stream = StringIO()
self.instance.send_message(msg_dict)
assert self.instance.output_stream.getvalue() == expected_output
+
+ def test_send_message_raises_stormwentaway(self):
+ string_io_mock = mock.MagicMock(autospec=True)
+ def raiser(): # lambdas can't raise
+ raise IOError()
+ string_io_mock.flush.side_effect = raiser
+ self.instance.output_stream = string_io_mock
+ with pytest.raises(StormWentAwayError):
+ self.instance.send_message({'hello': "world",})
diff --git a/test/pystorm/serializers/test_msgpack_serializer.py b/test/pystorm/serializers/test_msgpack_serializer.py
index e36a1ec..d51c19f 100644
--- a/test/pystorm/serializers/test_msgpack_serializer.py
+++ b/test/pystorm/serializers/test_msgpack_serializer.py
@@ -7,7 +7,9 @@ except ImportError:
import mock
import msgpack
+import pytest
+from pystorm.exceptions import StormWentAwayError
from pystorm.serializers.msgpack_serializer import MsgpackSerializer
from .serializer import SerializerTestCase
@@ -32,3 +34,12 @@ class TestMsgpackSerializer(SerializerTestCase):
expected_output = msgpack.packb(msg_dict)
self.instance.send_message(msg_dict)
assert self.instance.output_stream.getvalue() == expected_output
+
+ def test_send_message_raises_stormwentaway(self):
+ bytes_io_mock = mock.MagicMock(autospec=True)
+ def raiser(): # lambdas can't raise
+ raise IOError()
+ bytes_io_mock.flush.side_effect = raiser
+ self.instance.output_stream = bytes_io_mock
+ with pytest.raises(StormWentAwayError):
+ self.instance.send_message({'hello': "world",})
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
msgpack-python==0.5.6
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
-e git+https://github.com/pystorm/pystorm.git@ee14ec810de20d62929a418fff88285833b6afe5#egg=pystorm
pytest==7.0.1
pytest-timeout==2.1.0
simplejson==3.20.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: pystorm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-timeout==2.1.0
- simplejson==3.20.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/pystorm
| [
"test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_send_message_raises_stormwentaway",
"test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_send_message_raises_stormwentaway"
]
| []
| [
"test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_read_message_dict",
"test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_read_message_list",
"test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_send_message",
"test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_read_message_dict",
"test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_read_message_list",
"test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_send_message"
]
| []
| Apache License 2.0 | 482 | [
"pystorm/serializers/serializer.py"
]
| [
"pystorm/serializers/serializer.py"
]
|
|
dpkp__kafka-python-611 | d81963a919fa8161c94b5bef5e6de0697b91c4a6 | 2016-03-23 17:29:58 | 810f08b7996a15e65cdd8af6c1a7167c28f94646 | coveralls:
[](https://coveralls.io/builds/5522917)
Changes Unknown when pulling **bb2548705a3be822be9e17ea6eb824061fc9fb8f on sock_send_bytes** into ** on master**.
coveralls:
[](https://coveralls.io/builds/5664292)
Changes Unknown when pulling **7af174fe0a6bcc5962a8c8008d66e0b3b05e5fc2 on sock_send_bytes** into ** on master**.
| diff --git a/kafka/conn.py b/kafka/conn.py
index 2b82b6d..ffc839e 100644
--- a/kafka/conn.py
+++ b/kafka/conn.py
@@ -188,10 +188,12 @@ class BrokerConnection(object):
# and send bytes asynchronously. For now, just block
# sending each request payload
self._sock.setblocking(True)
- sent_bytes = self._sock.send(size)
- assert sent_bytes == len(size)
- sent_bytes = self._sock.send(message)
- assert sent_bytes == len(message)
+ for data in (size, message):
+ total_sent = 0
+ while total_sent < len(data):
+ sent_bytes = self._sock.send(data[total_sent:])
+ total_sent += sent_bytes
+ assert total_sent == len(data)
self._sock.setblocking(False)
except (AssertionError, ConnectionError) as e:
log.exception("Error sending %s to %s", request, self)
diff --git a/kafka/future.py b/kafka/future.py
index 06b8c3a..c7e0b14 100644
--- a/kafka/future.py
+++ b/kafka/future.py
@@ -15,10 +15,10 @@ class Future(object):
self._errbacks = []
def succeeded(self):
- return self.is_done and not self.exception
+ return self.is_done and not bool(self.exception)
def failed(self):
- return self.is_done and self.exception
+ return self.is_done and bool(self.exception)
def retriable(self):
try:
| kafka.common.ConnectionError on big messages + gevent
I'm getting `kafka.common.ConnectionError` when trying to send a big message. Code below:
```python
from gevent.monkey import patch_all; patch_all()
from kafka import KafkaProducer
producer = KafkaProducer(bootstrap_servers=xxxxxxxx,
buffer_memory=10 * 1024 * 1024,
max_request_size=10 * 1024 * 1024,)
producer.send('test', 'a' * 1024 * 1024 * 3, ).get(timeout=60)
producer.flush()
```
Running it causes this error:
```
2016-03-23 11:50:58,147 - kafka.conn - ERROR - Error sending ProduceRequest(required_acks=1, timeout=30000, topics=[(topic='test2', partitions=[(1, <_io.BytesIO object at 0x7fe64edc2950>)])]) to <BrokerConnection host=127.0.0.1 port=9093>
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/kafka/conn.py", line 187, in send
assert sent_bytes == len(message)
AssertionError
2016-03-23 11:50:58,150 - kafka.producer.sender - DEBUG - Error sending produce request to node 2:
2016-03-23 11:50:58,150 - kafka.producer.record_accumulator - DEBUG - Produced messages to topic-partition TopicPartition(topic='test2', partition=1) with base offset -1 and error .
2016-03-23 11:50:58,150 - kafka.client - DEBUG - Initializing connection to node 2 for metadata request
Traceback (most recent call last):
File "test_producer.py", line 15, in <module>
producer.send('test2', 'a' * 1024 * 1024 * 3, ).get(timeout=60)
File "/usr/local/lib/python2.7/dist-packages/kafka/producer/future.py", line 50, in get
raise self.exception # pylint: disable-msg=raising-bad-type
kafka.common.ConnectionError
```
This works well if I comment out the **patch_all()** line.
Fixing the kafka.conn.BrokerConnection.send() method solved it, but I'm not sure about side effects:
```python
def send():
.....
.....
size = Int32.encode(len(message))
try:
self._sock.setblocking(True)
sent_bytes = self._sock.send(size)
assert sent_bytes == len(size)
total_sent = 0
while total_sent < len(message): # sending in loop
sent_bytes = self._sock.send(message[total_sent:])
assert sent_bytes
total_sent += sent_bytes
self._sock.setblocking(False)
except (AssertionError, ConnectionError) as e:
log.exception("Error sending %s to %s", request, self)
....
```
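As a general illustration of the same retry idea (a standalone sketch — the `send_all` helper and its error handling are mine, not kafka-python code): `socket.send()` may write fewer bytes than requested, especially with large payloads or gevent's monkey-patched sockets, so the write has to be looped until the whole buffer is gone.
```python
def send_all(sock, data):
    """Write every byte of `data` to a connected socket, retrying partial sends."""
    total_sent = 0
    while total_sent < len(data):
        sent = sock.send(data[total_sent:])
        if sent == 0:
            raise ConnectionError("socket connection broken")
        total_sent += sent
    return total_sent
```
This is essentially what `socket.sendall()` does, and it is the same loop the merged patch above applies inside `BrokerConnection.send()`.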
Any chance of getting a similar fix into the master branch? | dpkp/kafka-python | diff --git a/test/test_conn.py b/test/test_conn.py
index d394f74..5432ebd 100644
--- a/test/test_conn.py
+++ b/test/test_conn.py
@@ -2,12 +2,15 @@
from __future__ import absolute_import
from errno import EALREADY, EINPROGRESS, EISCONN, ECONNRESET
-import socket
import time
import pytest
from kafka.conn import BrokerConnection, ConnectionStates
+from kafka.protocol.api import RequestHeader
+from kafka.protocol.metadata import MetadataRequest
+
+import kafka.common as Errors
@pytest.fixture
@@ -20,6 +23,7 @@ def socket(mocker):
@pytest.fixture
def conn(socket):
+ from socket import AF_INET
conn = BrokerConnection('localhost', 9092, socket.AF_INET)
return conn
@@ -61,22 +65,111 @@ def test_connect_timeout(socket, conn):
def test_blacked_out(conn):
- assert not conn.blacked_out()
+ assert conn.blacked_out() is False
conn.last_attempt = time.time()
- assert conn.blacked_out()
+ assert conn.blacked_out() is True
def test_connected(conn):
- assert not conn.connected()
+ assert conn.connected() is False
conn.state = ConnectionStates.CONNECTED
- assert conn.connected()
+ assert conn.connected() is True
def test_connecting(conn):
- assert not conn.connecting()
+ assert conn.connecting() is False
+ conn.state = ConnectionStates.CONNECTING
+ assert conn.connecting() is True
+ conn.state = ConnectionStates.CONNECTED
+ assert conn.connecting() is False
+
+
+def test_send_disconnected(conn):
+ conn.state = ConnectionStates.DISCONNECTED
+ f = conn.send('foobar')
+ assert f.failed() is True
+ assert isinstance(f.exception, Errors.ConnectionError)
+
+
+def test_send_connecting(conn):
conn.state = ConnectionStates.CONNECTING
- assert conn.connecting()
+ f = conn.send('foobar')
+ assert f.failed() is True
+ assert isinstance(f.exception, Errors.NodeNotReadyError)
+
+
+def test_send_max_ifr(conn):
conn.state = ConnectionStates.CONNECTED
- assert not conn.connecting()
+ max_ifrs = conn.config['max_in_flight_requests_per_connection']
+ for _ in range(max_ifrs):
+ conn.in_flight_requests.append('foo')
+ f = conn.send('foobar')
+ assert f.failed() is True
+ assert isinstance(f.exception, Errors.TooManyInFlightRequests)
+
+
+def test_send_no_response(socket, conn):
+ conn.connect()
+ assert conn.state is ConnectionStates.CONNECTED
+ req = MetadataRequest([])
+ header = RequestHeader(req, client_id=conn.config['client_id'])
+ payload_bytes = len(header.encode()) + len(req.encode())
+ third = payload_bytes // 3
+ remainder = payload_bytes % 3
+ socket.send.side_effect = [4, third, third, third, remainder]
+
+ assert len(conn.in_flight_requests) == 0
+ f = conn.send(req, expect_response=False)
+ assert f.succeeded() is True
+ assert f.value is None
+ assert len(conn.in_flight_requests) == 0
+
+
+def test_send_response(socket, conn):
+ conn.connect()
+ assert conn.state is ConnectionStates.CONNECTED
+ req = MetadataRequest([])
+ header = RequestHeader(req, client_id=conn.config['client_id'])
+ payload_bytes = len(header.encode()) + len(req.encode())
+ third = payload_bytes // 3
+ remainder = payload_bytes % 3
+ socket.send.side_effect = [4, third, third, third, remainder]
+
+ assert len(conn.in_flight_requests) == 0
+ f = conn.send(req)
+ assert f.is_done is False
+ assert len(conn.in_flight_requests) == 1
+
+
+def test_send_error(socket, conn):
+ conn.connect()
+ assert conn.state is ConnectionStates.CONNECTED
+ req = MetadataRequest([])
+ header = RequestHeader(req, client_id=conn.config['client_id'])
+ try:
+ error = ConnectionError
+ except NameError:
+ from socket import error
+ socket.send.side_effect = error
+ f = conn.send(req)
+ assert f.failed() is True
+ assert isinstance(f.exception, Errors.ConnectionError)
+ assert socket.close.call_count == 1
+ assert conn.state is ConnectionStates.DISCONNECTED
+
+
+def test_can_send_more(conn):
+ assert conn.can_send_more() is True
+ max_ifrs = conn.config['max_in_flight_requests_per_connection']
+ for _ in range(max_ifrs):
+ assert conn.can_send_more() is True
+ conn.in_flight_requests.append('foo')
+ assert conn.can_send_more() is False
+
+
+def test_recv(socket, conn):
+ pass # TODO
+
-# TODO: test_send, test_recv, test_can_send_more, test_close
+def test_close(conn):
+ pass # TODO
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-pylint",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
dill==0.3.4
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==5.10.1
-e git+https://github.com/dpkp/kafka-python.git@d81963a919fa8161c94b5bef5e6de0697b91c4a6#egg=kafka_python
lazy-object-proxy==1.7.1
lz4tools==1.3.1.2
mccabe==0.7.0
mock==5.2.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-pylint==0.18.0
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
wrapt==1.16.0
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- coverage==6.2
- cramjam==2.5.0
- dill==0.3.4
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==5.10.1
- lazy-object-proxy==1.7.1
- lz4tools==1.3.1.2
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-pylint==0.18.0
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- wrapt==1.16.0
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_conn.py::test_send_disconnected",
"test/test_conn.py::test_send_connecting",
"test/test_conn.py::test_send_max_ifr",
"test/test_conn.py::test_send_no_response",
"test/test_conn.py::test_send_response",
"test/test_conn.py::test_send_error"
]
| []
| [
"test/test_conn.py::test_connect[states0]",
"test/test_conn.py::test_connect[states1]",
"test/test_conn.py::test_connect[states2]",
"test/test_conn.py::test_connect[states3]",
"test/test_conn.py::test_connect[states4]",
"test/test_conn.py::test_connect_timeout",
"test/test_conn.py::test_blacked_out",
"test/test_conn.py::test_connected",
"test/test_conn.py::test_connecting",
"test/test_conn.py::test_can_send_more",
"test/test_conn.py::test_recv",
"test/test_conn.py::test_close"
]
| []
| Apache License 2.0 | 485 | [
"kafka/future.py",
"kafka/conn.py"
]
| [
"kafka/future.py",
"kafka/conn.py"
]
|
nickstenning__honcho-171 | 824775779ddf30606e7514b4639e81a2d6f25393 | 2016-03-28 14:30:24 | 824775779ddf30606e7514b4639e81a2d6f25393 | nickstenning: @migurski Could you check this out and let me know if it correctly addresses the issue for you?
migurski: Thanks Nick! I checked again with Python 2.7.6 on Ubuntu 14.04. At 84a1f7d it generated a correct script, at d8be8f4 it generated a bad one, and with your recent efd6292 it generated this improved working script:
```
start on starting honcho-stuff
stop on stopping honcho-stuff
respawn
env VAR='foo bar'
env PORT=5000
exec su - migurski -m -s /bin/sh -c 'cd /home/migurski/honcho; exec python /home/migurski/honcho/stuff.py >> /var/log/honcho/stuff-1.log 2>&1'
```
migurski: I’ve updated https://github.com/nickstenning/honcho/pull/154, though with the file `honcho/test/integration/test_export.py` gone I’m not sure it will have an effect. | diff --git a/honcho/export/templates/upstart/process.conf b/honcho/export/templates/upstart/process.conf
index 8a05378..fe6b451 100644
--- a/honcho/export/templates/upstart/process.conf
+++ b/honcho/export/templates/upstart/process.conf
@@ -2,8 +2,7 @@ start on starting {{ group_name }}
stop on stopping {{ group_name }}
respawn
-exec su - {{ user }} -s {{ shell }} -c 'cd {{ app_root }};
-{%- for k, v in process.env.items() -%}
- export {{ k }}={{ v | shellquote }};
-{%- endfor -%}
-exec {{ process.cmd }} >> {{ log }}/{{ process.name|dashrepl }}.log 2>&1'
+{% for k, v in process.env.items() -%}
+env {{ k }}={{ v | shellquote }}
+{% endfor %}
+exec su - {{ user }} -m -s {{ shell }} -c 'cd {{ app_root }}; exec {{ process.cmd }} >> {{ log }}/{{ process.name|dashrepl }}.log 2>&1'
diff --git a/honcho/manager.py b/honcho/manager.py
index 218f2b4..ff31a8e 100644
--- a/honcho/manager.py
+++ b/honcho/manager.py
@@ -53,7 +53,7 @@ class Manager(object):
self._terminating = False
- def add_process(self, name, cmd, quiet=False, env=None):
+ def add_process(self, name, cmd, quiet=False, env=None, cwd=None):
"""
Add a process to this manager instance. The process will not be started
until #loop() is called.
@@ -63,7 +63,8 @@ class Manager(object):
name=name,
quiet=quiet,
colour=next(self._colours),
- env=env)
+ env=env,
+ cwd=cwd)
self._processes[name] = {}
self._processes[name]['obj'] = proc
diff --git a/honcho/process.py b/honcho/process.py
index 669c8ef..c211af1 100644
--- a/honcho/process.py
+++ b/honcho/process.py
@@ -21,12 +21,14 @@ class Process(object):
name=None,
colour=None,
quiet=False,
- env=None):
+ env=None,
+ cwd=None):
self.cmd = cmd
self.colour = colour
self.quiet = quiet
self.name = name
self.env = os.environ.copy() if env is None else env
+ self.cwd = cwd
# This is a honcho.environ.Env object, to allow for stubbing of
# external calls, not the operating system environment.
@@ -36,7 +38,7 @@ class Process(object):
def run(self, events=None, ignore_signals=False):
self._events = events
- self._child = self._child_ctor(self.cmd, env=self.env)
+ self._child = self._child_ctor(self.cmd, env=self.env, cwd=self.cwd)
self._send_message({'pid': self._child.pid}, type='start')
# Don't pay attention to SIGINT/SIGTERM. The process itself is
| Exported upstart configuration silently fails with quoted variables
At or near d8be8f4a8 (version 0.5.0), quoted variables in exported upstart scripts became invalid and fail silently. Previously, quoted `.env` variables with spaces generated upstart configurations with correctly-nested double and single quotes. This is an example generated by 84a1f7d (also version 0.5.0):
```
VAR="foo bar"
```
```
start on starting things-stuff
stop on stopping things-stuff
respawn
exec su - migurski -s /bin/sh -c 'cd /home/migurski/things; export PORT=5000; export VAR="foo bar"; python stuff.py >> /var/log/things/stuff-1.log 2>&1'
```
Starting at d8be8f4a8 and still in 0.6.6, the exported configuration from the configuration above began producing this invalid and failing upstart configuration, due to the single quotes:
```
start on starting things-stuff
stop on stopping things-stuff
respawn
exec su - migurski -s /bin/sh -c 'cd /home/migurski/things;export VAR='foo bar';export PORT=5000;python stuff.py >> /var/log/things/stuff-1.log 2>&1'
```
Here are my Procfile and python script for testing:
```
stuff: python stuff.py
```
```python
from sys import stderr
from os import environ
from time import sleep
while True:
print >> stderr, repr(environ['VAR'])
sleep(5)
``` | nickstenning/honcho | diff --git a/tests/integration/test_export.py b/tests/integration/test_export.py
index bbb2397..09543b7 100644
--- a/tests/integration/test_export.py
+++ b/tests/integration/test_export.py
@@ -37,3 +37,37 @@ def test_export_upstart(testenv):
'trunk-web-1.conf'):
expected = testenv.path('elephant', filename)
assert os.path.exists(expected)
+
+
[email protected]('testenv', [{
+ 'Procfile': "web: python web.py",
+ '.env': """
+NORMAL=ok
+SQ_SPACES='sqspace sqspace'
+DQ_SPACES="dqspace dqspace"
+SQ="it's got single quotes"
+DQ='it has "double" quotes'
+EXCL='an exclamation mark!'
+SQ_DOLLAR='costs $UNINTERPOLATED amount'
+DQ_DOLLAR="costs $UNINTERPOLATED amount"
+"""
+}], indirect=True)
+def test_export_upstart_environment(testenv):
+ ret, out, err = testenv.run_honcho([
+ 'export',
+ 'upstart',
+ testenv.path('test'),
+ '-a', 'envvars',
+ ])
+
+ assert ret == 0
+
+ lines = open(testenv.path('test', 'envvars-web-1.conf')).readlines()
+ assert 'env NORMAL=ok\n' in lines
+ assert "env SQ_SPACES='sqspace sqspace'\n" in lines
+ assert "env DQ_SPACES='dqspace dqspace'\n" in lines
+ assert "env SQ='it'\"'\"'s got single quotes'\n" in lines
+ assert "env DQ='it has \"double\" quotes'\n" in lines
+ assert "env EXCL='an exclamation mark!'\n" in lines
+ assert "env SQ_DOLLAR='costs $UNINTERPOLATED amount'\n" in lines
+ assert "env DQ_DOLLAR='costs $UNINTERPOLATED amount'\n" in lines
diff --git a/tests/test_manager.py b/tests/test_manager.py
index abee53c..ae028c6 100644
--- a/tests/test_manager.py
+++ b/tests/test_manager.py
@@ -59,12 +59,13 @@ class FakeEnv(object):
class FakeProcess(object):
- def __init__(self, cmd, name=None, colour=None, quiet=None, env=None):
+ def __init__(self, cmd, name=None, colour=None, quiet=None, env=None, cwd=None):
self.cmd = cmd
self.name = name
self.colour = colour
self.quiet = quiet
self.env = env
+ self.cwd = cwd
self._events = None
self._options = {}
@@ -229,6 +230,10 @@ class TestManager(object):
with pytest.raises(AssertionError):
self.m.add_process('foo', 'another command')
+ def test_add_process_sets_cwd(self):
+ proc = self.m.add_process('foo', 'ruby server.rb', cwd='foo-dir')
+ assert proc.cwd == 'foo-dir'
+
def test_loop_with_empty_manager_returns_immediately(self):
self.m.loop()
diff --git a/tests/test_process.py b/tests/test_process.py
index 59e1538..9002212 100644
--- a/tests/test_process.py
+++ b/tests/test_process.py
@@ -184,3 +184,9 @@ class TestProcess(object):
proc.run(self.q)
msg = self.q.find_message({'returncode': 42})
assert msg.type == 'stop'
+
+ def test_cwd_passed_along(self):
+ proc = Process('echo 123', cwd='fake-dir')
+ proc._child_ctor = FakePopen
+ proc.run(self.q)
+ assert proc._child.kwargs['cwd'] == 'fake-dir'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[export]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
-e git+https://github.com/nickstenning/honcho.git@824775779ddf30606e7514b4639e81a2d6f25393#egg=honcho
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==2.7.3
MarkupSafe==3.0.2
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: honcho
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- jinja2==2.7.3
- markupsafe==3.0.2
prefix: /opt/conda/envs/honcho
| [
"tests/test_manager.py::TestManager::test_add_process_sets_cwd",
"tests/test_process.py::TestProcess::test_cwd_passed_along"
]
| [
"tests/integration/test_export.py::test_export_supervisord[testenv0]",
"tests/integration/test_export.py::test_export_upstart[testenv0]",
"tests/integration/test_export.py::test_export_upstart_environment[testenv0]"
]
| [
"tests/test_manager.py::TestManager::test_init_sets_default_printer_width",
"tests/test_manager.py::TestManager::test_add_process_updates_printer_width",
"tests/test_manager.py::TestManager::test_add_process_sets_name",
"tests/test_manager.py::TestManager::test_add_process_sets_cmd",
"tests/test_manager.py::TestManager::test_add_process_sets_colour",
"tests/test_manager.py::TestManager::test_add_process_sets_unique_colours",
"tests/test_manager.py::TestManager::test_add_process_sets_quiet",
"tests/test_manager.py::TestManager::test_add_process_name_must_be_unique",
"tests/test_manager.py::TestManager::test_loop_with_empty_manager_returns_immediately",
"tests/test_manager.py::TestManager::test_loop_calls_process_run",
"tests/test_manager.py::TestManager::test_printer_receives_messages_in_correct_order",
"tests/test_manager.py::TestManager::test_printer_receives_lines_multi_process",
"tests/test_manager.py::TestManager::test_returncode_set_by_first_exiting_process",
"tests/test_manager.py::TestManager::test_printer_receives_lines_after_stop",
"tests/test_process.py::TestProcess::test_ctor_cmd",
"tests/test_process.py::TestProcess::test_ctor_name",
"tests/test_process.py::TestProcess::test_ctor_colour",
"tests/test_process.py::TestProcess::test_ctor_quiet",
"tests/test_process.py::TestProcess::test_output_receives_start_with_pid",
"tests/test_process.py::TestProcess::test_message_contains_name",
"tests/test_process.py::TestProcess::test_message_contains_time",
"tests/test_process.py::TestProcess::test_message_contains_colour",
"tests/test_process.py::TestProcess::test_output_receives_lines",
"tests/test_process.py::TestProcess::test_output_receives_lines_invalid_utf8",
"tests/test_process.py::TestProcess::test_output_does_not_receive_lines_when_quiet",
"tests/test_process.py::TestProcess::test_output_receives_stop",
"tests/test_process.py::TestProcess::test_output_receives_stop_with_returncode"
]
| []
| MIT License | 488 | [
"honcho/process.py",
"honcho/export/templates/upstart/process.conf",
"honcho/manager.py"
]
| [
"honcho/process.py",
"honcho/export/templates/upstart/process.conf",
"honcho/manager.py"
]
|
alecthomas__voluptuous-155 | 32aeeec65d77940655a2905c9f15114586eb785e | 2016-03-29 07:26:15 | 17204b79ca56b65aa3e4cfb20e7ef4555c2e8592 | diff --git a/README.md b/README.md
index d991125..74490c2 100644
--- a/README.md
+++ b/README.md
@@ -522,7 +522,7 @@ backtracking is attempted:
... raise AssertionError('MultipleInvalid not raised')
... except MultipleInvalid as e:
... exc = e
->>> str(exc) == "invalid list value @ data[0][0]"
+>>> str(exc) == "not a valid value @ data[0][0]"
True
```
diff --git a/voluptuous.py b/voluptuous.py
index 3ab4de4..4b1215e 100644
--- a/voluptuous.py
+++ b/voluptuous.py
@@ -227,10 +227,6 @@ class InclusiveInvalid(Invalid):
"""Not all values found in inclusion group."""
-class SequenceItemInvalid(Invalid):
- """One of the values found in a sequence was invalid."""
-
-
class SequenceTypeInvalid(Invalid):
"""The type found is not a sequence type."""
@@ -664,7 +660,7 @@ class Schema(object):
>>> validator = Schema(['one', 'two', int])
>>> validator(['one'])
['one']
- >>> with raises(MultipleInvalid, 'invalid list value @ data[0]'):
+ >>> with raises(MultipleInvalid, 'expected int @ data[0]'):
... validator([3.5])
>>> validator([1])
[1]
@@ -698,8 +694,6 @@ class Schema(object):
raise
invalid = e
else:
- if len(invalid.path) <= len(index_path):
- invalid = SequenceItemInvalid('invalid %s value' % seq_type_name, index_path)
errors.append(invalid)
if errors:
raise MultipleInvalid(errors)
@@ -714,7 +708,7 @@ class Schema(object):
>>> validator = Schema(('one', 'two', int))
>>> validator(('one',))
('one',)
- >>> with raises(MultipleInvalid, 'invalid tuple value @ data[0]'):
+ >>> with raises(MultipleInvalid, 'expected int @ data[0]'):
... validator((3.5,))
>>> validator((1,))
(1,)
@@ -729,7 +723,7 @@ class Schema(object):
>>> validator = Schema(['one', 'two', int])
>>> validator(['one'])
['one']
- >>> with raises(MultipleInvalid, 'invalid list value @ data[0]'):
+ >>> with raises(MultipleInvalid, 'expected int @ data[0]'):
... validator([3.5])
>>> validator([1])
[1]
@@ -1095,7 +1089,7 @@ class Msg(object):
Messages are only applied to invalid direct descendants of the schema:
>>> validate = Schema(Msg([['one', 'two', int]], 'not okay!'))
- >>> with raises(MultipleInvalid, 'invalid list value @ data[0][0]'):
+ >>> with raises(MultipleInvalid, 'expected int @ data[0][0]'):
... validate([['three']])
The type which is thrown can be overridden but needs to be a subclass of Invalid
| list validation swallows more useful/explicit error messages
Take this validator and schema:
```python
def is_even(value):
if value % 2:
raise Invalid('%i is not even' % value)
return value
schema = Schema(dict(even_number=All(int, is_even)))
schema(dict(even_number=3))
```
We get a useful error message:
```
voluptuous.MultipleInvalid: 3 is not even for dictionary value @ data['even_number']
```
Sadly, as soon as we want a list of these, we lose the useful error message:
```python
schema = Schema(dict(even_numbers=[All(int, is_even)]))
schema(dict(even_numbers=[3]))
```
Instead, we get a much less useful message:
```
voluptuous.MultipleInvalid: invalid list value @ data['even_numbers'][0]
```
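To check whether the detail survives anywhere, one can catch the exception and inspect its `errors` list (a sketch reusing the validator above; the commented output is what the pre-fix behaviour described here produces):
```python
from voluptuous import All, Invalid, MultipleInvalid, Schema

def is_even(value):
    if value % 2:
        raise Invalid('%i is not even' % value)
    return value

schema = Schema({'even_numbers': [All(int, is_even)]})

try:
    schema({'even_numbers': [3]})
except MultipleInvalid as e:
    print([str(err) for err in e.errors])
    # ["invalid list value @ data['even_numbers'][0]"] -- only the generic wrapper
```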
Sadly, nowhere in that `MultipleInvalid` exception is the original error stored. | alecthomas/voluptuous | diff --git a/tests.md b/tests.md
index f098c1b..18f6fba 100644
--- a/tests.md
+++ b/tests.md
@@ -16,7 +16,7 @@ value:
... raise AssertionError('MultipleInvalid not raised')
... except MultipleInvalid as e:
... exc = e
- >>> str(exc) == 'invalid list value @ data[1]'
+ >>> str(exc) == 'expected a dictionary @ data[1]'
True
It should also be accurate for nested values:
@@ -35,7 +35,7 @@ It should also be accurate for nested values:
... except MultipleInvalid as e:
... exc = e
>>> str(exc)
- "invalid list value @ data[0]['four'][0]"
+ "not a valid value @ data[0]['four'][0]"
>>> try:
... schema([{'six': {'seven': 'nine'}}])
@@ -116,9 +116,9 @@ Multiple errors are reported:
... schema([1, 2, 3])
... except MultipleInvalid as e:
... print([str(i) for i in e.errors]) # doctest: +NORMALIZE_WHITESPACE
- ['invalid list value @ data[0]',
- 'invalid list value @ data[1]',
- 'invalid list value @ data[2]']
+ ['expected a list @ data[0]',
+ 'expected a list @ data[1]',
+ 'expected a list @ data[2]']
Required fields in dictionary which are invalid should not have required :
diff --git a/tests.py b/tests.py
index 0dd4e9d..32e6694 100644
--- a/tests.py
+++ b/tests.py
@@ -112,7 +112,7 @@ def test_literal():
try:
schema([{"c": 1}])
except Invalid as e:
- assert_equal(str(e), 'invalid list value @ data[0]')
+ assert_equal(str(e), "{'c': 1} not match for {'b': 1} @ data[0]")
else:
assert False, "Did not raise Invalid"
@@ -242,3 +242,23 @@ def test_repr():
)
assert_equal(repr(coerce_), "Coerce(int, msg='moo')")
assert_equal(repr(all_), "All('10', Coerce(int, msg=None), msg='all msg')")
+
+
+def test_list_validation_messages():
+ """ Make sure useful error messages are available """
+
+ def is_even(value):
+ if value % 2:
+ raise Invalid('%i is not even' % value)
+ return value
+
+ schema = Schema(dict(even_numbers=[All(int, is_even)]))
+
+ try:
+ schema(dict(even_numbers=[3]))
+ except Invalid as e:
+ assert_equal(len(e.errors), 1, e.errors)
+ assert_equal(str(e.errors[0]), "3 is not even @ data['even_numbers'][0]")
+ assert_equal(str(e), "3 is not even @ data['even_numbers'][0]")
+ else:
+ assert False, "Did not raise Invalid"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/alecthomas/voluptuous.git@32aeeec65d77940655a2905c9f15114586eb785e#egg=voluptuous
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: voluptuous
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- nose==1.3.7
prefix: /opt/conda/envs/voluptuous
| [
"tests.py::test_literal",
"tests.py::test_list_validation_messages"
]
| []
| [
"tests.py::test_required",
"tests.py::test_extra_with_required",
"tests.py::test_iterate_candidates",
"tests.py::test_in",
"tests.py::test_not_in",
"tests.py::test_remove",
"tests.py::test_extra_empty_errors",
"tests.py::test_url_validation",
"tests.py::test_url_validation_with_none",
"tests.py::test_url_validation_with_empty_string",
"tests.py::test_url_validation_without_host",
"tests.py::test_copy_dict_undefined",
"tests.py::test_sorting",
"tests.py::test_schema_extend",
"tests.py::test_schema_extend_overrides",
"tests.py::test_repr"
]
| []
| BSD 3-Clause "New" or "Revised" License | 489 | [
"voluptuous.py",
"README.md"
]
| [
"voluptuous.py",
"README.md"
]
|
|
SciLifeLab__genologics-154 | 33b08db9bc8d85427c79f5e10792b51f10b27a47 | 2016-04-02 05:57:33 | dc79dc55dc1218582f30a5e76e9a0fff428cbf30 | diff --git a/genologics/entities.py b/genologics/entities.py
index 22e0625..4b7bcf7 100644
--- a/genologics/entities.py
+++ b/genologics/entities.py
@@ -271,7 +271,6 @@ class BaseDescriptor(object):
def __get__(self, instance, cls):
raise NotImplementedError
-
class TagDescriptor(BaseDescriptor):
"""Abstract base descriptor for an instance attribute
represented by an XML element.
@@ -280,7 +279,6 @@ class TagDescriptor(BaseDescriptor):
def __init__(self, tag):
self.tag = tag
-
class StringDescriptor(TagDescriptor):
"""An instance attribute containing a string value
represented by an XML element.
@@ -307,7 +305,6 @@ class StringDescriptor(TagDescriptor):
else:
return instance.root
-
class StringAttributeDescriptor(TagDescriptor):
"""An instance attribute containing a string value
represented by an XML attribute.
@@ -317,7 +314,6 @@ class StringAttributeDescriptor(TagDescriptor):
instance.get()
return instance.root.attrib[self.tag]
-
class StringListDescriptor(TagDescriptor):
"""An instance attribute containing a list of strings
represented by multiple XML elements.
@@ -330,7 +326,6 @@ class StringListDescriptor(TagDescriptor):
result.append(node.text)
return result
-
class StringDictionaryDescriptor(TagDescriptor):
"""An instance attribute containing a dictionary of string key/values
represented by a hierarchical XML element.
@@ -345,7 +340,6 @@ class StringDictionaryDescriptor(TagDescriptor):
result[node2.tag] = node2.text
return result
-
class IntegerDescriptor(StringDescriptor):
"""An instance attribute containing an integer value
represented by an XMl element.
@@ -359,7 +353,6 @@ class IntegerDescriptor(StringDescriptor):
else:
return int(node.text)
-
class BooleanDescriptor(StringDescriptor):
"""An instance attribute containing a boolean value
represented by an XMl element.
@@ -373,7 +366,6 @@ class BooleanDescriptor(StringDescriptor):
else:
return node.text.lower() == 'true'
-
class UdfDictionary(object):
"Dictionary-like container of UDFs, optionally within a UDT."
@@ -536,8 +528,6 @@ class UdfDictionary(object):
def get(self, key, default=None):
return self._lookup.get(key, default)
-
-
class UdfDictionaryDescriptor(BaseDescriptor):
"""An instance attribute containing a dictionary of UDF values
represented by multiple XML elements.
@@ -557,7 +547,6 @@ class UdtDictionaryDescriptor(UdfDictionaryDescriptor):
_UDT = True
-
class PlacementDictionaryDescriptor(TagDescriptor):
"""An instance attribute containing a dictionary of locations
keys and artifact values represented by multiple XML elements.
@@ -571,7 +560,6 @@ class PlacementDictionaryDescriptor(TagDescriptor):
self.value[key] = Artifact(instance.lims,uri=node.attrib['uri'])
return self.value
-
class ExternalidListDescriptor(BaseDescriptor):
"""An instance attribute yielding a list of tuples (id, uri) for
external identifiers represented by multiple XML elements.
@@ -584,7 +572,6 @@ class ExternalidListDescriptor(BaseDescriptor):
result.append((node.attrib.get('id'), node.attrib.get('uri')))
return result
-
class EntityDescriptor(TagDescriptor):
"An instance attribute referencing another entity instance."
@@ -600,7 +587,6 @@ class EntityDescriptor(TagDescriptor):
else:
return self.klass(instance.lims, uri=node.attrib['uri'])
-
class EntityListDescriptor(EntityDescriptor):
"""An instance attribute yielding a list of entity instances
represented by multiple XML elements.
@@ -668,6 +654,7 @@ class NestedEntityListDescriptor(EntityListDescriptor):
for node in rootnode.findall(self.tag):
result.append(self.klass(instance.lims, uri=node.attrib['uri']))
return result
+
class DimensionDescriptor(TagDescriptor):
"""An instance attribute containing a dictionary specifying
the properties of a dimension of a container type.
@@ -680,7 +667,6 @@ class DimensionDescriptor(TagDescriptor):
offset = int(node.find('offset').text),
size = int(node.find('size').text))
-
class LocationDescriptor(TagDescriptor):
"""An instance attribute containing a tuple (container, value)
specifying the location of an analyte in a container.
@@ -750,7 +736,6 @@ class Entity(object):
raise ValueError("Entity uri and id can't be both None")
else:
uri = lims.get_uri(cls._URI, id)
-
try:
return lims.cache[uri]
except KeyError:
@@ -950,6 +935,7 @@ class Process(Entity):
udf = UdfDictionaryDescriptor()
udt = UdtDictionaryDescriptor()
files = EntityListDescriptor(nsmap('file:file'), File)
+
# instrument XXX
# process_parameters XXX
@@ -1042,6 +1028,11 @@ class Process(Entity):
cs.append(o_a.container)
return list(frozenset(cs))
+ @property
+ def step(self):
+ """Retrive the Step coresponding to this process. They share the same id"""
+ return Step(self.lims, id=self.id)
+
class Artifact(Entity):
"Any process input or output; analyte or file."
@@ -1105,28 +1096,76 @@ class Artifact(Entity):
stateless = property(stateless)
class StepActions(Entity):
- """Small hack to be able to query the actions subentity of
- the Step entity. Right now, only the escalation is parsed."""
+ """Actions associated with a step"""
+ _escalation = None
- def __init__(self, lims, uri=None, id=None):
- super(StepActions, self).__init__(lims,uri,id)
- self.escalation={}
- self.lims=lims
- self.root=self.lims.get(self.uri)
- for node in self.root.findall('escalation'):
- self.escalation['artifacts']=[]
- self.escalation['author']=Researcher(lims,uri=node.find('request').find('author').attrib.get('uri'))
- self.escalation['request']=uri=node.find('request').find('comment').text
- if node.find('review') is not None: #recommended by the Etree doc
- self.escalation['status']='Reviewed'
- self.escalation['reviewer']= Researcher(lims,uri=node.find('review').find('author').attrib.get('uri'))
- self.escalation['answer']=uri=node.find('review').find('comment').text
- else:
- self.escalation['status']='Pending'
+ @property
+ def escalation(self):
+ if not self._escalation:
+ self.get()
+ self._escalation={}
+ for node in self.root.findall('escalation'):
+ self._escalation['artifacts']=[]
+ self._escalation['author']=Researcher(self.lims,uri=node.find('request').find('author').attrib.get('uri'))
+ self._escalation['request']=uri=node.find('request').find('comment').text
+ if node.find('review') is not None: #recommended by the Etree doc
+ self._escalation['status']='Reviewed'
+ self._escalation['reviewer']= Researcher(self.lims,uri=node.find('review').find('author').attrib.get('uri'))
+ self._escalation['answer']=uri=node.find('review').find('comment').text
+ else:
+ self._escalation['status']='Pending'
+
+ for node2 in node.findall('escalated-artifacts'):
+ art= self.lims.get_batch([Artifact(self.lims, uri=ch.attrib.get('uri')) for ch in node2])
+ self._escalation['artifacts'].extend(art)
+ return self._escalation
+
+ @property
+ def next_actions(self):
+ actions = []
+ self.get()
+ if self.root.find('next-actions') is not None:
+ for node in self.root.find('next-actions').findall('next-action'):
+ action = {
+ 'artifact': Artifact(self.lims, node.attrib.get('artifact-uri')),
+ 'action': node.attrib.get('action'),
+ }
+ if node.attrib.get('step-uri'):
+ action['step']=Step(self.lims, uri=node.attrib.get('step-uri'))
+ if node.attrib.get('rework-step-uri'):
+ action['rework-step']=Step(self.lims, uri=node.attrib.get('rework-step-uri'))
+ actions.append(action)
+ return actions
+
+class ReagentKit(Entity):
+ """Type of Reagent with information about the provider"""
+ _URI="reagenttypes"
+ _TAG="reagent-kit"
+
+ name = StringDescriptor('name')
+ supplier = StringDescriptor('supplier')
+ website = StringDescriptor('website')
+ archived = BooleanDescriptor('archived')
- for node2 in node.findall('escalated-artifacts'):
- art= lims.get_batch([Artifact(lims,uri=ch.attrib.get('uri')) for ch in node2])
- self.escalation['artifacts'].extend(art)
+class ReagentLot(Entity):
+ """Reagent Lots contain information about a particualr lot of reagent used in a step"""
+ _URI="reagentlot"
+ _TAG="reagent-lot"
+
+ reagent_kit = EntityDescriptor('reagent-kit', ReagentKit)
+ name = StringDescriptor('name')
+ lot_number = StringDescriptor('lot-number')
+ created_date = StringDescriptor('created-date')
+ last_modified_date = StringDescriptor('last-modified-date')
+ expiry_date = StringDescriptor('expiry-date')
+ created_by = EntityDescriptor('created-by', Researcher)
+ last_modified_by = EntityDescriptor('last-modified-by', Researcher)
+ status = StringDescriptor('status')
+ usage_count = IntegerDescriptor('usage-count')
+
+
+class StepReagentLots(Entity):
+ reagent_lots = NestedEntityListDescriptor('reagent-lot', ReagentLot, 'reagent-lots')
class Step(Entity):
@@ -1134,16 +1173,15 @@ class Step(Entity):
_URI = 'steps'
- def __init__(self, lims, uri=None, id=None):
- super(Step, self).__init__(lims,uri,id)
- assert self.uri is not None
- actionsuri="{0}/actions".format(self.uri)
- self.actions= StepActions(lims,uri=actionsuri)
-
+ _reagent_lots = EntityDescriptor('reagent-lots', StepReagentLots)
+ actions = EntityDescriptor('actions', StepActions)
#placements = EntityDescriptor('placements', StepPlacements)
#program_status = EntityDescriptor('program-status',StepProgramStatus)
#details = EntityListDescriptor(nsmap('file:file'), StepDetails)
+ @property
+ def reagent_lots(self):
+ return self._reagent_lots.reagent_lots
class ProtocolStep(Entity):
"""Steps key in the Protocol object"""
@@ -1173,6 +1211,7 @@ class Stage(Entity):
"""Holds Protocol/Workflow"""
protocol = EntityDescriptor('protocol', Protocol)
+
class Workflow(Entity):
""" Workflow, introduced in 3.5"""
_URI="configuration/workflows"
@@ -1200,10 +1239,10 @@ class ReagentType(Entity):
if child.attrib.get("name") == "Sequence":
self.sequence=child.attrib.get("value")
+
Sample.artifact = EntityDescriptor('artifact', Artifact)
StepActions.step = EntityDescriptor('step', Step)
Stage.workflow = EntityDescriptor('workflow', Workflow)
Artifact.workflow_stages = NestedEntityListDescriptor('workflow-stage', Stage, 'workflow-stages')
Step.configuration = EntityDescriptor('configuration', ProtocolStep)
-
diff --git a/genologics/lims.py b/genologics/lims.py
index 3ee432a..316e9b1 100644
--- a/genologics/lims.py
+++ b/genologics/lims.py
@@ -489,9 +489,7 @@ class Lims(object):
root = self.post(uri, data)
def route_artifacts(self, artifact_list, workflow_uri=None, stage_uri=None, unassign=False):
-
root = ElementTree.Element(nsmap('rt:routing'))
-
if unassign:
s = ElementTree.SubElement(root, 'unassign')
else:
@@ -509,7 +507,6 @@ class Lims(object):
auth=(self.username, self.password),
headers={'content-type': 'application/xml',
'accept': 'application/xml'})
-
self.validate_response(r)
diff --git a/genologics/version.py b/genologics/version.py
index 18a95ec..b4cd250 100644
--- a/genologics/version.py
+++ b/genologics/version.py
@@ -1,1 +1,1 @@
-__version__="0.3.1"
+__version__="0.3.2"
diff --git a/setup.py b/setup.py
index 7aa61db..772f4e8 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,8 @@ version_py = os.path.join(os.path.dirname(__file__), 'genologics', 'version.py')
version = subprocess.Popen(["git", "describe", "--abbrev=0"],stdout=subprocess.PIPE, universal_newlines=True).communicate()[0].rstrip()
if not version:
version = __version__
-
+else:
+ version = version.decode("utf-8")
setup(name='genologics',
| Change to StepActions
Hi,
I need to rework the way Step stores actions, and that might result in backward-incompatible changes to the API.
I was wondering how much this would affect you and the potential users of the API, as it seems to still be a work in progress.
Cheers
Tim | SciLifeLab/genologics | diff --git a/tests/test_entities.py b/tests/test_entities.py
index dbe16c6..2163bce 100644
--- a/tests/test_entities.py
+++ b/tests/test_entities.py
@@ -3,16 +3,18 @@ from xml.etree import ElementTree
from sys import version_info
from io import BytesIO
+from genologics.lims import Lims
+from genologics.entities import StringDescriptor, StringAttributeDescriptor, StringListDescriptor, \
+ StringDictionaryDescriptor, IntegerDescriptor, BooleanDescriptor, UdfDictionary, StepActions, Researcher, Artifact, \
+ Step
+
if version_info.major == 2:
from mock import patch, Mock
- import __builtin__ as builtins
else:
from unittest.mock import patch, Mock
- import builtins
-from genologics.entities import StringDescriptor, StringAttributeDescriptor, StringListDescriptor, \
- StringDictionaryDescriptor, IntegerDescriptor, BooleanDescriptor, UdfDictionary
+
class TestEntities(TestCase):
@@ -214,3 +216,86 @@ class TestUdfDictionary(TestCase):
def test_get(self):
pass
+
+
+class TestEntities(TestCase):
+ url = 'http://testgenologics.com:4040'
+ dummy_xml="""<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+ <dummy></dummy>"""
+
+ def setUp(self):
+ self.lims = Lims(self.url, username='test', password='password')
+
+class TestStepActions(TestEntities):
+ url = 'http://testgenologics.com:4040'
+ step_actions_xml = """<stp:actions xmlns:stp="http://genologics.com/ri/step" uri="...">
+ <step rel="..." uri="{url}/steps/s1">
+ </step>
+ <configuration uri="{url}/config/1">...</configuration>
+ <next-actions>
+ <next-action artifact-uri="{url}/artifacts/a1" action="requeue" step-uri="..." rework-step-uri="...">
+ </next-action>
+ </next-actions>
+ <escalation>
+ <request>
+ <author uri="{url}/researchers/r1">
+ <first-name>foo</first-name>
+ <last-name>bar</last-name>
+ </author>
+ <reviewer uri="{url}/researchers/r1">
+ <first-name>foo</first-name>
+ <last-name>bar</last-name>
+ </reviewer>
+ <date>01-01-1970</date>
+ <comment>no comments</comment>
+ </request>
+ <review>
+ <author uri="{url}/researchers/r1">
+ <first-name>foo</first-name>
+ <last-name>bar</last-name>
+ </author>
+ <date>01-01-1970</date>
+ <comment>no comments</comment>
+ </review>
+ <escalated-artifacts>
+ <escalated-artifact uri="{url}/artifacts/r1">
+ </escalated-artifact>
+ </escalated-artifacts>
+ </escalation>
+</stp:actions>""".format(url=url)
+
+ step_actions_no_escalation_xml = """<stp:actions xmlns:stp="http://genologics.com/ri/step" uri="...">
+ <step rel="..." uri="{url}/steps/s1">
+ </step>
+ <configuration uri="{url}/config/1">...</configuration>
+ <next-actions>
+ <next-action artifact-uri="{url}/artifacts/a1" action="requeue" step-uri="{url}/steps/s1" rework-step-uri="{url}/steps/s2">
+ </next-action>
+ </next-actions>
+</stp:actions>""".format(url=url)
+
+ def test_escalation(self):
+ s = StepActions(uri=self.lims.get_uri('steps', 'step_id', 'actions'), lims=self.lims)
+ with patch('requests.Session.get',return_value=Mock(content = self.step_actions_xml, status_code=200)),\
+ patch('requests.post', return_value=Mock(content = self.dummy_xml, status_code=200)):
+ r = Researcher(uri='http://testgenologics.com:4040/researchers/r1', lims=self.lims)
+ a = Artifact(uri='http://testgenologics.com:4040/artifacts/r1', lims=self.lims)
+ expected_escalation = {
+ 'status': 'Reviewed',
+ 'author': r,
+ 'artifacts': [a], 'request': 'no comments',
+ 'answer': 'no comments',
+ 'reviewer': r}
+
+ assert s.escalation == expected_escalation
+
+ def test_next_actions(self):
+ s = StepActions(uri=self.lims.get_uri('steps', 'step_id', 'actions'), lims=self.lims)
+ with patch('requests.Session.get',return_value=Mock(content = self.step_actions_no_escalation_xml, status_code=200)):
+ step1 = Step(self.lims, uri='http://testgenologics.com:4040/steps/s1')
+ step2 = Step(self.lims, uri='http://testgenologics.com:4040/steps/s2')
+ artifact = Artifact(self.lims, uri='http://testgenologics.com:4040/artifacts/a1')
+ expected_next_actions = [{'artifact': artifact, 'action': 'requeue',
+ 'step': step1, 'rework-step': step2}]
+ assert s.next_actions == expected_next_actions
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 4
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
-e git+https://github.com/SciLifeLab/genologics.git@33b08db9bc8d85427c79f5e10792b51f10b27a47#egg=genologics
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
| name: genologics
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/genologics
| [
"tests/test_entities.py::TestStepActions::test_next_actions"
]
| [
"tests/test_entities.py::TestStringDictionaryDescriptor::test__get__",
"tests/test_entities.py::TestUdfDictionary::test___contains__",
"tests/test_entities.py::TestUdfDictionary::test___delitem__",
"tests/test_entities.py::TestUdfDictionary::test___getitem__",
"tests/test_entities.py::TestUdfDictionary::test___iter__",
"tests/test_entities.py::TestUdfDictionary::test___next__",
"tests/test_entities.py::TestUdfDictionary::test___setitem__",
"tests/test_entities.py::TestUdfDictionary::test__prepare_lookup",
"tests/test_entities.py::TestUdfDictionary::test__update_elems",
"tests/test_entities.py::TestUdfDictionary::test_clear",
"tests/test_entities.py::TestUdfDictionary::test_get",
"tests/test_entities.py::TestUdfDictionary::test_get_udt",
"tests/test_entities.py::TestUdfDictionary::test_items",
"tests/test_entities.py::TestUdfDictionary::test_set_udt",
"tests/test_entities.py::TestStepActions::test_escalation"
]
| [
"tests/test_entities.py::TestStringDescriptor::test__get__",
"tests/test_entities.py::TestStringDescriptor::test__set__",
"tests/test_entities.py::TestStringAttributeDescriptor::test__get__",
"tests/test_entities.py::TestStringListDescriptor::test__get__",
"tests/test_entities.py::TestIntegerDescriptor::test__get__",
"tests/test_entities.py::TestIntegerDescriptor::test__set__",
"tests/test_entities.py::TestBooleanDescriptor::test__get__",
"tests/test_entities.py::TestBooleanDescriptor::test__set__"
]
| []
| MIT License | 490 | [
"setup.py",
"genologics/lims.py",
"genologics/version.py",
"genologics/entities.py"
]
| [
"setup.py",
"genologics/lims.py",
"genologics/version.py",
"genologics/entities.py"
]
|
|
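A hypothetical usage sketch of the API introduced by the SciLifeLab__genologics-154 patch above — the `Process.step` property and the `StepActions.next_actions` property. The LIMS URL, credentials, and process id below are placeholders, and a reachable Clarity LIMS instance is assumed; this is a sketch, not part of the project's documented examples.

```python
# Sketch only: server, credentials and the process id are made-up placeholders.
from genologics.lims import Lims
from genologics.entities import Process

lims = Lims('https://lims.example.com', username='apiuser', password='secret')

process = Process(lims, id='24-1234')   # hypothetical process id
step = process.step                     # new Process.step property (same id as the process)

# new StepActions.next_actions: a list of dicts with 'artifact' and 'action' keys,
# plus optional 'step' / 'rework-step' entries when the XML provides those URIs
for action in step.actions.next_actions:
    print(action['artifact'], action['action'])
```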
dpkp__kafka-python-620 | b96f4ccf070109a022deb98b569e61d23e4e75b9 | 2016-04-03 16:29:40 | 810f08b7996a15e65cdd8af6c1a7167c28f94646 | diff --git a/kafka/coordinator/consumer.py b/kafka/coordinator/consumer.py
index a5e3067..b2ef1ea 100644
--- a/kafka/coordinator/consumer.py
+++ b/kafka/coordinator/consumer.py
@@ -91,8 +91,10 @@ class ConsumerCoordinator(BaseCoordinator):
log.warning('Broker version (%s) does not support offset'
' commits; disabling auto-commit.',
self.config['api_version'])
+ self.config['enable_auto_commit'] = False
elif self.config['group_id'] is None:
log.warning('group_id is None: disabling auto-commit.')
+ self.config['enable_auto_commit'] = False
else:
interval = self.config['auto_commit_interval_ms'] / 1000.0
self._auto_commit_task = AutoCommitTask(weakref.proxy(self), interval)
@@ -192,7 +194,7 @@ class ConsumerCoordinator(BaseCoordinator):
assignor.on_assignment(assignment)
# restart the autocommit task if needed
- if self.config['enable_auto_commit']:
+ if self._auto_commit_task:
self._auto_commit_task.enable()
assigned = set(self._subscription.assigned_partitions())
@@ -364,27 +366,27 @@ class ConsumerCoordinator(BaseCoordinator):
time.sleep(self.config['retry_backoff_ms'] / 1000.0)
def _maybe_auto_commit_offsets_sync(self):
- if self.config['api_version'] < (0, 8, 1):
+ if self._auto_commit_task is None:
return
- if self.config['enable_auto_commit']:
- # disable periodic commits prior to committing synchronously. note that they will
- # be re-enabled after a rebalance completes
- self._auto_commit_task.disable()
- try:
- self.commit_offsets_sync(self._subscription.all_consumed_offsets())
-
- # The three main group membership errors are known and should not
- # require a stacktrace -- just a warning
- except (Errors.UnknownMemberIdError,
- Errors.IllegalGenerationError,
- Errors.RebalanceInProgressError):
- log.warning("Offset commit failed: group membership out of date"
- " This is likely to cause duplicate message"
- " delivery.")
- except Exception:
- log.exception("Offset commit failed: This is likely to cause"
- " duplicate message delivery")
+ # disable periodic commits prior to committing synchronously. note that they will
+ # be re-enabled after a rebalance completes
+ self._auto_commit_task.disable()
+
+ try:
+ self.commit_offsets_sync(self._subscription.all_consumed_offsets())
+
+ # The three main group membership errors are known and should not
+ # require a stacktrace -- just a warning
+ except (Errors.UnknownMemberIdError,
+ Errors.IllegalGenerationError,
+ Errors.RebalanceInProgressError):
+ log.warning("Offset commit failed: group membership out of date"
+ " This is likely to cause duplicate message"
+ " delivery.")
+ except Exception:
+ log.exception("Offset commit failed: This is likely to cause"
+ " duplicate message delivery")
def _send_offset_commit_request(self, offsets):
"""Commit offsets for the specified list of topics and partitions.
| Consumer exception on close when group id is None
Following the conversation in #601, setting the `group_id` to `None` in a Consumer causes an exception to be raised when the consumer is closed.
```
>>> from kafka import KafkaConsumer
>>> k = KafkaConsumer('example', bootstrap_servers=['server'], group_id=None)
>>> k.close()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/madisonb/.local/share/virtualenvs/scrapy-cluster/lib/python2.7/site-packages/kafka/consumer/group.py", line 257, in close
self._coordinator.close()
File "/Users/madisonb/.local/share/virtualenvs/scrapy-cluster/lib/python2.7/site-packages/kafka/coordinator/consumer.py", line 306, in close
self._maybe_auto_commit_offsets_sync()
File "/Users/madisonb/.local/share/virtualenvs/scrapy-cluster/lib/python2.7/site-packages/kafka/coordinator/consumer.py", line 372, in _maybe_auto_commit_offsets_sync
self._auto_commit_task.disable()
AttributeError: 'NoneType' object has no attribute 'disable'
>>> k = KafkaConsumer('example', bootstrap_servers=['server'], group_id='stuff')
>>> k.close()
>>>
``` | dpkp/kafka-python | diff --git a/test/test_coordinator.py b/test/test_coordinator.py
index 847cbc1..44db808 100644
--- a/test/test_coordinator.py
+++ b/test/test_coordinator.py
@@ -52,12 +52,16 @@ def test_init(conn):
@pytest.mark.parametrize("api_version", [(0, 8, 0), (0, 8, 1), (0, 8, 2), (0, 9)])
def test_autocommit_enable_api_version(conn, api_version):
- coordinator = ConsumerCoordinator(
- KafkaClient(), SubscriptionState(), api_version=api_version)
+ coordinator = ConsumerCoordinator(KafkaClient(), SubscriptionState(),
+ enable_auto_commit=True,
+ group_id='foobar',
+ api_version=api_version)
if api_version < (0, 8, 1):
assert coordinator._auto_commit_task is None
+ assert coordinator.config['enable_auto_commit'] is False
else:
assert coordinator._auto_commit_task is not None
+ assert coordinator.config['enable_auto_commit'] is True
def test_protocol_type(coordinator):
@@ -349,28 +353,40 @@ def test_commit_offsets_sync(mocker, coordinator, offsets):
@pytest.mark.parametrize(
- 'api_version,enable,error,task_disable,commit_offsets,warn,exc', [
- ((0, 8), True, None, False, False, False, False),
- ((0, 9), False, None, False, False, False, False),
- ((0, 9), True, Errors.UnknownMemberIdError(), True, True, True, False),
- ((0, 9), True, Errors.IllegalGenerationError(), True, True, True, False),
- ((0, 9), True, Errors.RebalanceInProgressError(), True, True, True, False),
- ((0, 9), True, Exception(), True, True, False, True),
- ((0, 9), True, None, True, True, False, False),
+ 'api_version,group_id,enable,error,has_auto_commit,commit_offsets,warn,exc', [
+ ((0, 8), 'foobar', True, None, False, False, True, False),
+ ((0, 9), 'foobar', False, None, False, False, False, False),
+ ((0, 9), 'foobar', True, Errors.UnknownMemberIdError(), True, True, True, False),
+ ((0, 9), 'foobar', True, Errors.IllegalGenerationError(), True, True, True, False),
+ ((0, 9), 'foobar', True, Errors.RebalanceInProgressError(), True, True, True, False),
+ ((0, 9), 'foobar', True, Exception(), True, True, False, True),
+ ((0, 9), 'foobar', True, None, True, True, False, False),
+ ((0, 9), None, True, None, False, False, True, False),
])
-def test_maybe_auto_commit_offsets_sync(mocker, coordinator,
- api_version, enable, error, task_disable,
- commit_offsets, warn, exc):
- auto_commit_task = mocker.patch.object(coordinator, '_auto_commit_task')
- commit_sync = mocker.patch.object(coordinator, 'commit_offsets_sync',
- side_effect=error)
+def test_maybe_auto_commit_offsets_sync(mocker, api_version, group_id, enable,
+ error, has_auto_commit, commit_offsets,
+ warn, exc):
mock_warn = mocker.patch('kafka.coordinator.consumer.log.warning')
mock_exc = mocker.patch('kafka.coordinator.consumer.log.exception')
+ coordinator = ConsumerCoordinator(KafkaClient(), SubscriptionState(),
+ api_version=api_version,
+ enable_auto_commit=enable,
+ group_id=group_id)
+ commit_sync = mocker.patch.object(coordinator, 'commit_offsets_sync',
+ side_effect=error)
+ if has_auto_commit:
+ assert coordinator._auto_commit_task is not None
+ coordinator._auto_commit_task.enable()
+ assert coordinator._auto_commit_task._enabled is True
+ else:
+ assert coordinator._auto_commit_task is None
- coordinator.config['api_version'] = api_version
- coordinator.config['enable_auto_commit'] = enable
assert coordinator._maybe_auto_commit_offsets_sync() is None
- assert auto_commit_task.disable.call_count == (1 if task_disable else 0)
+
+ if has_auto_commit:
+ assert coordinator._auto_commit_task is not None
+ assert coordinator._auto_commit_task._enabled is False
+
assert commit_sync.call_count == (1 if commit_offsets else 0)
assert mock_warn.call_count == (1 if warn else 0)
assert mock_exc.call_count == (1 if exc else 0)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-pylint",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
dill==0.3.4
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==5.10.1
-e git+https://github.com/dpkp/kafka-python.git@b96f4ccf070109a022deb98b569e61d23e4e75b9#egg=kafka_python
lazy-object-proxy==1.7.1
lz4tools==1.3.1.2
mccabe==0.7.0
mock==5.2.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-pylint==0.18.0
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
wrapt==1.16.0
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- coverage==6.2
- cramjam==2.5.0
- dill==0.3.4
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==5.10.1
- lazy-object-proxy==1.7.1
- lz4tools==1.3.1.2
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-pylint==0.18.0
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- wrapt==1.16.0
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version0]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version7-None-True-None-False-False-True-False]"
]
| [
"test/test_coordinator.py::test_handle_offset_commit_response[response10-InvalidTopicError-False-False]"
]
| [
"test/test_coordinator.py::test_init",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version1]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version2]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version3]",
"test/test_coordinator.py::test_protocol_type",
"test/test_coordinator.py::test_group_protocols",
"test/test_coordinator.py::test_pattern_subscription[api_version0]",
"test/test_coordinator.py::test_pattern_subscription[api_version1]",
"test/test_coordinator.py::test_pattern_subscription[api_version2]",
"test/test_coordinator.py::test_pattern_subscription[api_version3]",
"test/test_coordinator.py::test_lookup_assignor",
"test/test_coordinator.py::test_join_complete",
"test/test_coordinator.py::test_subscription_listener",
"test/test_coordinator.py::test_subscription_listener_failure",
"test/test_coordinator.py::test_perform_assignment",
"test/test_coordinator.py::test_on_join_prepare",
"test/test_coordinator.py::test_need_rejoin",
"test/test_coordinator.py::test_refresh_committed_offsets_if_needed",
"test/test_coordinator.py::test_fetch_committed_offsets",
"test/test_coordinator.py::test_close",
"test/test_coordinator.py::test_commit_offsets_async",
"test/test_coordinator.py::test_commit_offsets_sync",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version0-foobar-True-None-False-False-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version1-foobar-False-None-False-False-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version2-foobar-True-error2-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version3-foobar-True-error3-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version4-foobar-True-error4-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version5-foobar-True-error5-True-True-False-True]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version6-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_send_offset_commit_request_fail",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version0-OffsetCommitRequest_v0]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version1-OffsetCommitRequest_v1]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version2-OffsetCommitRequest_v2]",
"test/test_coordinator.py::test_send_offset_commit_request_failure",
"test/test_coordinator.py::test_send_offset_commit_request_success",
"test/test_coordinator.py::test_handle_offset_commit_response[response0-GroupAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response1-OffsetMetadataTooLargeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response2-InvalidCommitOffsetSizeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response3-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response4-GroupCoordinatorNotAvailableError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response5-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response6-RequestTimedOutError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response7-UnknownMemberIdError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response8-IllegalGenerationError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response9-RebalanceInProgressError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response11-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_send_offset_fetch_request_fail",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version0-OffsetFetchRequest_v0]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version1-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version2-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_failure",
"test/test_coordinator.py::test_send_offset_fetch_request_success",
"test/test_coordinator.py::test_handle_offset_fetch_response[response0-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response1-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response2-UnknownMemberIdError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response3-IllegalGenerationError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response4-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response5-None-False-False]",
"test/test_coordinator.py::test_heartbeat"
]
| []
| Apache License 2.0 | 491 | [
"kafka/coordinator/consumer.py"
]
| [
"kafka/coordinator/consumer.py"
]
|
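A minimal reproduction sketch mirroring the snippet in the dpkp__kafka-python-620 problem statement above: with that patch, `enable_auto_commit` is forced to `False` when `group_id` is `None`, so `close()` no longer dereferences a missing auto-commit task. A reachable broker is assumed; `localhost:9092` is a placeholder address.

```python
# Sketch only: 'localhost:9092' stands in for a real Kafka broker.
from kafka import KafkaConsumer

consumer = KafkaConsumer('example',
                         bootstrap_servers=['localhost:9092'],
                         group_id=None)

# Before the patch this raised AttributeError inside
# _maybe_auto_commit_offsets_sync(); afterwards it returns cleanly.
consumer.close()
```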
|
andialbrecht__sqlparse-231 | ee5799fbb60e9739e42922861cd9f24990fc52dd | 2016-04-05 08:28:29 | 058f6fdcfdb0c84bedbaea0745d9b6b92cb20fe4 | diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index 4e45f65..68960d5 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -135,7 +135,8 @@ def group_comparison(tlist):
T.Name, T.Number, T.Number.Float,
T.Number.Integer, T.Literal,
T.Literal.Number.Integer, T.Name.Placeholder)
- or isinstance(token, (sql.Identifier, sql.Parenthesis))
+ or isinstance(token, (sql.Identifier, sql.Parenthesis,
+ sql.Function))
or (token.ttype is T.Keyword
and token.value.upper() in ['NULL', ]))
_group_left_right(tlist, T.Operator.Comparison, None, sql.Comparison,
| Functions are not grouped into a Comparison
I.e. `foo = DATE(bar.baz)` is not grouped. | andialbrecht/sqlparse | diff --git a/tests/test_grouping.py b/tests/test_grouping.py
index e846176..a6c4028 100644
--- a/tests/test_grouping.py
+++ b/tests/test_grouping.py
@@ -325,6 +325,29 @@ def test_comparison_with_strings(): # issue148
assert p.tokens[0].right.ttype == T.String.Single
+def test_comparison_with_functions(): # issue230
+ p = sqlparse.parse('foo = DATE(bar.baz)')[0]
+ assert len(p.tokens) == 1
+ assert isinstance(p.tokens[0], sql.Comparison)
+ assert len(p.tokens[0].tokens) == 5
+ assert p.tokens[0].left.value == 'foo'
+ assert p.tokens[0].right.value == 'DATE(bar.baz)'
+
+ p = sqlparse.parse('DATE(foo.bar) = DATE(bar.baz)')[0]
+ assert len(p.tokens) == 1
+ assert isinstance(p.tokens[0], sql.Comparison)
+ assert len(p.tokens[0].tokens) == 5
+ assert p.tokens[0].left.value == 'DATE(foo.bar)'
+ assert p.tokens[0].right.value == 'DATE(bar.baz)'
+
+ p = sqlparse.parse('DATE(foo.bar) = bar.baz')[0]
+ assert len(p.tokens) == 1
+ assert isinstance(p.tokens[0], sql.Comparison)
+ assert len(p.tokens[0].tokens) == 5
+ assert p.tokens[0].left.value == 'DATE(foo.bar)'
+ assert p.tokens[0].right.value == 'bar.baz'
+
+
@pytest.mark.parametrize('start', ['FOR', 'FOREACH'])
def test_forloops(start):
p = sqlparse.parse('%s foo in bar LOOP foobar END LOOP' % start)[0]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
-e git+https://github.com/andialbrecht/sqlparse.git@ee5799fbb60e9739e42922861cd9f24990fc52dd#egg=sqlparse
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: sqlparse
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- pytest-cov==6.0.0
prefix: /opt/conda/envs/sqlparse
| [
"tests/test_grouping.py::test_comparison_with_functions"
]
| []
| [
"tests/test_grouping.py::TestGrouping::test_alias",
"tests/test_grouping.py::TestGrouping::test_alias_case",
"tests/test_grouping.py::TestGrouping::test_alias_returns_none",
"tests/test_grouping.py::TestGrouping::test_assignment",
"tests/test_grouping.py::TestGrouping::test_comments",
"tests/test_grouping.py::TestGrouping::test_comparison_exclude",
"tests/test_grouping.py::TestGrouping::test_function",
"tests/test_grouping.py::TestGrouping::test_function_not_in",
"tests/test_grouping.py::TestGrouping::test_identifier_as_invalid",
"tests/test_grouping.py::TestGrouping::test_identifier_extended",
"tests/test_grouping.py::TestGrouping::test_identifier_function",
"tests/test_grouping.py::TestGrouping::test_identifier_invalid",
"tests/test_grouping.py::TestGrouping::test_identifier_list",
"tests/test_grouping.py::TestGrouping::test_identifier_list_case",
"tests/test_grouping.py::TestGrouping::test_identifier_list_other",
"tests/test_grouping.py::TestGrouping::test_identifier_list_with_inline_comments",
"tests/test_grouping.py::TestGrouping::test_identifier_name_wildcard",
"tests/test_grouping.py::TestGrouping::test_identifier_wildcard",
"tests/test_grouping.py::TestGrouping::test_identifiers",
"tests/test_grouping.py::TestGrouping::test_idlist_function",
"tests/test_grouping.py::TestGrouping::test_parenthesis",
"tests/test_grouping.py::TestGrouping::test_typecast",
"tests/test_grouping.py::TestGrouping::test_varchar",
"tests/test_grouping.py::TestGrouping::test_where",
"tests/test_grouping.py::TestStatement::test_get_type",
"tests/test_grouping.py::test_identifier_with_operators",
"tests/test_grouping.py::test_identifier_with_op_trailing_ws",
"tests/test_grouping.py::test_identifier_with_string_literals",
"tests/test_grouping.py::test_identifier_consumes_ordering",
"tests/test_grouping.py::test_comparison_with_keywords",
"tests/test_grouping.py::test_comparison_with_floats",
"tests/test_grouping.py::test_comparison_with_parenthesis",
"tests/test_grouping.py::test_comparison_with_strings",
"tests/test_grouping.py::test_forloops[FOR]",
"tests/test_grouping.py::test_forloops[FOREACH]",
"tests/test_grouping.py::test_nested_for",
"tests/test_grouping.py::test_begin",
"tests/test_grouping.py::test_nested_begin",
"tests/test_grouping.py::test_aliased_column_without_as",
"tests/test_grouping.py::test_qualified_function",
"tests/test_grouping.py::test_aliased_function_without_as",
"tests/test_grouping.py::test_aliased_literal_without_as"
]
| []
| BSD 3-Clause "New" or "Revised" License | 492 | [
"sqlparse/engine/grouping.py"
]
| [
"sqlparse/engine/grouping.py"
]
|
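A minimal check of the grouping behaviour fixed in andialbrecht__sqlparse-231 above; it simply restates the issue-230 assertions added in that record's test patch — a function call on either side of `=` is now grouped into a single `Comparison` token.

```python
import sqlparse
from sqlparse import sql

parsed = sqlparse.parse('foo = DATE(bar.baz)')[0]

comparison = parsed.tokens[0]            # the whole expression is one Comparison
assert isinstance(comparison, sql.Comparison)
assert comparison.left.value == 'foo'
assert comparison.right.value == 'DATE(bar.baz)'
```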
|
docker__docker-py-1022 | e743254b42080e6d199fc10f4812a42ecb8d536f | 2016-04-05 19:54:10 | 299ffadb95c90eb7134b9cee2648fb683912c303 | dnephin: LGTM when build is green. Not sure why they all failed, maybe just a jenkins issue? | diff --git a/docker/auth/auth.py b/docker/auth/auth.py
index eedb7944..d23e6f3c 100644
--- a/docker/auth/auth.py
+++ b/docker/auth/auth.py
@@ -117,7 +117,7 @@ def parse_auth(entries, raise_on_error=False):
conf = {}
for registry, entry in six.iteritems(entries):
- if not (isinstance(entry, dict) and 'auth' in entry):
+ if not isinstance(entry, dict):
log.debug(
'Config entry for key {0} is not auth config'.format(registry)
)
@@ -130,6 +130,16 @@ def parse_auth(entries, raise_on_error=False):
'Invalid configuration for registry {0}'.format(registry)
)
return {}
+ if 'auth' not in entry:
+ # Starting with engine v1.11 (API 1.23), an empty dictionary is
+ # a valid value in the auths config.
+ # https://github.com/docker/compose/issues/3265
+ log.debug(
+ 'Auth data for {0} is absent. Client might be using a '
+ 'credentials store instead.'
+ )
+ return {}
+
username, password = decode_auth(entry['auth'])
log.debug(
'Found entry (registry={0}, username={1})'
@@ -189,6 +199,9 @@ def load_config(config_path=None):
if data.get('HttpHeaders'):
log.debug("Found 'HttpHeaders' section")
res.update({'HttpHeaders': data['HttpHeaders']})
+ if data.get('credsStore'):
+ log.debug("Found 'credsStore' section")
+ res.update({'credsStore': data['credsStore']})
if res:
return res
else:
| Empty auth dictionary should be valid
docker/compose#3265 | docker/docker-py | diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index 921aae00..4ea40477 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -459,6 +459,5 @@ class LoadConfigTest(base.Cleanup, base.BaseTestCase):
with open(dockercfg_path, 'w') as f:
json.dump(config, f)
- self.assertRaises(
- errors.InvalidConfigFile, auth.load_config, dockercfg_path
- )
+ cfg = auth.load_config(dockercfg_path)
+ assert cfg == {}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 1.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/docker/docker-py.git@e743254b42080e6d199fc10f4812a42ecb8d536f#egg=docker_py
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_invalid_auth_dict"
]
| []
| [
"tests/unit/auth_test.py::RegressionTest::test_803_urlsafe_encode",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_hub_index_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_legacy_hub_index_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_invalid_index_name",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_dotted_hub_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_library_image",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost_with_username",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port_and_username",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_port",
"tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_username",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_explicit_none",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_registry",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_fully_explicit",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_hostname_only",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_legacy_config",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_match",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_trailing_slash",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_insecure_proto",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_secure_proto",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_protocol",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_path_wrong_proto",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_hub",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_legacy_hub",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_hub_image",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_library_image",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_private_registry",
"tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_unauthenticated_registry",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_utf8",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_auths",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_headers",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_no_file",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_unknown_keys",
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_with_random_name"
]
| []
| Apache License 2.0 | 493 | [
"docker/auth/auth.py"
]
| [
"docker/auth/auth.py"
]
|
thesquelched__suggestive-9 | b5c3a2f0a1734d8948778fbb0b252d3ea5059def | 2016-04-06 17:15:24 | b5c3a2f0a1734d8948778fbb0b252d3ea5059def | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 414275e..949e00f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+Version 0.4.1
+=============
+
+Bug Fixes
+---------
+- Add mpd connection retries to MPD loader
+- Fix error when log directory doesn't exist
+
Version 0.4.0
=============
diff --git a/README.md b/README.md
index c5c8d15..8bbf90d 100644
--- a/README.md
+++ b/README.md
@@ -57,6 +57,8 @@ Here is a minimal `suggestive` configuration file:
[lastfm]
user = my_lastfm_user
api_key = 0123456789abcdefghijklmnopqrstuv
+
+# For LastFM write access (optional)
api_secret = 141iojhu789uihy78uiho9uih89080
```
diff --git a/suggestive/_version.py b/suggestive/_version.py
index 0ed5398..90134aa 100644
--- a/suggestive/_version.py
+++ b/suggestive/_version.py
@@ -1,2 +1,2 @@
-__version_info__ = (0, 4, 0)
+__version_info__ = (0, 4, 1)
__version__ = '.'.join(str(piece) for piece in __version_info__)
diff --git a/suggestive/app.py b/suggestive/app.py
index d751041..1de628d 100644
--- a/suggestive/app.py
+++ b/suggestive/app.py
@@ -25,8 +25,7 @@ import urwid
import logging
from logging.handlers import RotatingFileHandler
import threading
-import os.path
-from os import remove
+import os
import sys
import gzip
@@ -509,7 +508,12 @@ def initialize_logging(conf):
for line in sf:
df.write(line)
- remove(source)
+ os.remove(source)
+
+ try:
+ os.makedirs(os.path.dirname(conf.log_file()))
+ except IOError:
+ pass
handler = RotatingFileHandler(
conf.log_file(),
diff --git a/suggestive/mstat.py b/suggestive/mstat.py
index d6d22a7..61ff8d8 100644
--- a/suggestive/mstat.py
+++ b/suggestive/mstat.py
@@ -125,19 +125,22 @@ def database_tracks_from_mpd(conf, tracks_info):
"""
Return the database Track object corresponding to track info from MPD
"""
+ track_filenames = [info['file'] for info in tracks_info]
+
with session_scope(conf, commit=False) as session:
- filenames = [info['file'] for info in tracks_info]
- db_tracks = session.query(Track).\
- options(
- subqueryload(Track.album),
- subqueryload(Track.artist),
- subqueryload(Track.lastfm_info)
- ).\
- filter(Track.filename.in_(filenames)).\
- all()
+ tracks_by_filename = {}
- tracks_by_filename = {t.filename: t for t in db_tracks}
- return [tracks_by_filename[info['file']] for info in tracks_info]
+ for chunk in partition(track_filenames, 128):
+ db_tracks = (session.query(Track).
+ options(subqueryload(Track.album),
+ subqueryload(Track.artist),
+ subqueryload(Track.lastfm_info)).
+ filter(Track.filename.in_(chunk)).
+ all())
+
+ tracks_by_filename.update({t.filename: t for t in db_tracks})
+
+ return [tracks_by_filename[filename] for filename in track_filenames]
def get_scrobbles(conf, limit, offset=None):
@@ -319,11 +322,11 @@ class ScrobbleLoader(object):
if not len(scrobbles):
return 0
- first = next(scrobbles)
+ first = scrobbles[0]
self.load_scrobble(session, first)
track = None # Necessary if there was only one scrobble total
- for track in scrobbles:
+ for track in scrobbles[1:]:
self.load_scrobble(session, track)
last = track or first
@@ -339,8 +342,17 @@ class MpdLoader(object):
Synchronizes the MPD and suggestive databases
"""
- def __init__(self, mpd):
- self.mpd = mpd
+ def __init__(self, conf):
+ self._conf = conf
+ self._mpd = initialize_mpd(conf)
+
+ @property
+ def mpd(self):
+ return self._mpd
+
+ @property
+ def conf(self):
+ return self._conf
def load_track(self, session, db_artist, db_album, info):
"""
@@ -442,6 +454,7 @@ class MpdLoader(object):
logger.debug('Deleted {} empty albums'.format(len(empty)))
+ @mpd_retry
def check_duplicates(self, session):
"""
Check for albums with duplicate tracks
@@ -492,11 +505,19 @@ class MpdLoader(object):
return by_artist_album
+ @mpd_retry
+ def _list_mpd_files(self):
+ return self.mpd.list('file')
+
+ @mpd_retry
+ def _mpd_info(self, path):
+ return self.mpd.listallinfo(path)
+
def load(self, session):
"""
Synchronize MPD and suggestive databases
"""
- files_in_mpd = set(self.mpd.list('file'))
+ files_in_mpd = set(self._list_mpd_files())
files_in_db = set(item.filename for item in session.query(
Track.filename).all())
@@ -509,8 +530,7 @@ class MpdLoader(object):
logger.debug('Missing files:\n {}'.format(
'\n '.join(missing)))
missing_info = list(
- chain.from_iterable(self.mpd.listallinfo(path)
- for path in missing))
+ chain.from_iterable(self._mpd_info(path) for path in missing))
by_artist_album = self.segregate_track_info(missing_info)
self.load_by_artist_album(session, by_artist_album)
@@ -750,8 +770,7 @@ def update_mpd(config):
albums_start = session.query(Album).count()
tracks_start = session.query(Track).count()
- mpdclient = initialize_mpd(config)
- mpd_loader = MpdLoader(mpdclient)
+ mpd_loader = MpdLoader(config)
mpd_loader.load(session)
session.commit()
diff --git a/tox.ini b/tox.ini
index d5c3ff3..410c552 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,10 +5,12 @@ envlist = py33,py34,py35
deps = -r{toxinidir}/test-requirements.txt
commands = py.test -m "not live" --ignore=build --ignore=suggestive/alembic \
- --pep8 --flakes --cov={envsitepackagesdir}/suggestive -rs -v {posargs}
+ --ignore=venv --pep8 --flakes \
+ --cov={envsitepackagesdir}/suggestive -rs -v {posargs}
[testenv:coverage]
deps = -r{toxinidir}/test-requirements.txt
commands = py.test -m "not live" --ignore=build --ignore=suggestive/alembic \
- --cov={envsitepackagesdir}/suggestive --cov-report=html
+ --ignore=venv --cov={envsitepackagesdir}/suggestive \
+ --cov-report=html
| crash on startup
When starting up suggestive, it crashes with a KeyError.
```
carnager@caprica ~/suggestive/suggestive > suggestive
Traceback (most recent call last):
File "/usr/bin/suggestive", line 9, in <module>
load_entry_point('suggestive==0.4.0', 'console_scripts', 'suggestive')()
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 586, in main
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 566, in run
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 223, in __init__
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 53, in __init__
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 97, in initialize_buffers
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 114, in create_playlist_buffer
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 471, in __init__
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 90, in __init__
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 220, in update_model
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 191, in track_models
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 165, in playlist_tracks
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/mstat.py", line 140, in database_tracks_from_mpd
File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/mstat.py", line 140, in <listcomp>
KeyError: 'flac/Motorpsycho/1993 Demon Box/CD 1/01-Waiting for the One.flac'
``` | thesquelched/suggestive | diff --git a/tests/test_mstat.py b/tests/test_mstat.py
new file mode 100644
index 0000000..77a90ea
--- /dev/null
+++ b/tests/test_mstat.py
@@ -0,0 +1,45 @@
+from unittest.mock import patch, MagicMock
+from suggestive import mstat
+
+
+class TestMpdLoader:
+
+ @patch('suggestive.mstat.initialize_mpd')
+ def test_check_duplicates(self, init_mpd):
+ init_mpd.side_effect = [
+ MagicMock(find=MagicMock(side_effect=OSError)),
+ MagicMock(),
+ ]
+
+ session = MagicMock()
+ (session.query.return_value.join.return_value.group_by.return_value
+ .having.return_value.all.return_value) = [MagicMock()]
+
+ loader = mstat.MpdLoader(None)
+ loader.check_duplicates(session)
+
+ assert init_mpd.call_count == 2
+
+ @patch('suggestive.mstat.initialize_mpd')
+ def test_list_mpd_files(self, init_mpd):
+ init_mpd.side_effect = [
+ MagicMock(list=MagicMock(side_effect=OSError)),
+ MagicMock(),
+ ]
+
+ loader = mstat.MpdLoader(None)
+ loader._list_mpd_files()
+
+ assert init_mpd.call_count == 2
+
+ @patch('suggestive.mstat.initialize_mpd')
+ def test_mpd_info(self, init_mpd):
+ init_mpd.side_effect = [
+ MagicMock(listallinfo=MagicMock(side_effect=OSError)),
+ MagicMock(),
+ ]
+
+ loader = mstat.MpdLoader(None)
+ loader._mpd_info(None)
+
+ assert init_mpd.call_count == 2
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 6
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"tox",
"pytest>=2.6.4",
"pytest-flakes>=0.2",
"pytest-pep8>=1.0.6",
"pytest-cov>=1.8.1",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.15.2
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
figgis==1.8.1
filelock==3.18.0
greenlet==3.1.1
idna==3.10
iniconfig==2.1.0
iso3166==2.1.1
Mako==1.3.9
MarkupSafe==3.0.2
packaging==24.2
pep8==1.7.1
platformdirs==4.3.7
pluggy==1.5.0
pyflakes==3.3.1
pylastfm==0.2.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-cache==1.0
pytest-cov==6.0.0
pytest-flakes==4.0.5
pytest-pep8==1.0.6
python-dateutil==2.9.0.post0
python-mpd2==3.1.1
requests==2.32.3
six==1.17.0
SQLAlchemy==2.0.40
-e git+https://github.com/thesquelched/suggestive.git@b5c3a2f0a1734d8948778fbb0b252d3ea5059def#egg=suggestive
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
urwid==2.6.16
virtualenv==20.29.3
wcwidth==0.2.13
| name: suggestive
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.15.2
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- figgis==1.8.1
- filelock==3.18.0
- greenlet==3.1.1
- idna==3.10
- iniconfig==2.1.0
- iso3166==2.1.1
- mako==1.3.9
- markupsafe==3.0.2
- packaging==24.2
- pep8==1.7.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pyflakes==3.3.1
- pylastfm==0.2.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-cache==1.0
- pytest-cov==6.0.0
- pytest-flakes==4.0.5
- pytest-pep8==1.0.6
- python-dateutil==2.9.0.post0
- python-mpd2==3.1.1
- requests==2.32.3
- six==1.17.0
- sqlalchemy==2.0.40
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- urwid==2.6.16
- virtualenv==20.29.3
- wcwidth==0.2.13
prefix: /opt/conda/envs/suggestive
| [
"tests/test_mstat.py::TestMpdLoader::test_check_duplicates",
"tests/test_mstat.py::TestMpdLoader::test_list_mpd_files",
"tests/test_mstat.py::TestMpdLoader::test_mpd_info"
]
| []
| []
| []
| BSD 2-Clause "Simplified" License | 494 | [
"suggestive/mstat.py",
"CHANGELOG.md",
"README.md",
"suggestive/_version.py",
"tox.ini",
"suggestive/app.py"
]
| [
"suggestive/mstat.py",
"CHANGELOG.md",
"README.md",
"suggestive/_version.py",
"tox.ini",
"suggestive/app.py"
]
|
|
enthought__okonomiyaki-182 | 38b9e3ecc18d2041f43a7681d05ed860b76b8d01 | 2016-04-07 13:30:38 | ced8e9ed8db05996bc8d296c5203a942b15804ef | diff --git a/okonomiyaki/runtimes/runtime_metadata.py b/okonomiyaki/runtimes/runtime_metadata.py
index ed9d415..d103630 100644
--- a/okonomiyaki/runtimes/runtime_metadata.py
+++ b/okonomiyaki/runtimes/runtime_metadata.py
@@ -209,7 +209,7 @@ def runtime_metadata_factory(path_or_file):
key = _factory_key_from_metadata(json_dict)
klass = _METADATA_KLASS_FACTORY.get(key)
if klass is None:
- msg = "No support for language '{1}' ('{0!r}')".format(*key)
+ msg = "No support for language '{1}' (metadata version '{0}')".format(*key)
raise UnsupportedMetadata(key[0], msg)
else:
return klass._from_path(path_or_file)
| MetadataVersion object should not be repr()'d in exception message
```python
'cpython-2.7.9+1-rh5_x86_64-gnu.runtime': No support for language 'whitespace' ('MetadataVersion(1, 0)')
```
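The difference is just the format conversion: `{0!r}` interpolates the `repr()` of the version object, while a plain `{0}` uses `str()`. A tiny stand-in class (not okonomiyaki's real `MetadataVersion`, whose details differ) shows the two renderings:

```python
# Stand-in for illustration only; the real MetadataVersion class differs,
# but the repr()/str() distinction is the same.
class MetadataVersion(object):
    def __init__(self, major, minor):
        self.major, self.minor = major, minor

    def __repr__(self):
        return "MetadataVersion({0}, {1})".format(self.major, self.minor)

    def __str__(self):
        return "{0}.{1}".format(self.major, self.minor)


v = MetadataVersion(1, 0)
print("('{0!r}')".format(v))  # ('MetadataVersion(1, 0)')  <- the reported message
print("'{0}'".format(v))      # '1.0'                      <- what str() renders
```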
https://github.com/enthought/okonomiyaki/blob/master/okonomiyaki/runtimes/runtime_metadata.py#L212 | enthought/okonomiyaki | diff --git a/okonomiyaki/runtimes/tests/test_runtime_metadata.py b/okonomiyaki/runtimes/tests/test_runtime_metadata.py
index 1fc8f65..1a3c660 100644
--- a/okonomiyaki/runtimes/tests/test_runtime_metadata.py
+++ b/okonomiyaki/runtimes/tests/test_runtime_metadata.py
@@ -203,7 +203,7 @@ class TestRuntimeMetadataFactory(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
UnsupportedMetadata,
- r"^No support for language 'r' \('MetadataVersion\(1, 0\)'\)"):
+ r"^No support for language 'r' \(metadata version '1.0'\)"):
runtime_metadata_factory(path)
# Given
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"dev_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
coverage==7.8.0
docutils==0.21.2
enum34==1.1.10
exceptiongroup==1.2.2
flake8==7.2.0
haas==0.9.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
mock==1.0.1
-e git+https://github.com/enthought/okonomiyaki.git@38b9e3ecc18d2041f43a7681d05ed860b76b8d01#egg=okonomiyaki
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
referencing==0.36.2
rpds-py==0.24.0
six==1.17.0
statistics==1.0.3.5
stevedore==1.9.0
testfixtures==8.3.0
tomli==2.2.1
typing_extensions==4.13.0
zipfile2==0.0.12
| name: okonomiyaki
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==25.3.0
- coverage==7.8.0
- docutils==0.21.2
- enum34==1.1.10
- exceptiongroup==1.2.2
- flake8==7.2.0
- haas==0.9.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- mock==1.0.1
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- referencing==0.36.2
- rpds-py==0.24.0
- six==1.17.0
- statistics==1.0.3.5
- stevedore==1.9.0
- testfixtures==8.3.0
- tomli==2.2.1
- typing-extensions==4.13.0
- zipfile2==0.0.12
prefix: /opt/conda/envs/okonomiyaki
| [
"okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestRuntimeMetadataFactory::test_invalid"
]
| []
| [
"okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestPythonMetadataV1::test_invalid",
"okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestPythonMetadataV1::test_simple",
"okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestPythonMetadataV1::test_simple_pypy",
"okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestJuliaRuntimeMetadataV1::test_simple",
"okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestRuntimeMetadataFactory::test_simple"
]
| []
| BSD License | 495 | [
"okonomiyaki/runtimes/runtime_metadata.py"
]
| [
"okonomiyaki/runtimes/runtime_metadata.py"
]
|
|
enthought__okonomiyaki-183 | 38b9e3ecc18d2041f43a7681d05ed860b76b8d01 | 2016-04-07 13:45:02 | ced8e9ed8db05996bc8d296c5203a942b15804ef | diff --git a/okonomiyaki/errors.py b/okonomiyaki/errors.py
index be3e7d6..fd003c4 100644
--- a/okonomiyaki/errors.py
+++ b/okonomiyaki/errors.py
@@ -47,8 +47,11 @@ class InvalidMetadataField(InvalidMetadata):
def __init__(self, name, value, *a, **kw):
self.name = name
self.value = value
- message = 'Metadata field is invalid ({0} = {1!r})'.format(
- name, value)
+ if value is InvalidMetadataField.undefined:
+ message = "Missing metadata field {0!r}".format(self.name)
+ else:
+ message = 'Invalid value for metadata field {0!r}: {1!r}'.format(
+ name, value)
super(InvalidMetadataField, self).__init__(message, *a, **kw)
diff --git a/okonomiyaki/file_formats/_egg_info.py b/okonomiyaki/file_formats/_egg_info.py
index 332168f..49fae61 100644
--- a/okonomiyaki/file_formats/_egg_info.py
+++ b/okonomiyaki/file_formats/_egg_info.py
@@ -347,8 +347,7 @@ def _guess_python_tag(pyver):
else:
m = _PYVER_RE.search(pyver)
if m is None:
- msg = "python_tag cannot be guessed for python = {0}"
- raise InvalidMetadata(msg.format(pyver))
+ raise InvalidMetadataField('python', pyver)
else:
major = m.groupdict()["major"]
minor = m.groupdict()["minor"]
| Wrong exception raised for invalid python attribute in egg metadata
The exception raised should be `InvalidMetadataField`, as used elsewhere in the module.
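A rough sketch of why the subclass matters: `InvalidMetadataField` carries the offending field name and value so callers can inspect them, while the plain `InvalidMetadata` shown in the traceback below only carries a message. The classes here are simplified from the error classes touched by the patch above, not the real okonomiyaki definitions:

```python
# Simplified shapes; the real okonomiyaki classes accept extra arguments.
class InvalidMetadata(Exception):
    pass


class InvalidMetadataField(InvalidMetadata):
    def __init__(self, name, value):
        self.name = name
        self.value = value
        msg = "Invalid value for metadata field {0!r}: {1!r}".format(name, value)
        super(InvalidMetadataField, self).__init__(msg)


try:
    raise InvalidMetadataField("python", "red")
except InvalidMetadataField as exc:
    print(exc.name, exc.value)  # python red -- the failing field and its value
```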
```
Traceback (most recent call last):
File "okonomiyaki/file_formats/_egg_info.py", line 733, in _from_egg
spec_depend = LegacySpecDepend._from_egg(path_or_file, sha256)
File "okonomiyaki/file_formats/_egg_info.py", line 463, in _from_egg
return _create_spec_depend(path_or_file)
File "okonomiyaki/file_formats/_egg_info.py", line 455, in _create_spec_depend
spec_depend_string, epd_platform, sha256
File "okonomiyaki/file_formats/_egg_info.py", line 647, in _normalized_info_from_string
raw_data[_TAG_PYTHON]
File "okonomiyaki/file_formats/_egg_info.py", line 327, in _guess_python_tag
raise InvalidMetadata(msg.format(pyver))
okonomiyaki.errors.InvalidMetadata: python_tag cannot be guessed for python = red
``` | enthought/okonomiyaki | diff --git a/okonomiyaki/file_formats/tests/test__egg_info.py b/okonomiyaki/file_formats/tests/test__egg_info.py
index 6765b8f..7760c64 100644
--- a/okonomiyaki/file_formats/tests/test__egg_info.py
+++ b/okonomiyaki/file_formats/tests/test__egg_info.py
@@ -301,10 +301,14 @@ packages = [
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadata,
- r'^python_tag cannot be guessed'):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'python': 'a.7'"
+ ) as exc:
LegacySpecDepend.from_string(s)
+ self.assertEqual(exc.exception.name, "python")
+ self.assertEqual(exc.exception.value, "a.7")
+
def test_blacklisted_platform(self):
# Given
egg = XZ_5_2_0_EGG
@@ -776,8 +780,8 @@ class TestParseRawspec(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r'^Metadata field is invalid \(name = <undefined>\)$'):
+ InvalidMetadataField, r"^Missing metadata field 'name'"
+ ):
parse_rawspec(spec_string)
def test_simple_1_2(self):
@@ -911,8 +915,9 @@ packages = [
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r'^Metadata field is invalid \(metadata_version = None\)$'):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'metadata_version': None"
+ ):
parse_rawspec(spec_s)
# Given a spec_string without some other metadata in >= 1.1
@@ -933,8 +938,8 @@ packages = [
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r'^Metadata field is invalid \(platform = <undefined>\)$'):
+ InvalidMetadataField, r"^Missing metadata field 'platform'"
+ ):
parse_rawspec(spec_s)
# Given a spec_string without some other metadata in >= 1.2
@@ -956,8 +961,8 @@ packages = [
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r'^Metadata field is invalid \(python_tag = <undefined>\)$'):
+ InvalidMetadataField, r"^Missing metadata field 'python_tag'"
+ ):
parse_rawspec(spec_s)
diff --git a/okonomiyaki/platforms/tests/test_python_implementation.py b/okonomiyaki/platforms/tests/test_python_implementation.py
index 8ccddd9..92d6cf9 100644
--- a/okonomiyaki/platforms/tests/test_python_implementation.py
+++ b/okonomiyaki/platforms/tests/test_python_implementation.py
@@ -50,8 +50,9 @@ class TestPythonImplementation(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r"^Metadata field is invalid \(python_tag = 'cp'\)$"):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'python_tag': 'cp'"
+ ):
PythonImplementation.from_string(s)
# Given
@@ -59,8 +60,9 @@ class TestPythonImplementation(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r"^Metadata field is invalid \(python_tag = 'py2'\)$"):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'python_tag': 'py2'$"
+ ):
PythonImplementation.from_string(s)
# Given
@@ -68,8 +70,9 @@ class TestPythonImplementation(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r"^Metadata field is invalid \(python_tag = 'py234'\)$"):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'python_tag': 'py234'$"
+ ):
PythonImplementation.from_string(s)
def test_simple(self):
@@ -125,8 +128,9 @@ class TestPythonImplementation(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r"^Metadata field is invalid \(python_tag = 'py3'\)$"):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'python_tag': 'py3'$"
+ ):
PythonImplementation.from_string(tag_string)
# Given
@@ -134,6 +138,7 @@ class TestPythonImplementation(unittest.TestCase):
# When/Then
with self.assertRaisesRegexp(
- InvalidMetadataField,
- r"^Metadata field is invalid \(python_tag = 'py345'\)$"):
+ InvalidMetadataField,
+ r"^Invalid value for metadata field 'python_tag': 'py345'$"
+ ):
PythonImplementation.from_string(tag_string)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"dev_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
coverage==7.8.0
docutils==0.21.2
enum34==1.1.10
exceptiongroup==1.2.2
flake8==7.2.0
haas==0.9.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
mock==1.0.1
-e git+https://github.com/enthought/okonomiyaki.git@38b9e3ecc18d2041f43a7681d05ed860b76b8d01#egg=okonomiyaki
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
referencing==0.36.2
rpds-py==0.24.0
six==1.17.0
statistics==1.0.3.5
stevedore==1.9.0
testfixtures==8.3.0
tomli==2.2.1
typing_extensions==4.13.0
zipfile2==0.0.12
| name: okonomiyaki
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==25.3.0
- coverage==7.8.0
- docutils==0.21.2
- enum34==1.1.10
- exceptiongroup==1.2.2
- flake8==7.2.0
- haas==0.9.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- mock==1.0.1
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- referencing==0.36.2
- rpds-py==0.24.0
- six==1.17.0
- statistics==1.0.3.5
- stevedore==1.9.0
- testfixtures==8.3.0
- tomli==2.2.1
- typing-extensions==4.13.0
- zipfile2==0.0.12
prefix: /opt/conda/envs/okonomiyaki
| [
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_error_python_to_python_tag",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_invalid_spec_strings",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_unsupported",
"okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_errors",
"okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_from_string"
]
| []
| [
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_spec_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_str",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg1",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg2",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_3",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_4",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_from_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_missing_spec_depend",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_to_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_unsupported_metadata_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_windows_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_extension_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_no_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg_pypi",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_to_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_all_none",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_32",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_64",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_32",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_64",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_python_implementation",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_27",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_34",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_35",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name_invalid",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_1",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_2",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_dependencies",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_none",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_blacklisted_pkg_info",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_blacklisted_python_tag",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_dump_blacklisted",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_dump_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_dump_simple",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_fixed_requirement",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_from_cross_platform_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_from_platform_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_no_pkg_info",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_platform_abi",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_platform_abi_no_python",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_simple",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_simple_non_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_strictness",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_support_higher_compatible_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_support_lower_compatible_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_to_spec_string",
"okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_abbreviations",
"okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_from_running_python",
"okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_simple"
]
| []
| BSD License | 496 | [
"okonomiyaki/errors.py",
"okonomiyaki/file_formats/_egg_info.py"
]
| [
"okonomiyaki/errors.py",
"okonomiyaki/file_formats/_egg_info.py"
]
|
|
dpkp__kafka-python-640 | 810f08b7996a15e65cdd8af6c1a7167c28f94646 | 2016-04-09 00:02:35 | 810f08b7996a15e65cdd8af6c1a7167c28f94646 | diff --git a/kafka/client_async.py b/kafka/client_async.py
index 0c22f90..36e808c 100644
--- a/kafka/client_async.py
+++ b/kafka/client_async.py
@@ -6,7 +6,14 @@ import heapq
import itertools
import logging
import random
-import select
+
+# selectors in stdlib as of py3.4
+try:
+ import selectors # pylint: disable=import-error
+except ImportError:
+ # vendored backport module
+ from . import selectors34 as selectors
+
import socket
import time
@@ -92,6 +99,7 @@ class KafkaClient(object):
self.cluster = ClusterMetadata(**self.config)
self._topics = set() # empty set will fetch all topic metadata
self._metadata_refresh_in_progress = False
+ self._selector = selectors.DefaultSelector()
self._conns = {}
self._connecting = set()
self._refresh_on_disconnects = True
@@ -101,6 +109,7 @@ class KafkaClient(object):
self._bootstrap(collect_hosts(self.config['bootstrap_servers']))
self._wake_r, self._wake_w = socket.socketpair()
self._wake_r.setblocking(False)
+ self._selector.register(self._wake_r, selectors.EVENT_READ)
def __del__(self):
self._wake_r.close()
@@ -160,11 +169,19 @@ class KafkaClient(object):
def _conn_state_change(self, node_id, conn):
if conn.connecting():
self._connecting.add(node_id)
+ self._selector.register(conn._sock, selectors.EVENT_WRITE)
elif conn.connected():
log.debug("Node %s connected", node_id)
if node_id in self._connecting:
self._connecting.remove(node_id)
+
+ try:
+ self._selector.unregister(conn._sock)
+ except KeyError:
+ pass
+ self._selector.register(conn._sock, selectors.EVENT_READ, conn)
+
if 'bootstrap' in self._conns and node_id != 'bootstrap':
bootstrap = self._conns.pop('bootstrap')
# XXX: make conn.close() require error to cause refresh
@@ -176,6 +193,10 @@ class KafkaClient(object):
elif conn.state is ConnectionStates.DISCONNECTING:
if node_id in self._connecting:
self._connecting.remove(node_id)
+ try:
+ self._selector.unregister(conn._sock)
+ except KeyError:
+ pass
if self._refresh_on_disconnects:
log.warning("Node %s connect failed -- refreshing metadata", node_id)
self.cluster.request_update()
@@ -388,45 +409,25 @@ class KafkaClient(object):
return responses
- def _poll(self, timeout, sleep=False):
+ def _poll(self, timeout, sleep=True):
# select on reads across all connected sockets, blocking up to timeout
- sockets = dict([(conn._sock, conn)
- for conn in six.itervalues(self._conns)
- if conn.state is ConnectionStates.CONNECTED
- and conn.in_flight_requests])
- if not sockets:
- # if sockets are connecting, we can wake when they are writeable
- if self._connecting:
- sockets = [self._conns[node]._sock for node in self._connecting]
- select.select([self._wake_r], sockets, [], timeout)
- elif timeout:
- if sleep:
- log.debug('Sleeping at %s for %s', time.time(), timeout)
- select.select([self._wake_r], [], [], timeout)
- log.debug('Woke up at %s', time.time())
- else:
- log.warning('_poll called with a non-zero timeout and'
- ' sleep=False -- but there was nothing to do.'
- ' This can cause high CPU usage during idle.')
- self._clear_wake_fd()
- return []
-
- # Add a private pipe fd to allow external wakeups
- fds = list(sockets.keys())
- fds.append(self._wake_r)
- ready, _, _ = select.select(fds, [], [], timeout)
-
+ assert self.in_flight_request_count() > 0 or self._connecting or sleep
responses = []
- for sock in ready:
- if sock == self._wake_r:
+ for key, events in self._selector.select(timeout):
+ if key.fileobj is self._wake_r:
+ self._clear_wake_fd()
+ continue
+ elif not (events & selectors.EVENT_READ):
continue
- conn = sockets[sock]
+ conn = key.data
while conn.in_flight_requests:
response = conn.recv() # Note: conn.recv runs callbacks / errbacks
+
+ # Incomplete responses are buffered internally
+ # while conn.in_flight_requests retains the request
if not response:
break
responses.append(response)
- self._clear_wake_fd()
return responses
def in_flight_request_count(self, node_id=None):
diff --git a/kafka/selectors34.py b/kafka/selectors34.py
new file mode 100644
index 0000000..541c29c
--- /dev/null
+++ b/kafka/selectors34.py
@@ -0,0 +1,635 @@
+# pylint: skip-file
+# vendored from https://github.com/berkerpeksag/selectors34
+# at commit 5195dd2cbe598047ad0a2e446a829546f6ffc9eb (v1.1)
+#
+# Original author: Charles-Francois Natali (c.f.natali[at]gmail.com)
+# Maintainer: Berker Peksag (berker.peksag[at]gmail.com)
+# Also see https://pypi.python.org/pypi/selectors34
+"""Selectors module.
+
+This module allows high-level and efficient I/O multiplexing, built upon the
+`select` module primitives.
+
+The following code adapted from trollius.selectors.
+"""
+
+
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple, Mapping
+from errno import EINTR
+import math
+import select
+import sys
+
+import six
+
+
+def _wrap_error(exc, mapping, key):
+ if key not in mapping:
+ return
+ new_err_cls = mapping[key]
+ new_err = new_err_cls(*exc.args)
+
+ # raise a new exception with the original traceback
+ if hasattr(exc, '__traceback__'):
+ traceback = exc.__traceback__
+ else:
+ traceback = sys.exc_info()[2]
+ six.reraise(new_err_cls, new_err, traceback)
+
+
+# generic events, that must be mapped to implementation-specific ones
+EVENT_READ = (1 << 0)
+EVENT_WRITE = (1 << 1)
+
+
+def _fileobj_to_fd(fileobj):
+ """Return a file descriptor from a file object.
+
+ Parameters:
+ fileobj -- file object or file descriptor
+
+ Returns:
+ corresponding file descriptor
+
+ Raises:
+ ValueError if the object is invalid
+ """
+ if isinstance(fileobj, six.integer_types):
+ fd = fileobj
+ else:
+ try:
+ fd = int(fileobj.fileno())
+ except (AttributeError, TypeError, ValueError):
+ raise ValueError("Invalid file object: "
+ "{0!r}".format(fileobj))
+ if fd < 0:
+ raise ValueError("Invalid file descriptor: {0}".format(fd))
+ return fd
+
+
+SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
+"""Object used to associate a file object to its backing file descriptor,
+selected event mask and attached data."""
+
+
+class _SelectorMapping(Mapping):
+ """Mapping of file objects to selector keys."""
+
+ def __init__(self, selector):
+ self._selector = selector
+
+ def __len__(self):
+ return len(self._selector._fd_to_key)
+
+ def __getitem__(self, fileobj):
+ try:
+ fd = self._selector._fileobj_lookup(fileobj)
+ return self._selector._fd_to_key[fd]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ def __iter__(self):
+ return iter(self._selector._fd_to_key)
+
+
+class BaseSelector(six.with_metaclass(ABCMeta)):
+ """Selector abstract base class.
+
+ A selector supports registering file objects to be monitored for specific
+ I/O events.
+
+ A file object is a file descriptor or any object with a `fileno()` method.
+ An arbitrary object can be attached to the file object, which can be used
+ for example to store context information, a callback, etc.
+
+ A selector can use various implementations (select(), poll(), epoll()...)
+ depending on the platform. The default `Selector` class uses the most
+ efficient implementation on the current platform.
+ """
+
+ @abstractmethod
+ def register(self, fileobj, events, data=None):
+ """Register a file object.
+
+ Parameters:
+ fileobj -- file object or file descriptor
+ events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE)
+ data -- attached data
+
+ Returns:
+ SelectorKey instance
+
+ Raises:
+ ValueError if events is invalid
+ KeyError if fileobj is already registered
+ OSError if fileobj is closed or otherwise is unacceptable to
+ the underlying system call (if a system call is made)
+
+ Note:
+ OSError may or may not be raised
+ """
+ raise NotImplementedError
+
+ @abstractmethod
+ def unregister(self, fileobj):
+ """Unregister a file object.
+
+ Parameters:
+ fileobj -- file object or file descriptor
+
+ Returns:
+ SelectorKey instance
+
+ Raises:
+ KeyError if fileobj is not registered
+
+ Note:
+ If fileobj is registered but has since been closed this does
+ *not* raise OSError (even if the wrapped syscall does)
+ """
+ raise NotImplementedError
+
+ def modify(self, fileobj, events, data=None):
+ """Change a registered file object monitored events or attached data.
+
+ Parameters:
+ fileobj -- file object or file descriptor
+ events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE)
+ data -- attached data
+
+ Returns:
+ SelectorKey instance
+
+ Raises:
+ Anything that unregister() or register() raises
+ """
+ self.unregister(fileobj)
+ return self.register(fileobj, events, data)
+
+ @abstractmethod
+ def select(self, timeout=None):
+ """Perform the actual selection, until some monitored file objects are
+ ready or a timeout expires.
+
+ Parameters:
+ timeout -- if timeout > 0, this specifies the maximum wait time, in
+ seconds
+ if timeout <= 0, the select() call won't block, and will
+ report the currently ready file objects
+ if timeout is None, select() will block until a monitored
+ file object becomes ready
+
+ Returns:
+ list of (key, events) for ready file objects
+ `events` is a bitwise mask of EVENT_READ|EVENT_WRITE
+ """
+ raise NotImplementedError
+
+ def close(self):
+ """Close the selector.
+
+ This must be called to make sure that any underlying resource is freed.
+ """
+ pass
+
+ def get_key(self, fileobj):
+ """Return the key associated to a registered file object.
+
+ Returns:
+ SelectorKey for this file object
+ """
+ mapping = self.get_map()
+ if mapping is None:
+ raise RuntimeError('Selector is closed')
+ try:
+ return mapping[fileobj]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ @abstractmethod
+ def get_map(self):
+ """Return a mapping of file objects to selector keys."""
+ raise NotImplementedError
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+
+class _BaseSelectorImpl(BaseSelector):
+ """Base selector implementation."""
+
+ def __init__(self):
+ # this maps file descriptors to keys
+ self._fd_to_key = {}
+ # read-only mapping returned by get_map()
+ self._map = _SelectorMapping(self)
+
+ def _fileobj_lookup(self, fileobj):
+ """Return a file descriptor from a file object.
+
+ This wraps _fileobj_to_fd() to do an exhaustive search in case
+ the object is invalid but we still have it in our map. This
+ is used by unregister() so we can unregister an object that
+ was previously registered even if it is closed. It is also
+ used by _SelectorMapping.
+ """
+ try:
+ return _fileobj_to_fd(fileobj)
+ except ValueError:
+ # Do an exhaustive search.
+ for key in self._fd_to_key.values():
+ if key.fileobj is fileobj:
+ return key.fd
+ # Raise ValueError after all.
+ raise
+
+ def register(self, fileobj, events, data=None):
+ if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
+ raise ValueError("Invalid events: {0!r}".format(events))
+
+ key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)
+
+ if key.fd in self._fd_to_key:
+ raise KeyError("{0!r} (FD {1}) is already registered"
+ .format(fileobj, key.fd))
+
+ self._fd_to_key[key.fd] = key
+ return key
+
+ def unregister(self, fileobj):
+ try:
+ key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+ return key
+
+ def modify(self, fileobj, events, data=None):
+ # TODO: Subclasses can probably optimize this even further.
+ try:
+ key = self._fd_to_key[self._fileobj_lookup(fileobj)]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+ if events != key.events:
+ self.unregister(fileobj)
+ key = self.register(fileobj, events, data)
+ elif data != key.data:
+ # Use a shortcut to update the data.
+ key = key._replace(data=data)
+ self._fd_to_key[key.fd] = key
+ return key
+
+ def close(self):
+ self._fd_to_key.clear()
+ self._map = None
+
+ def get_map(self):
+ return self._map
+
+ def _key_from_fd(self, fd):
+ """Return the key associated to a given file descriptor.
+
+ Parameters:
+ fd -- file descriptor
+
+ Returns:
+ corresponding key, or None if not found
+ """
+ try:
+ return self._fd_to_key[fd]
+ except KeyError:
+ return None
+
+
+class SelectSelector(_BaseSelectorImpl):
+ """Select-based selector."""
+
+ def __init__(self):
+ super(SelectSelector, self).__init__()
+ self._readers = set()
+ self._writers = set()
+
+ def register(self, fileobj, events, data=None):
+ key = super(SelectSelector, self).register(fileobj, events, data)
+ if events & EVENT_READ:
+ self._readers.add(key.fd)
+ if events & EVENT_WRITE:
+ self._writers.add(key.fd)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(SelectSelector, self).unregister(fileobj)
+ self._readers.discard(key.fd)
+ self._writers.discard(key.fd)
+ return key
+
+ if sys.platform == 'win32':
+ def _select(self, r, w, _, timeout=None):
+ r, w, x = select.select(r, w, w, timeout)
+ return r, w + x, []
+ else:
+ _select = select.select
+
+ def select(self, timeout=None):
+ timeout = None if timeout is None else max(timeout, 0)
+ ready = []
+ try:
+ r, w, _ = self._select(self._readers, self._writers, [], timeout)
+ except select.error as exc:
+ if exc.args[0] == EINTR:
+ return ready
+ else:
+ raise
+ r = set(r)
+ w = set(w)
+ for fd in r | w:
+ events = 0
+ if fd in r:
+ events |= EVENT_READ
+ if fd in w:
+ events |= EVENT_WRITE
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+
+if hasattr(select, 'poll'):
+
+ class PollSelector(_BaseSelectorImpl):
+ """Poll-based selector."""
+
+ def __init__(self):
+ super(PollSelector, self).__init__()
+ self._poll = select.poll()
+
+ def register(self, fileobj, events, data=None):
+ key = super(PollSelector, self).register(fileobj, events, data)
+ poll_events = 0
+ if events & EVENT_READ:
+ poll_events |= select.POLLIN
+ if events & EVENT_WRITE:
+ poll_events |= select.POLLOUT
+ self._poll.register(key.fd, poll_events)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(PollSelector, self).unregister(fileobj)
+ self._poll.unregister(key.fd)
+ return key
+
+ def select(self, timeout=None):
+ if timeout is None:
+ timeout = None
+ elif timeout <= 0:
+ timeout = 0
+ else:
+ # poll() has a resolution of 1 millisecond, round away from
+ # zero to wait *at least* timeout seconds.
+ timeout = int(math.ceil(timeout * 1e3))
+ ready = []
+ try:
+ fd_event_list = self._poll.poll(timeout)
+ except select.error as exc:
+ if exc.args[0] == EINTR:
+ return ready
+ else:
+ raise
+ for fd, event in fd_event_list:
+ events = 0
+ if event & ~select.POLLIN:
+ events |= EVENT_WRITE
+ if event & ~select.POLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+
+if hasattr(select, 'epoll'):
+
+ class EpollSelector(_BaseSelectorImpl):
+ """Epoll-based selector."""
+
+ def __init__(self):
+ super(EpollSelector, self).__init__()
+ self._epoll = select.epoll()
+
+ def fileno(self):
+ return self._epoll.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(EpollSelector, self).register(fileobj, events, data)
+ epoll_events = 0
+ if events & EVENT_READ:
+ epoll_events |= select.EPOLLIN
+ if events & EVENT_WRITE:
+ epoll_events |= select.EPOLLOUT
+ self._epoll.register(key.fd, epoll_events)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(EpollSelector, self).unregister(fileobj)
+ try:
+ self._epoll.unregister(key.fd)
+ except IOError:
+ # This can happen if the FD was closed since it
+ # was registered.
+ pass
+ return key
+
+ def select(self, timeout=None):
+ if timeout is None:
+ timeout = -1
+ elif timeout <= 0:
+ timeout = 0
+ else:
+ # epoll_wait() has a resolution of 1 millisecond, round away
+ # from zero to wait *at least* timeout seconds.
+ timeout = math.ceil(timeout * 1e3) * 1e-3
+
+ # epoll_wait() expects `maxevents` to be greater than zero;
+ # we want to make sure that `select()` can be called when no
+ # FD is registered.
+ max_ev = max(len(self._fd_to_key), 1)
+
+ ready = []
+ try:
+ fd_event_list = self._epoll.poll(timeout, max_ev)
+ except IOError as exc:
+ if exc.errno == EINTR:
+ return ready
+ else:
+ raise
+ for fd, event in fd_event_list:
+ events = 0
+ if event & ~select.EPOLLIN:
+ events |= EVENT_WRITE
+ if event & ~select.EPOLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def close(self):
+ self._epoll.close()
+ super(EpollSelector, self).close()
+
+
+if hasattr(select, 'devpoll'):
+
+ class DevpollSelector(_BaseSelectorImpl):
+ """Solaris /dev/poll selector."""
+
+ def __init__(self):
+ super(DevpollSelector, self).__init__()
+ self._devpoll = select.devpoll()
+
+ def fileno(self):
+ return self._devpoll.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(DevpollSelector, self).register(fileobj, events, data)
+ poll_events = 0
+ if events & EVENT_READ:
+ poll_events |= select.POLLIN
+ if events & EVENT_WRITE:
+ poll_events |= select.POLLOUT
+ self._devpoll.register(key.fd, poll_events)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(DevpollSelector, self).unregister(fileobj)
+ self._devpoll.unregister(key.fd)
+ return key
+
+ def select(self, timeout=None):
+ if timeout is None:
+ timeout = None
+ elif timeout <= 0:
+ timeout = 0
+ else:
+ # devpoll() has a resolution of 1 millisecond, round away from
+ # zero to wait *at least* timeout seconds.
+ timeout = math.ceil(timeout * 1e3)
+ ready = []
+ try:
+ fd_event_list = self._devpoll.poll(timeout)
+ except OSError as exc:
+ if exc.errno == EINTR:
+ return ready
+ else:
+ raise
+ for fd, event in fd_event_list:
+ events = 0
+ if event & ~select.POLLIN:
+ events |= EVENT_WRITE
+ if event & ~select.POLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def close(self):
+ self._devpoll.close()
+ super(DevpollSelector, self).close()
+
+
+if hasattr(select, 'kqueue'):
+
+ class KqueueSelector(_BaseSelectorImpl):
+ """Kqueue-based selector."""
+
+ def __init__(self):
+ super(KqueueSelector, self).__init__()
+ self._kqueue = select.kqueue()
+
+ def fileno(self):
+ return self._kqueue.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(KqueueSelector, self).register(fileobj, events, data)
+ if events & EVENT_READ:
+ kev = select.kevent(key.fd, select.KQ_FILTER_READ,
+ select.KQ_EV_ADD)
+ self._kqueue.control([kev], 0, 0)
+ if events & EVENT_WRITE:
+ kev = select.kevent(key.fd, select.KQ_FILTER_WRITE,
+ select.KQ_EV_ADD)
+ self._kqueue.control([kev], 0, 0)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(KqueueSelector, self).unregister(fileobj)
+ if key.events & EVENT_READ:
+ kev = select.kevent(key.fd, select.KQ_FILTER_READ,
+ select.KQ_EV_DELETE)
+ try:
+ self._kqueue.control([kev], 0, 0)
+ except OSError:
+ # This can happen if the FD was closed since it
+ # was registered.
+ pass
+ if key.events & EVENT_WRITE:
+ kev = select.kevent(key.fd, select.KQ_FILTER_WRITE,
+ select.KQ_EV_DELETE)
+ try:
+ self._kqueue.control([kev], 0, 0)
+ except OSError:
+ # See comment above.
+ pass
+ return key
+
+ def select(self, timeout=None):
+ timeout = None if timeout is None else max(timeout, 0)
+ max_ev = len(self._fd_to_key)
+ ready = []
+ try:
+ kev_list = self._kqueue.control(None, max_ev, timeout)
+ except OSError as exc:
+ if exc.errno == EINTR:
+ return ready
+ else:
+ raise
+ for kev in kev_list:
+ fd = kev.ident
+ flag = kev.filter
+ events = 0
+ if flag == select.KQ_FILTER_READ:
+ events |= EVENT_READ
+ if flag == select.KQ_FILTER_WRITE:
+ events |= EVENT_WRITE
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def close(self):
+ self._kqueue.close()
+ super(KqueueSelector, self).close()
+
+
+# Choose the best implementation, roughly:
+# epoll|kqueue|devpoll > poll > select.
+# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
+if 'KqueueSelector' in globals():
+ DefaultSelector = KqueueSelector
+elif 'EpollSelector' in globals():
+ DefaultSelector = EpollSelector
+elif 'DevpollSelector' in globals():
+ DefaultSelector = DevpollSelector
+elif 'PollSelector' in globals():
+ DefaultSelector = PollSelector
+else:
+ DefaultSelector = SelectSelector
| KafkaProducer fails when many sockets are opened
When using KafkaProducer in an environment with over 1024 open sockets we get the following issue:
```python
File "/srv/.../lib/python2.7/site-packages/kafka/producer/kafka.py", line 248, in __init__
self.config['api_version'] = client.check_version()
File "/srv/.../lib/python2.7/site-packages/kafka/client_async.py", line 639, in check_version
self.poll(future=f)
File "/srv/.../lib/python2.7/site-packages/kafka/client_async.py", line 367, in poll
responses.extend(self._poll(timeout, sleep=sleep))
File "/srv/.../lib/python2.7/site-packages/kafka/client_async.py", line 402, in _poll
ready, _, _ = select.select(fds, [], [], timeout)
ValueError: filedescriptor out of range in select()
```
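A small sketch of the workaround the patch above applies: sockets are registered with `selectors.DefaultSelector` (epoll/kqueue/poll where available, which have no fixed descriptor cap) instead of being passed as a raw fd list to `select.select()`, which is what runs into the limit described below:

```python
import selectors  # py3.4+ stdlib; the patch vendors a backport for older Pythons
import socket

sel = selectors.DefaultSelector()  # picks epoll/kqueue/poll where available

a, b = socket.socketpair()
sel.register(a, selectors.EVENT_READ, data="conn-a")
b.sendall(b"ping")

for key, events in sel.select(timeout=1.0):
    print(key.data, key.fileobj.recv(4))  # conn-a b'ping'
```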
AFAIK this is because the select() call is limited to FD_SETSIZE being 1024 on Linux. | dpkp/kafka-python | diff --git a/test/test_client_async.py b/test/test_client_async.py
index ad76aad..922e43c 100644
--- a/test/test_client_async.py
+++ b/test/test_client_async.py
@@ -1,3 +1,10 @@
+# selectors in stdlib as of py3.4
+try:
+ import selectors # pylint: disable=import-error
+except ImportError:
+ # vendored backport module
+ import kafka.selectors34 as selectors
+
import socket
import time
@@ -99,15 +106,19 @@ def test_maybe_connect(conn):
def test_conn_state_change(mocker, conn):
cli = KafkaClient()
+ sel = mocker.patch.object(cli, '_selector')
node_id = 0
conn.state = ConnectionStates.CONNECTING
cli._conn_state_change(node_id, conn)
assert node_id in cli._connecting
+ sel.register.assert_called_with(conn._sock, selectors.EVENT_WRITE)
conn.state = ConnectionStates.CONNECTED
cli._conn_state_change(node_id, conn)
assert node_id not in cli._connecting
+ sel.unregister.assert_called_with(conn._sock)
+ sel.register.assert_called_with(conn._sock, selectors.EVENT_READ, conn)
# Failure to connect should trigger metadata update
assert cli.cluster._need_update is False
@@ -115,6 +126,7 @@ def test_conn_state_change(mocker, conn):
cli._conn_state_change(node_id, conn)
assert node_id not in cli._connecting
assert cli.cluster._need_update is True
+ sel.unregister.assert_called_with(conn._sock)
conn.state = ConnectionStates.CONNECTING
cli._conn_state_change(node_id, conn)
@@ -167,8 +179,9 @@ def test_is_ready(mocker, conn):
assert not cli.is_ready(0)
-def test_close(conn):
+def test_close(mocker, conn):
cli = KafkaClient()
+ mocker.patch.object(cli, '_selector')
# Unknown node - silent
cli.close(2)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "six",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-pylint",
"pytest-sugar",
"pytest-mock",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
certifi==2021.5.30
coverage==6.2
dill==0.3.4
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==5.10.1
-e git+https://github.com/dpkp/kafka-python.git@810f08b7996a15e65cdd8af6c1a7167c28f94646#egg=kafka_python
lazy-object-proxy==1.7.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-pylint==0.18.0
pytest-sugar==0.9.6
six @ file:///tmp/build/80754af9/six_1644875935023/work
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
wrapt==1.16.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- coverage==6.2
- dill==0.3.4
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==5.10.1
- lazy-object-proxy==1.7.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-pylint==0.18.0
- pytest-sugar==0.9.6
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_client_async.py::test_conn_state_change",
"test/test_client_async.py::test_close"
]
| []
| [
"test/test_client_async.py::test_bootstrap_servers[None-expected_hosts0]",
"test/test_client_async.py::test_bootstrap_servers[foobar:1234-expected_hosts1]",
"test/test_client_async.py::test_bootstrap_servers[fizzbuzz-expected_hosts2]",
"test/test_client_async.py::test_bootstrap_servers[foo:12,bar:34-expected_hosts3]",
"test/test_client_async.py::test_bootstrap_servers[bootstrap4-expected_hosts4]",
"test/test_client_async.py::test_bootstrap_success",
"test/test_client_async.py::test_bootstrap_failure",
"test/test_client_async.py::test_can_connect",
"test/test_client_async.py::test_maybe_connect",
"test/test_client_async.py::test_ready",
"test/test_client_async.py::test_is_ready",
"test/test_client_async.py::test_is_disconnected",
"test/test_client_async.py::test_send",
"test/test_client_async.py::test_poll",
"test/test_client_async.py::test__poll",
"test/test_client_async.py::test_in_flight_request_count",
"test/test_client_async.py::test_least_loaded_node",
"test/test_client_async.py::test_set_topics",
"test/test_client_async.py::test_maybe_refresh_metadata",
"test/test_client_async.py::test_schedule",
"test/test_client_async.py::test_unschedule"
]
| []
| Apache License 2.0 | 497 | [
"kafka/client_async.py",
"kafka/selectors34.py"
]
| [
"kafka/client_async.py",
"kafka/selectors34.py"
]
|
|
QuickPay__quickpay-python-client-4 | 79e52fcd5075d58e5da7692aa5850d567af1d824 | 2016-04-11 12:21:59 | 79e52fcd5075d58e5da7692aa5850d567af1d824 | diff --git a/README.md b/README.md
index e6b320f..c4a0949 100644
--- a/README.md
+++ b/README.md
@@ -9,11 +9,11 @@ Installation
Add to your `requirements.txt`
- quickpay
+ quickpay-api-client
or install via [pip](https://github.com/pypa/pip):
- $ pip install quickpay-python-client
+ $ pip install quickpay-api-client
It is currently tested with Python >= 2.5 and Python 3.
@@ -31,14 +31,14 @@ First you should create a client instance that is anonymous or authorized with a
To initialise an anonymous client:
```
-from quickpay import QPClient
+from quickpay_api_client import QPClient
client = QPClient()
```
To initialise a client with QuickPay Api Key:
```
-from quickpay import QPClient
+from quickpay_api_client import QPClient
secret = ":{0}".format(os.environ['QUICKPAY_API_KEY'])
client = QPClient(secret)
```
@@ -46,7 +46,7 @@ client = QPClient(secret)
Or you can provide login credentials like:
```
-from quickpay import QPClient
+from quickpay_api_client import QPClient
secret= "{0}:{1}".format(os.environ['QUICKPAY_LOGIN'], os.environ['QUICKPAY_PASSWORD'])
client = QPClient(secret)
```
@@ -81,7 +81,7 @@ By default (get|post|patch|put|delete) will return JSON parsed body on success (
You can listen for any api error like:
```
-from quickpay.exceptions import ApiError
+from quickpay_api_client.exceptions import ApiError
try:
client.post('/payments', currency='DKK', order_id='1212')
...
diff --git a/quickpay/__init__.py b/quickpay_api_client/__init__.py
similarity index 100%
rename from quickpay/__init__.py
rename to quickpay_api_client/__init__.py
diff --git a/quickpay/api.py b/quickpay_api_client/api.py
similarity index 96%
rename from quickpay/api.py
rename to quickpay_api_client/api.py
index 1032900..a0a4777 100644
--- a/quickpay/api.py
+++ b/quickpay_api_client/api.py
@@ -7,8 +7,8 @@ import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
-from quickpay import exceptions
-import quickpay
+from quickpay_api_client import exceptions
+import quickpay_api_client
class QPAdapter(HTTPAdapter):
@@ -49,7 +49,7 @@ class QPApi(object):
headers = {
"Accept-Version": 'v%s' % self.api_version,
- "User-Agent": "quickpay-python-client, v%s" % quickpay.__version__
+ "User-Agent": "quickpay-python-client, v%s" % quickpay_api_client.__version__
}
if self.secret:
diff --git a/setup.py b/setup.py
index 3c9281c..e1faeb7 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ reqs = ['requests>=2.5']
tests_requires = ['nose', 'httpretty', 'mock']
version = ''
-with open('quickpay/__init__.py', 'r') as fd:
+with open('quickpay_api_client/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(),
re.MULTILINE).group(1)
@@ -18,13 +18,13 @@ if not version:
raise RuntimeError('Cannot find version information')
setup(
- name='quickpay',
+ name='quickpay-api-client',
version=version,
description='Python client for QuickPay API',
author_email="[email protected]",
author="QuickPay Developers",
url="https://github.com/QuickPay/quickpay-python-client",
- packages=['quickpay'],
+ packages=['quickpay_api_client'],
install_requires=reqs,
tests_requires=tests_requires,
test_suite='nose.collector')
| Not available on PIP
Looks like this isn't really available on pip even though the readme file states so.
```
pip install quickpay-python-client
Collecting quickpay-python-client
Could not find any downloads that satisfy the requirement quickpay-python-client
No distributions at all found for quickpay-python-client
```
Also, the readme says to add "quickpay" to the requirements file, but its install command gives the package name as quickpay-python-client.
And neither of them works. | QuickPay/quickpay-python-client | diff --git a/quickpay/client.py b/quickpay_api_client/client.py
similarity index 100%
rename from quickpay/client.py
rename to quickpay_api_client/client.py
diff --git a/quickpay/exceptions.py b/quickpay_api_client/exceptions.py
similarity index 100%
rename from quickpay/exceptions.py
rename to quickpay_api_client/exceptions.py
diff --git a/quickpay/tests/__init__.py b/quickpay_api_client/tests/__init__.py
similarity index 100%
rename from quickpay/tests/__init__.py
rename to quickpay_api_client/tests/__init__.py
diff --git a/quickpay/tests/api_tests.py b/quickpay_api_client/tests/api_tests.py
similarity index 94%
rename from quickpay/tests/api_tests.py
rename to quickpay_api_client/tests/api_tests.py
index a82edff..4fdd36b 100644
--- a/quickpay/tests/api_tests.py
+++ b/quickpay_api_client/tests/api_tests.py
@@ -1,8 +1,8 @@
import base64, json
from nose.tools import assert_equal, assert_raises
import requests
-from quickpay.api import QPApi
-from quickpay.exceptions import ApiError
+from quickpay_api_client.api import QPApi
+from quickpay_api_client.exceptions import ApiError
import httpretty
diff --git a/quickpay/tests/client_tests.py b/quickpay_api_client/tests/client_tests.py
similarity index 93%
rename from quickpay/tests/client_tests.py
rename to quickpay_api_client/tests/client_tests.py
index 06eb4de..46d3e25 100644
--- a/quickpay/tests/client_tests.py
+++ b/quickpay_api_client/tests/client_tests.py
@@ -1,6 +1,6 @@
from nose.tools import assert_equal, assert_raises
-from quickpay.api import QPApi
-from quickpay import QPClient
+from quickpay_api_client.api import QPApi
+from quickpay_api_client import QPClient
from mock import MagicMock
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
exceptiongroup==1.2.2
httpretty==0.8.8
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
mock==1.0.1
nose==1.3.6
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
-e git+https://github.com/QuickPay/quickpay-python-client.git@79e52fcd5075d58e5da7692aa5850d567af1d824#egg=quickpay
requests==2.31.0
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: quickpay-python-client
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- httpretty==0.8.8
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- mock==1.0.1
- nose==1.3.6
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- requests==2.31.0
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/quickpay-python-client
| [
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_api_instance",
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_get_delegation",
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_post_delegation",
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_delete_delegation",
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_put_delegation",
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_patch_delegation",
"quickpay_api_client/tests/client_tests.py::TestQPClient::test_non_http_method"
]
| [
"quickpay_api_client/tests/api_tests.py::TestApi::test_perform_success",
"quickpay_api_client/tests/api_tests.py::TestApi::test_perform_failure",
"quickpay_api_client/tests/api_tests.py::TestApi::test_headers",
"quickpay_api_client/tests/api_tests.py::TestApi::test_perform_when_raw"
]
| []
| []
| MIT License | 498 | [
"quickpay/api.py",
"quickpay/__init__.py",
"README.md",
"setup.py"
]
| [
"setup.py",
"quickpay_api_client/__init__.py",
"quickpay_api_client/api.py",
"README.md"
]
|
|
juju-solutions__charms.reactive-65 | 3540030b9b142787f3f7fd16a14ed33d18b4d7a1 | 2016-04-12 16:08:23 | 59b07bd9447d8a4cb027ea2515089216b8d20549 | diff --git a/charms/reactive/bus.py b/charms/reactive/bus.py
index 9676244..885e498 100644
--- a/charms/reactive/bus.py
+++ b/charms/reactive/bus.py
@@ -267,7 +267,10 @@ class Handler(object):
"""
Lazily evaluate the args.
"""
- return list(chain.from_iterable(self._args))
+ if not hasattr(self, '_args_evaled'):
+ # cache the args in case handler is re-invoked due to states change
+ self._args_evaled = list(chain.from_iterable(self._args))
+ return self._args_evaled
def invoke(self):
"""
| Handler args are dropped if called more than once per hook
Because the args are implemented as generators, any time the list is evaluated after the first it ends up being empty, leading to a "missing arg" error:
```
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined Traceback (most recent call last):
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined File "/var/lib/juju/agents/unit-nn-0/charm/hooks/namenode-cluster-relation-joined", line 19, in <module>
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined main()
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined File "/usr/local/lib/python3.4/dist-packages/charms/reactive/__init__.py", line 73, in main
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined bus.dispatch()
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined File "/usr/local/lib/python3.4/dist-packages/charms/reactive/bus.py", line 418, in dispatch
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined _invoke(other_handlers)
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined File "/usr/local/lib/python3.4/dist-packages/charms/reactive/bus.py", line 401, in _invoke
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined handler.invoke()
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined File "/usr/local/lib/python3.4/dist-packages/charms/reactive/bus.py", line 277, in invoke
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined self._action(*args)
2016-04-12 15:15:51 INFO namenode-cluster-relation-joined TypeError: report_status() missing 1 required positional argument: 'datanode'
``` | juju-solutions/charms.reactive | diff --git a/tests/test_decorators.py b/tests/test_decorators.py
index 836753a..fdbb512 100644
--- a/tests/test_decorators.py
+++ b/tests/test_decorators.py
@@ -103,6 +103,11 @@ class TestReactiveDecorators(unittest.TestCase):
action.assert_called_once_with('rel')
self.assertEqual(reactive.bus.Handler._CONSUMED_STATES, set(['foo', 'bar', 'qux']))
+ action.reset_mock()
+ assert handler.test()
+ handler.invoke()
+ action.assert_called_once_with('rel')
+
@mock.patch.object(reactive.decorators, 'when_all')
def test_when(self, when_all):
@reactive.when('foo', 'bar', 'qux')
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"nose",
"flake8",
"ipython",
"ipdb",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backcall==0.2.0
certifi==2021.5.30
charmhelpers==1.2.1
-e git+https://github.com/juju-solutions/charms.reactive.git@3540030b9b142787f3f7fd16a14ed33d18b4d7a1#egg=charms.reactive
coverage==6.2
decorator==5.1.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
ipdb==0.13.13
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.7.0
mock==5.2.0
netaddr==0.10.1
nose==1.3.7
packaging==21.3
parso==0.7.1
pbr==6.1.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pyaml==23.5.8
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
six==1.17.0
tomli==1.2.3
traitlets==4.3.3
typing_extensions==4.1.1
wcwidth==0.2.13
zipp==3.6.0
| name: charms.reactive
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backcall==0.2.0
- charmhelpers==1.2.1
- coverage==6.2
- decorator==5.1.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipdb==0.13.13
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==5.2.0
- netaddr==0.10.1
- nose==1.3.7
- packaging==21.3
- parso==0.7.1
- pbr==6.1.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pyaml==23.5.8
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- six==1.17.0
- tomli==1.2.3
- traitlets==4.3.3
- typing-extensions==4.1.1
- wcwidth==0.2.13
- zipp==3.6.0
prefix: /opt/conda/envs/charms.reactive
| [
"tests/test_decorators.py::TestReactiveDecorators::test_when_all"
]
| []
| [
"tests/test_decorators.py::TestReactiveDecorators::test_multi",
"tests/test_decorators.py::TestReactiveDecorators::test_not_unless",
"tests/test_decorators.py::TestReactiveDecorators::test_only_once",
"tests/test_decorators.py::TestReactiveDecorators::test_when",
"tests/test_decorators.py::TestReactiveDecorators::test_when_any",
"tests/test_decorators.py::TestReactiveDecorators::test_when_file_changed",
"tests/test_decorators.py::TestReactiveDecorators::test_when_none",
"tests/test_decorators.py::TestReactiveDecorators::test_when_not",
"tests/test_decorators.py::TestReactiveDecorators::test_when_not_all"
]
| [
"tests/test_decorators.py::TestReactiveDecorators::test_hook"
]
| Apache License 2.0 | 501 | [
"charms/reactive/bus.py"
]
| [
"charms/reactive/bus.py"
]
|
|
nickstenning__honcho-174 | 5af4cf1d98926fa63eae711e6a89f8e2ef5d8539 | 2016-04-13 06:21:38 | 5af4cf1d98926fa63eae711e6a89f8e2ef5d8539 | diff --git a/honcho/command.py b/honcho/command.py
index 3796a39..9a7323d 100644
--- a/honcho/command.py
+++ b/honcho/command.py
@@ -39,6 +39,7 @@ def _add_common_args(parser, with_defaults=False):
help='procfile directory (default: .)')
parser.add_argument('-f', '--procfile',
metavar='FILE',
+ default=suppress,
help='procfile path (default: Procfile)')
parser.add_argument('-v', '--version',
action='version',
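The one-line fix above gives the shared `-f`/`--procfile` option a `suppress` default. Honcho registers the common options on both the top-level parser and the per-command parsers through `_add_common_args`, and a copy of the option that keeps a regular default can end up writing that default over the value the other copy actually parsed. What `argparse.SUPPRESS` changes is easy to see with a toy parser (this is not honcho's real wiring, just the argparse behaviour it relies on); the bug report follows.

```python
import argparse

# With a regular default the dest always lands in the namespace, even when
# the option was never given; with SUPPRESS an unused option leaves no
# attribute behind, so it cannot overwrite a value written elsewhere.
plain = argparse.ArgumentParser()
plain.add_argument('-f', '--procfile', default=None)
print(vars(plain.parse_args([])))                       # {'procfile': None}

quiet = argparse.ArgumentParser()
quiet.add_argument('-f', '--procfile', default=argparse.SUPPRESS)
print(vars(quiet.parse_args([])))                       # {}
print(vars(quiet.parse_args(['-f', 'Procfile.dev'])))   # {'procfile': 'Procfile.dev'}
```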
| -f argument before command is suppressed
System information: Mac OS X, 10.11.1
Honcho version: 0.7.0
With the latest release of honcho (0.7.0), I've noticed that the -f argument is processed properly **after** a command, but not **before** a command. -e and -d arguments don't exhibit this order-dependent behavior.
Examples:
```
$ honcho -f my_procfile check
2016-04-12 23:49:22 [45546] [ERROR] Procfile does not exist or is not a file
$ honcho check -f my_procfile
2016-04-12 23:49:24 [45548] [INFO] Valid procfile detected (postgres, rabbit, redis, flower)
``` | nickstenning/honcho | diff --git a/tests/integration/test_start.py b/tests/integration/test_start.py
index 2388bda..73c8693 100644
--- a/tests/integration/test_start.py
+++ b/tests/integration/test_start.py
@@ -53,6 +53,39 @@ def test_start_env_procfile(testenv):
assert 'mongoose' in out
[email protected]('testenv', [{
+ 'Procfile': 'foo: {0} test.py'.format(python_bin),
+ 'Procfile.dev': 'bar: {0} test_dev.py'.format(python_bin),
+ 'test.py': script,
+ 'test_dev.py': textwrap.dedent("""
+ from __future__ import print_function
+ print("mongoose")
+ """)
+}], indirect=True)
+def test_start_procfile_after_command(testenv):
+ # Regression test for #173: Ensure that -f argument can be provided after
+ # command
+ ret, out, err = testenv.run_honcho(['start', '-f', 'Procfile.dev'])
+
+ assert 'mongoose' in out
+
+
[email protected]('testenv', [{
+ 'Procfile': 'foo: {0} test.py'.format(python_bin),
+ 'Procfile.dev': 'bar: {0} test_dev.py'.format(python_bin),
+ 'test.py': script,
+ 'test_dev.py': textwrap.dedent("""
+ from __future__ import print_function
+ print("mongoose")
+ """)
+}], indirect=True)
+def test_start_procfile_before_command(testenv):
+ # Test case for #173: Ensure that -f argument can be provided before command
+ ret, out, err = testenv.run_honcho(['-f', 'Procfile.dev', 'start'])
+
+ assert 'mongoose' in out
+
+
@pytest.mark.parametrize('testenv', [{
'Procfile': 'foo: {0} test.py'.format(python_bin),
'test.py': 'import sys; sys.exit(42)',
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[export]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"jinja2",
"flake8"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2021.5.30
flake8==5.0.4
-e git+https://github.com/nickstenning/honcho.git@5af4cf1d98926fa63eae711e6a89f8e2ef5d8539#egg=honcho
importlib-metadata==4.2.0
Jinja2==2.7.3
MarkupSafe==2.0.1
mccabe==0.7.0
pycodestyle==2.9.1
pyflakes==2.5.0
typing_extensions==4.1.1
zipp==3.6.0
| name: honcho
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- flake8==5.0.4
- importlib-metadata==4.2.0
- jinja2==2.7.3
- markupsafe==2.0.1
- mccabe==0.7.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/honcho
| [
"tests/integration/test_start.py::test_start_procfile_before_command[testenv0]"
]
| []
| [
"tests/integration/test_start.py::test_start[testenv0]",
"tests/integration/test_start.py::test_start_env[testenv0]",
"tests/integration/test_start.py::test_start_env_procfile[testenv0]",
"tests/integration/test_start.py::test_start_procfile_after_command[testenv0]",
"tests/integration/test_start.py::test_start_returncode[testenv0]"
]
| []
| MIT License | 502 | [
"honcho/command.py"
]
| [
"honcho/command.py"
]
|
|
guykisel__inline-plz-129 | a7c89d5b65df2486ccf78f43fcffdc18dff76bd7 | 2016-04-15 01:05:40 | a7c89d5b65df2486ccf78f43fcffdc18dff76bd7 | raphaelcastaneda: Hmm...
https://travis-ci.org/guykisel/inline-plz/jobs/123219118#L726
` File "/home/travis/build/guykisel/inline-plz/inlineplz/linters/__init__.py", line 364, in lint
linter_messages = config.get('parser')().parse(output)
File "/home/travis/build/guykisel/inline-plz/inlineplz/parsers/rflint.py", line 14, in parse
for line in lint_data.split('\n'):
AttributeError: 'list' object has no attribute 'split'`
| diff --git a/inlineplz/linters/__init__.py b/inlineplz/linters/__init__.py
index 16cce9f..5cafd13 100644
--- a/inlineplz/linters/__init__.py
+++ b/inlineplz/linters/__init__.py
@@ -175,13 +175,13 @@ LINTERS = {
'rflint': {
'install': [['pip', 'install', 'robotframework-lint']],
'help': ['rflint', '--help'],
- 'run': ['rflint', '.'],
- 'rundefault': ['rflint', '-A', '{config_dir}/.rflint', '.'],
+ 'run': ['rflint'],
+ 'rundefault': ['rflint', '-A', '{config_dir}/.rflint'],
'dotfiles': ['.rflint'],
'parser': parsers.RobotFrameworkLintParser,
'language': 'robotframework',
'autorun': True,
- 'run_per_file': False
+ 'run_per_file': True
},
}
diff --git a/inlineplz/parsers/rflint.py b/inlineplz/parsers/rflint.py
index 56206ae..c4e8500 100644
--- a/inlineplz/parsers/rflint.py
+++ b/inlineplz/parsers/rflint.py
@@ -11,16 +11,17 @@ class RobotFrameworkLintParser(ParserBase):
def parse(self, lint_data):
messages = set()
current_file = None
- for line in lint_data.split('\n'):
- try:
- if line.startswith('+'):
- current_file = line.split(' ')[1].strip()
- continue
- else:
- _, position, message = line.split(':')
- line_number, _ = position.split(',')
- messages.add((current_file, int(line_number), message.strip()))
- except ValueError:
- pass
+ for _, output in lint_data:
+ for line in output.split('\n'):
+ try:
+ if line.startswith('+'):
+ current_file = line[2:]
+ continue
+ else:
+ _, position, message = line.split(':')
+ line_number, _ = position.split(',')
+ messages.add((current_file, int(line_number), message.strip()))
+ except ValueError:
+ pass
return messages
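With `run_per_file` flipped to `True`, the parser above now receives a sequence of `(filename, output)` pairs from per-file rflint runs instead of one blob, so rflint is only ever pointed at Robot Framework files. A standalone restatement of the parsing idea, fed a made-up rflint-style sample (the sample text is illustrative, not captured from a real run); the original report follows.

```python
def parse_rflint(per_file_output):
    """Simplified stand-alone version of the per-file parsing shown above."""
    messages = set()
    current_file = None
    for _, output in per_file_output:
        for line in output.split('\n'):
            try:
                if line.startswith('+'):
                    current_file = line[2:]
                    continue
                _, position, message = line.split(':')
                line_number, _ = position.split(',')
                messages.add((current_file, int(line_number), message.strip()))
            except ValueError:
                pass
    return messages

sample = [
    ('suites/login.robot',
     '+ suites/login.robot\n'
     'W: 12, 100: Line is too long (exceeds 100 characters) (LineTooLong)'),
]
print(parse_rflint(sample))
# {('suites/login.robot', 12, 'Line is too long (exceeds 100 characters) (LineTooLong)')}
```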
| rflint is running against all file types
```
Message:
Path: node_modules/stylint/node_modules/yargs/node_modules/cliui/node_modules/wordwrap/test/idleness.txt
Line number: 19
Content: set([u'rflint: Line is too long (exceeds 250 characters) (LineTooLong)'])
```
fyi @raphaelcastaneda | guykisel/inline-plz | diff --git a/tests/parsers/test_rflint.py b/tests/parsers/test_rflint.py
index fcee628..7d164ea 100644
--- a/tests/parsers/test_rflint.py
+++ b/tests/parsers/test_rflint.py
@@ -17,7 +17,14 @@ rflint_path = os.path.join(
def test_rflint():
with codecs.open(rflint_path, encoding='utf-8', errors='replace') as inputfile:
- messages = sorted(list(rflint.RobotFrameworkLintParser().parse(inputfile.read())))
- assert messages[-1][2] == 'Too few steps (1) in keyword (TooFewKeywordSteps)'
- assert messages[-1][1] == 30
- assert messages[-1][0] == './Functional_Requirements/keywords.robot'
+ test_data = inputfile.readlines()
+ test_filename = ''
+ test_input = []
+ for line in test_data:
+ if line.startswith('+'):
+ test_filename = line.split(' ')[-1].strip()
+ test_input.append((test_filename, line))
+ messages = sorted(list(rflint.RobotFrameworkLintParser().parse(test_input)))
+ assert messages[-1][2] == 'Too few steps (1) in keyword (TooFewKeywordSteps)'
+ assert messages[-1][1] == 30
+ assert messages[-1][0] == './Functional_Requirements/keywords.robot'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
cryptography==44.0.2
exceptiongroup==1.2.2
github3.py==4.0.1
idna==3.10
iniconfig==2.1.0
-e git+https://github.com/guykisel/inline-plz.git@a7c89d5b65df2486ccf78f43fcffdc18dff76bd7#egg=inlineplz
packaging==24.2
pluggy==1.5.0
pycparser==2.22
PyJWT==2.10.1
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
scandir==1.10.0
six==1.17.0
tomli==2.2.1
unidiff==0.7.5
uritemplate==4.1.1
urllib3==2.3.0
xmltodict==0.14.2
| name: inline-plz
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- github3-py==4.0.1
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pyjwt==2.10.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- scandir==1.10.0
- six==1.17.0
- tomli==2.2.1
- unidiff==0.7.5
- uritemplate==4.1.1
- urllib3==2.3.0
- xmltodict==0.14.2
prefix: /opt/conda/envs/inline-plz
| [
"tests/parsers/test_rflint.py::test_rflint"
]
| []
| []
| []
| ISC License | 503 | [
"inlineplz/parsers/rflint.py",
"inlineplz/linters/__init__.py"
]
| [
"inlineplz/parsers/rflint.py",
"inlineplz/linters/__init__.py"
]
|
pysmt__pysmt-243 | 2abfb4538fa93379f9b2671bce30f27967dedbcf | 2016-04-15 16:24:27 | 2abfb4538fa93379f9b2671bce30f27967dedbcf | diff --git a/pysmt/formula.py b/pysmt/formula.py
index 0a4dcd3..4fdcbfe 100644
--- a/pysmt/formula.py
+++ b/pysmt/formula.py
@@ -477,7 +477,7 @@ class FormulaManager(object):
A -> !(B \/ C)
B -> !(C)
"""
- args = list(*args)
+ args = self._polymorph_args_to_tuple(args)
return self.And(self.Or(*args),
self.AtMostOne(*args))
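The failing statement was `args = list(*args)`: star-unpacking hands every positional argument to `list()`, which accepts at most one iterable, so the old code worked for `ExactlyOne([a, b, c])` but not for `ExactlyOne(a, b, c)`. The replacement routes through the manager's usual argument normalization instead. Plain Python is enough to see the difference (no pysmt import needed); the original report follows.

```python
args = ('x', 'y', 'z')          # stands in for the symbols in ExactlyOne(s1, s2, s3)

print(list(args))               # ['x', 'y', 'z'] -- the shape the normalized call produces
try:
    list(*args)                 # same as list('x', 'y', 'z'): too many arguments
except TypeError as exc:
    print(exc)                  # "list() takes at most 1 argument" (wording varies by version)

single = (['x', 'y', 'z'],)     # stands in for ExactlyOne([s1, s2, s3])
print(list(*single))            # ['x', 'y', 'z'] -- why the bug stayed hidden for list arguments
```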
| issue in processing arguments for ExactlyOne()
Hi,
I noticed that instantiating shortcuts.ExactlyOne() throws e.g.
TypeError: list() takes at most 1 argument (3 given)
at formula.py, line 480. I believe args shouldn't be unpacked in the list constructor.
Martin | pysmt/pysmt | diff --git a/pysmt/test/test_formula.py b/pysmt/test/test_formula.py
index 0ddbec4..924328a 100644
--- a/pysmt/test/test_formula.py
+++ b/pysmt/test/test_formula.py
@@ -494,6 +494,16 @@ class TestFormulaManager(TestCase):
self.assertEqual(c, self.mgr.Bool(False),
"ExactlyOne should not allow 2 symbols to be True")
+ s1 = self.mgr.Symbol("x")
+ s2 = self.mgr.Symbol("x")
+ f1 = self.mgr.ExactlyOne((s for s in [s1,s2]))
+ f2 = self.mgr.ExactlyOne([s1,s2])
+ f3 = self.mgr.ExactlyOne(s1,s2)
+
+ self.assertEqual(f1,f2)
+ self.assertEqual(f2,f3)
+
+
@skipIfNoSolverForLogic(QF_BOOL)
def test_exactly_one_is_sat(self):
symbols = [ self.mgr.Symbol("s%d"%i, BOOL) for i in range(5) ]
diff --git a/pysmt/test/test_regressions.py b/pysmt/test/test_regressions.py
index 2fecd04..67bfc3d 100644
--- a/pysmt/test/test_regressions.py
+++ b/pysmt/test/test_regressions.py
@@ -311,6 +311,14 @@ class TestRegressions(TestCase):
close_l = get_closer_smtlib_logic(logics.BOOL)
self.assertEqual(close_l, logics.LRA)
+ def test_exactly_one_unpacking(self):
+ s1,s2 = Symbol("x"), Symbol("y")
+ f1 = ExactlyOne((s for s in [s1,s2]))
+ f2 = ExactlyOne([s1,s2])
+ f3 = ExactlyOne(s1,s2)
+
+ self.assertEqual(f1,f2)
+ self.assertEqual(f2,f3)
if __name__ == "__main__":
main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"nose",
"nose-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cov-core==1.15.0
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
nose-cov==1.6
packaging==24.2
pluggy==1.5.0
-e git+https://github.com/pysmt/pysmt.git@2abfb4538fa93379f9b2671bce30f27967dedbcf#egg=PySMT
pytest==8.3.5
pytest-cov==6.0.0
six==1.17.0
tomli==2.2.1
| name: pysmt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cov-core==1.15.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- nose-cov==1.6
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/pysmt
| [
"pysmt/test/test_formula.py::TestFormulaManager::test_exactly_one",
"pysmt/test/test_regressions.py::TestRegressions::test_exactly_one_unpacking"
]
| [
"pysmt/test/test_regressions.py::TestRegressions::test_parse_define_fun",
"pysmt/test/test_regressions.py::TestRegressions::test_parse_define_fun_bind"
]
| [
"pysmt/test/test_formula.py::TestFormulaManager::test_0arity_function",
"pysmt/test/test_formula.py::TestFormulaManager::test_all_different",
"pysmt/test/test_formula.py::TestFormulaManager::test_and_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_at_most_one",
"pysmt/test/test_formula.py::TestFormulaManager::test_bconstant",
"pysmt/test/test_formula.py::TestFormulaManager::test_constant",
"pysmt/test/test_formula.py::TestFormulaManager::test_div_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_div_non_linear",
"pysmt/test/test_formula.py::TestFormulaManager::test_equals",
"pysmt/test/test_formula.py::TestFormulaManager::test_equals_or_iff",
"pysmt/test/test_formula.py::TestFormulaManager::test_formula_in_formula_manager",
"pysmt/test/test_formula.py::TestFormulaManager::test_function",
"pysmt/test/test_formula.py::TestFormulaManager::test_ge_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_ge_node_type",
"pysmt/test/test_formula.py::TestFormulaManager::test_get_or_create_symbol",
"pysmt/test/test_formula.py::TestFormulaManager::test_get_symbol",
"pysmt/test/test_formula.py::TestFormulaManager::test_gt_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_gt_node_type",
"pysmt/test/test_formula.py::TestFormulaManager::test_iff_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_implies_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_infix",
"pysmt/test/test_formula.py::TestFormulaManager::test_infix_extended",
"pysmt/test/test_formula.py::TestFormulaManager::test_is_term",
"pysmt/test/test_formula.py::TestFormulaManager::test_ite",
"pysmt/test/test_formula.py::TestFormulaManager::test_le_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_le_node_type",
"pysmt/test/test_formula.py::TestFormulaManager::test_lt_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_lt_node_type",
"pysmt/test/test_formula.py::TestFormulaManager::test_max",
"pysmt/test/test_formula.py::TestFormulaManager::test_min",
"pysmt/test/test_formula.py::TestFormulaManager::test_minus_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_new_fresh_symbol",
"pysmt/test/test_formula.py::TestFormulaManager::test_not_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_or_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_pickling",
"pysmt/test/test_formula.py::TestFormulaManager::test_plus_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_symbol",
"pysmt/test/test_formula.py::TestFormulaManager::test_times_node",
"pysmt/test/test_formula.py::TestFormulaManager::test_toReal",
"pysmt/test/test_formula.py::TestFormulaManager::test_typing",
"pysmt/test/test_formula.py::TestFormulaManager::test_xor",
"pysmt/test/test_formula.py::TestShortcuts::test_shortcut_is_using_global_env",
"pysmt/test/test_regressions.py::TestRegressions::test_cnf_as_set",
"pysmt/test/test_regressions.py::TestRegressions::test_dependencies_not_includes_toreal",
"pysmt/test/test_regressions.py::TestRegressions::test_determinism",
"pysmt/test/test_regressions.py::TestRegressions::test_empty_string_symbol",
"pysmt/test/test_regressions.py::TestRegressions::test_exactlyone_w_generator",
"pysmt/test/test_regressions.py::TestRegressions::test_infix_notation_wrong_le",
"pysmt/test/test_regressions.py::TestRegressions::test_is_one",
"pysmt/test/test_regressions.py::TestRegressions::test_multiple_declaration_w_same_functiontype",
"pysmt/test/test_regressions.py::TestRegressions::test_multiple_exit",
"pysmt/test/test_regressions.py::TestRegressions::test_qf_bool_smt2",
"pysmt/test/test_regressions.py::TestRegressions::test_simplifying_int_plus_changes_type_of_expression",
"pysmt/test/test_regressions.py::TestRegressions::test_smtlib_info_quoting",
"pysmt/test/test_regressions.py::TestRegressions::test_substitute_memoization",
"pysmt/test/test_regressions.py::TestRegressions::test_substitute_to_real"
]
| []
| Apache License 2.0 | 504 | [
"pysmt/formula.py"
]
| [
"pysmt/formula.py"
]
|
|
pystorm__pystorm-31 | 506568d7033169811423a08f3af83c15abe5fd3e | 2016-04-15 18:03:37 | 7f0d6b320e9943082bcdfd6de93d161a3b174e12 | diff --git a/pystorm/bolt.py b/pystorm/bolt.py
index 4e60879..c6a1538 100644
--- a/pystorm/bolt.py
+++ b/pystorm/bolt.py
@@ -128,7 +128,7 @@ class Bolt(Component):
pass
def emit(self, tup, stream=None, anchors=None, direct_task=None,
- need_task_ids=True):
+ need_task_ids=False):
"""Emit a new Tuple to a stream.
:param tup: the Tuple payload to send to Storm, should contain only
@@ -146,13 +146,13 @@ class Bolt(Component):
:param direct_task: the task to send the Tuple to.
:type direct_task: int
:param need_task_ids: indicate whether or not you'd like the task IDs
- the Tuple was emitted (default: ``True``).
+ the Tuple was emitted (default: ``False``).
:type need_task_ids: bool
- :returns: a ``list`` of task IDs that the Tuple was sent to. Note that
- when specifying direct_task, this will be equal to
- ``[direct_task]``. If you specify ``need_task_ids=False``,
- this function will return ``None``.
+ :returns: ``None``, unless ``need_task_ids=True``, in which case it will
+ be a ``list`` of task IDs that the Tuple was sent to if. Note
+ that when specifying direct_task, this will be equal to
+ ``[direct_task]``.
"""
if anchors is None:
anchors = self._current_tups if self.auto_anchor else []
diff --git a/pystorm/component.py b/pystorm/component.py
index e4b3764..757767f 100644
--- a/pystorm/component.py
+++ b/pystorm/component.py
@@ -364,7 +364,7 @@ class Component(object):
'level': level})
def emit(self, tup, tup_id=None, stream=None, anchors=None,
- direct_task=None, need_task_ids=True):
+ direct_task=None, need_task_ids=False):
"""Emit a new Tuple to a stream.
:param tup: the Tuple payload to send to Storm, should contain only
@@ -385,13 +385,13 @@ class Component(object):
:param direct_task: the task to send the Tuple to.
:type direct_task: int
:param need_task_ids: indicate whether or not you'd like the task IDs
- the Tuple was emitted (default: ``True``).
+ the Tuple was emitted (default: ``False``).
:type need_task_ids: bool
- :returns: a ``list`` of task IDs that the Tuple was sent to. Note that
- when specifying direct_task, this will be equal to
- ``[direct_task]``. If you specify ``need_task_ids=False``,
- this function will return ``None``.
+ :returns: ``None``, unless ``need_task_ids=True``, in which case it will
+ be a ``list`` of task IDs that the Tuple was sent to if. Note
+ that when specifying direct_task, this will be equal to
+ ``[direct_task]``.
"""
if not isinstance(tup, (list, tuple)):
raise TypeError('All Tuples must be either lists or tuples, '
diff --git a/pystorm/spout.py b/pystorm/spout.py
index 5290c9a..cc21903 100644
--- a/pystorm/spout.py
+++ b/pystorm/spout.py
@@ -54,7 +54,7 @@ class Spout(Component):
raise NotImplementedError()
def emit(self, tup, tup_id=None, stream=None, direct_task=None,
- need_task_ids=True):
+ need_task_ids=False):
"""Emit a spout Tuple message.
:param tup: the Tuple to send to Storm, should contain only
@@ -70,14 +70,13 @@ class Spout(Component):
direct emit.
:type direct_task: int
:param need_task_ids: indicate whether or not you'd like the task IDs
- the Tuple was emitted (default:
- ``True``).
+ the Tuple was emitted (default: ``False``).
:type need_task_ids: bool
- :returns: a ``list`` of task IDs that the Tuple was sent to. Note that
- when specifying direct_task, this will be equal to
- ``[direct_task]``. If you specify ``need_task_ids=False``,
- this function will return ``None``.
+ :returns: ``None``, unless ``need_task_ids=True``, in which case it will
+ be a ``list`` of task IDs that the Tuple was sent to if. Note
+ that when specifying direct_task, this will be equal to
+ ``[direct_task]``.
"""
return super(Spout, self).emit(tup, tup_id=tup_id, stream=stream,
direct_task=direct_task,
| need_task_ids should default to False
We currently default to True for backward compatibility reasons, but I would venture to guess that vast majority of users do not care about the IDs of the tasks that their emitted tuples went to. Disabling is a nice free speedup, but if we made it disabled by default, we would be cutting out extra work for most users. | pystorm/pystorm | diff --git a/test/pystorm/test_bolt.py b/test/pystorm/test_bolt.py
index 277ac1a..24cb868 100644
--- a/test/pystorm/test_bolt.py
+++ b/test/pystorm/test_bolt.py
@@ -110,7 +110,8 @@ class BoltTests(unittest.TestCase):
send_message_mock.assert_called_with(self.bolt, {'command': 'emit',
'anchors': [],
'tuple': [1, 2, 3],
- 'task': 'other_bolt'})
+ 'task': 'other_bolt',
+ 'need_task_ids': False})
@patch.object(Bolt, 'send_message', autospec=True)
def test_ack_id(self, send_message_mock):
diff --git a/test/pystorm/test_spout.py b/test/pystorm/test_spout.py
index 2b4e1a9..a3d7a15 100644
--- a/test/pystorm/test_spout.py
+++ b/test/pystorm/test_spout.py
@@ -49,7 +49,8 @@ class SpoutTests(unittest.TestCase):
self.spout.emit([1, 2, 3], direct_task='other_spout')
send_message_mock.assert_called_with(self.spout, {'command': 'emit',
'tuple': [1, 2, 3],
- 'task': 'other_spout'})
+ 'task': 'other_spout',
+ 'need_task_ids': False})
# Reliable emit
self.spout.emit([1, 2, 3], tup_id='foo', need_task_ids=False)
send_message_mock.assert_called_with(self.spout, {'command': 'emit',
@@ -62,7 +63,8 @@ class SpoutTests(unittest.TestCase):
send_message_mock.assert_called_with(self.spout, {'command': 'emit',
'tuple': [1, 2, 3],
'task': 'other_spout',
- 'id': 'foo'})
+ 'id': 'foo',
+ 'need_task_ids': False})
@patch.object(Spout, 'read_command', autospec=True,
return_value={'command': 'ack', 'id': 1234})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"mock",
"pytest",
"unittest2"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
linecache2==1.0.0
mock==5.2.0
msgpack-python==0.5.6
packaging==24.2
pluggy==1.5.0
-e git+https://github.com/pystorm/pystorm.git@506568d7033169811423a08f3af83c15abe5fd3e#egg=pystorm
pytest==8.3.5
pytest-timeout==2.3.1
simplejson==3.20.1
six==1.17.0
tomli==2.2.1
traceback2==1.4.0
unittest2==1.1.0
| name: pystorm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- linecache2==1.0.0
- mock==5.2.0
- msgpack-python==0.5.6
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-timeout==2.3.1
- simplejson==3.20.1
- six==1.17.0
- tomli==2.2.1
- traceback2==1.4.0
- unittest2==1.1.0
prefix: /opt/conda/envs/pystorm
| [
"test/pystorm/test_bolt.py::BoltTests::test_emit_direct",
"test/pystorm/test_spout.py::SpoutTests::test_emit"
]
| []
| [
"test/pystorm/test_bolt.py::BoltTests::test_ack_id",
"test/pystorm/test_bolt.py::BoltTests::test_ack_tuple",
"test/pystorm/test_bolt.py::BoltTests::test_auto_ack_off",
"test/pystorm/test_bolt.py::BoltTests::test_auto_ack_on",
"test/pystorm/test_bolt.py::BoltTests::test_auto_anchor_off",
"test/pystorm/test_bolt.py::BoltTests::test_auto_anchor_on",
"test/pystorm/test_bolt.py::BoltTests::test_auto_anchor_override",
"test/pystorm/test_bolt.py::BoltTests::test_auto_fail_off",
"test/pystorm/test_bolt.py::BoltTests::test_auto_fail_on",
"test/pystorm/test_bolt.py::BoltTests::test_emit_basic",
"test/pystorm/test_bolt.py::BoltTests::test_emit_stream_anchors",
"test/pystorm/test_bolt.py::BoltTests::test_fail_id",
"test/pystorm/test_bolt.py::BoltTests::test_fail_tuple",
"test/pystorm/test_bolt.py::BoltTests::test_heartbeat_response",
"test/pystorm/test_bolt.py::BoltTests::test_process_tick",
"test/pystorm/test_bolt.py::BoltTests::test_read_tuple",
"test/pystorm/test_bolt.py::BoltTests::test_read_tuple_named_fields",
"test/pystorm/test_bolt.py::BoltTests::test_run",
"test/pystorm/test_bolt.py::BoltTests::test_setup_component",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_auto_ack_off",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_auto_ack_on",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_auto_fail_off",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_auto_fail_on",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_auto_fail_partial",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_batching",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_group_key",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_heartbeat_response",
"test/pystorm/test_bolt.py::BatchingBoltTests::test_process_tick",
"test/pystorm/test_spout.py::SpoutTests::test_ack",
"test/pystorm/test_spout.py::SpoutTests::test_fail",
"test/pystorm/test_spout.py::SpoutTests::test_next_tuple"
]
| []
| Apache License 2.0 | 505 | [
"pystorm/bolt.py",
"pystorm/component.py",
"pystorm/spout.py"
]
| [
"pystorm/bolt.py",
"pystorm/component.py",
"pystorm/spout.py"
]
|
|
falconry__falcon-755 | f44fba57ef88ec9c11223c2765d49fb3573305a0 | 2016-04-16 22:54:49 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | fxfitz: LGTM; needs update.
jmvrbanac: :+1: | diff --git a/falcon/api.py b/falcon/api.py
index 5b51de0..7957ca0 100644
--- a/falcon/api.py
+++ b/falcon/api.py
@@ -483,7 +483,18 @@ class API(object):
path = req.path
method = req.method
- resource, method_map, params = self._router.find(path)
+
+ route = self._router.find(path)
+
+ if route is not None:
+ resource, method_map, params = route
+ else:
+ # NOTE(kgriffs): Older routers may indicate that no route
+ # was found by returning (None, None, None). Therefore, we
+ # normalize resource as the flag to indicate whether or not
+ # a route was found, for the sake of backwards-compat.
+ resource = None
+
if resource is not None:
try:
responder = method_map[method]
@@ -491,7 +502,6 @@ class API(object):
responder = falcon.responders.bad_request
else:
params = {}
- resource = None
for pattern, sink in self._sinks:
m = pattern.match(path)
diff --git a/falcon/routing/compiled.py b/falcon/routing/compiled.py
index 9177edb..057cf6e 100644
--- a/falcon/routing/compiled.py
+++ b/falcon/routing/compiled.py
@@ -93,7 +93,7 @@ class CompiledRouter(object):
if node is not None:
return node.resource, node.method_map, params
else:
- return None, None, None
+ return None
def _compile_tree(self, nodes, indent=1, level=0, fast_return=True):
"""Generates Python code for a routing tree or subtree."""
| router.find() cannot return None
Contrary to http://falcon.readthedocs.org/en/latest/api/routing.html it seems like custom routers' `.find()` method cannot return None. AFAICT, `_get_responder()` unpacks `self._router.find(path)` without checking for a None return value and thus an exception occurs. However, returning `(None, None, None)` seems to work.
I don't know if the docstring is wrong or the method. | falconry/falcon | diff --git a/tests/test_custom_router.py b/tests/test_custom_router.py
index 8ddb824..cbe1b48 100644
--- a/tests/test_custom_router.py
+++ b/tests/test_custom_router.py
@@ -24,13 +24,31 @@ class TestCustomRouter(testing.TestBase):
resp.body = '{"status": "ok"}'
class CustomRouter(object):
- def find(self, *args, **kwargs):
- return resource, {'GET': resource}, {}
+ def __init__(self):
+ self.reached_backwards_compat = False
- self.api = falcon.API(router=CustomRouter())
+ def find(self, uri):
+ if uri == '/test':
+ return resource, {'GET': resource}, {}
+
+ if uri == '/404/backwards-compat':
+ self.reached_backwards_compat = True
+ return (None, None, None)
+
+ return None
+
+ router = CustomRouter()
+ self.api = falcon.API(router=router)
body = self.simulate_request('/test')
self.assertEqual(body, [b'{"status": "ok"}'])
+ for uri in ('/404', '/404/backwards-compat'):
+ body = self.simulate_request(uri)
+ self.assertFalse(body)
+ self.assertEqual(self.srmock.status, falcon.HTTP_404)
+
+ self.assertTrue(router.reached_backwards_compat)
+
def test_can_pass_additional_params_to_add_route(self):
check = []
diff --git a/tests/test_default_router.py b/tests/test_default_router.py
index 84af78f..dec8a8e 100644
--- a/tests/test_default_router.py
+++ b/tests/test_default_router.py
@@ -38,8 +38,8 @@ class TestRegressionCases(testing.TestBase):
resource, method_map, params = self.router.find('/v1/messages')
self.assertEqual(resource.resource_id, 2)
- resource, method_map, params = self.router.find('/v1')
- self.assertIs(resource, None)
+ route = self.router.find('/v1')
+ self.assertIs(route, None)
def test_recipes(self):
self.router.add_route(
@@ -53,8 +53,8 @@ class TestRegressionCases(testing.TestBase):
resource, method_map, params = self.router.find('/recipes/baking')
self.assertEqual(resource.resource_id, 2)
- resource, method_map, params = self.router.find('/recipes/grilling')
- self.assertIs(resource, None)
+ route = self.router.find('/recipes/grilling')
+ self.assertIs(route, None)
@ddt.ddt
@@ -166,8 +166,8 @@ class TestComplexRouting(testing.TestBase):
resource, method_map, params = self.router.find('/emojis/signs/42/small')
self.assertEqual(resource.resource_id, 14.1)
- resource, method_map, params = self.router.find('/emojis/signs/1/small')
- self.assertEqual(resource, None)
+ route = self.router.find('/emojis/signs/1/small')
+ self.assertEqual(route, None)
@ddt.data(
'/teams',
@@ -176,17 +176,17 @@ class TestComplexRouting(testing.TestBase):
'/gists/42',
)
def test_dead_segment(self, template):
- resource, method_map, params = self.router.find(template)
- self.assertIs(resource, None)
+ route = self.router.find(template)
+ self.assertIs(route, None)
def test_malformed_pattern(self):
- resource, method_map, params = self.router.find(
+ route = self.router.find(
'/repos/racker/falcon/compare/foo')
- self.assertIs(resource, None)
+ self.assertIs(route, None)
- resource, method_map, params = self.router.find(
+ route = self.router.find(
'/repos/racker/falcon/compare/foo/full')
- self.assertIs(resource, None)
+ self.assertIs(route, None)
def test_literal(self):
resource, method_map, params = self.router.find('/user/memberships')
@@ -248,12 +248,12 @@ class TestComplexRouting(testing.TestBase):
'/emojis/signs/78/undefined',
)
def test_not_found(self, path):
- resource, method_map, params = self.router.find(path)
- self.assertIs(resource, None)
+ route = self.router.find(path)
+ self.assertIs(route, None)
def test_subsegment_not_found(self):
- resource, method_map, params = self.router.find('/emojis/signs/0/x')
- self.assertIs(resource, None)
+ route = self.router.find('/emojis/signs/0/x')
+ self.assertIs(route, None)
def test_multivar(self):
resource, method_map, params = self.router.find(
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coveralls",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
coverage==6.5.0
coveralls==3.3.1
ddt==1.7.2
docopt==0.6.2
exceptiongroup==1.2.2
-e git+https://github.com/falconry/falcon.git@f44fba57ef88ec9c11223c2765d49fb3573305a0#egg=falcon
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
nose==1.3.7
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.31.0
six==1.17.0
testtools==2.7.1
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==3.4.1
- coverage==6.5.0
- coveralls==3.3.1
- ddt==1.7.2
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- nose==1.3.7
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.31.0
- six==1.17.0
- testtools==2.7.1
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_custom_router.py::TestCustomRouter::test_custom_router_find_should_be_used",
"tests/test_default_router.py::TestRegressionCases::test_recipes",
"tests/test_default_router.py::TestRegressionCases::test_versioned_url",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_1__teams",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_2__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_3__gists",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_4__gists_42",
"tests/test_default_router.py::TestComplexRouting::test_literal_segment",
"tests/test_default_router.py::TestComplexRouting::test_malformed_pattern",
"tests/test_default_router.py::TestComplexRouting::test_not_found_01__this_does_not_exist",
"tests/test_default_router.py::TestComplexRouting::test_not_found_02__user_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_03__repos_racker_falcon_compare_johndoe_master___janedoe_dev_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_04__teams",
"tests/test_default_router.py::TestComplexRouting::test_not_found_05__teams_42_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_06__teams_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_07__teams_42_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_08__teams_default_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_09__teams_default_members_thing_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_10__teams_default_members_thing_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_11__teams_default_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_12__teams_default_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_13__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_not_found_14__emojis_signs_0_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_15__emojis_signs_0_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_16__emojis_signs_0_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_17__emojis_signs_20_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_18__emojis_signs_20_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_19__emojis_signs_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_20__emojis_signs_78_undefined",
"tests/test_default_router.py::TestComplexRouting::test_subsegment_not_found"
]
| []
| [
"tests/test_custom_router.py::TestCustomRouter::test_can_pass_additional_params_to_add_route",
"tests/test_custom_router.py::TestCustomRouter::test_custom_router_add_route_should_be_used",
"tests/test_default_router.py::TestComplexRouting::test_collision_1__teams__collision_",
"tests/test_default_router.py::TestComplexRouting::test_collision_2__emojis_signs__id_too_",
"tests/test_default_router.py::TestComplexRouting::test_collision_3__repos__org___repo__compare__complex___vs_____complex2___collision_",
"tests/test_default_router.py::TestComplexRouting::test_complex_1______5_",
"tests/test_default_router.py::TestComplexRouting::test_complex_2____full___10_",
"tests/test_default_router.py::TestComplexRouting::test_complex_3____part___15_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_1______16_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_2____full___17_",
"tests/test_default_router.py::TestComplexRouting::test_dump",
"tests/test_default_router.py::TestComplexRouting::test_literal",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_01____teams_default___19_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_02____teams_default_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_03____teams_foo___6_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_04____teams_foo_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_05____gists_first___20_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_06____gists_first_raw___18_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_07____gists_first_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_08____gists_1776_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_09____emojis_signs_78___13_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_10____emojis_signs_78_small___22_",
"tests/test_default_router.py::TestComplexRouting::test_multivar",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_1__repos__org___repo__compare__simple_vs_complex_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_2__repos__complex___vs___simple_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_3__repos__org___repo__compare__complex___vs_____complex2__full",
"tests/test_default_router.py::TestComplexRouting::test_override",
"tests/test_default_router.py::TestComplexRouting::test_variable"
]
| []
| Apache License 2.0 | 506 | [
"falcon/routing/compiled.py",
"falcon/api.py"
]
| [
"falcon/routing/compiled.py",
"falcon/api.py"
]
|
Shopify__shopify_python_api-136 | a78109e725cf9e400f955062399767f36f3a1f44 | 2016-04-18 03:46:33 | c29e0ecbed9de67dd923f980a3ac053922dab75e | diff --git a/shopify/mixins.py b/shopify/mixins.py
index 9d3c179..c7806a0 100644
--- a/shopify/mixins.py
+++ b/shopify/mixins.py
@@ -11,8 +11,15 @@ class Countable(object):
class Metafields(object):
- def metafields(self):
- return shopify.resources.Metafield.find(resource=self.__class__.plural, resource_id=self.id)
+ def metafields(self, _options=None, **kwargs):
+ if _options is None:
+ _options = kwargs
+ return shopify.resources.Metafield.find(resource=self.__class__.plural, resource_id=self.id, **_options)
+
+ def metafields_count(self, _options=None, **kwargs):
+ if _options is None:
+ _options = kwargs
+ return int(self.get("metafields/count", **_options))
def add_metafield(self, metafield):
if self.is_new():
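The mixin above now forwards keyword arguments to the metafields lookup and adds a `metafields_count()` helper. Typical call sites would look like the sketch below; it assumes an already-activated `shopify.Session`, and the product ID and filter values are placeholders lifted from the issue and the tests rather than anything meaningful.

```python
import shopify

# Assumes shopify.Session has been set up with real shop credentials.
product = shopify.Product.find(632910392)                      # ID borrowed from the test fixtures
first_two = product.metafields(limit=2)                        # kwargs now reach the API request
total_strings = product.metafields_count(value_type="string")  # counts, with optional filters
```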
| metafields() method should be able to take options parameters for limit and page
```
import shopify
product = shopify.Product.find(5972485446)
metafields = product.metafields()
print(len(metafields))
> 50
metafields = product.metafields(limit=250)
> TypeError: metafields() got an unexpected keyword argument 'limit'
```
I looked into the code, and it seems like it may be a simple fix. If I come up with something I'll submit a pull request, if I can't I'll let it be known here so somebody else can try tackling it. | Shopify/shopify_python_api | diff --git a/test/fixtures/metafields_count.json b/test/fixtures/metafields_count.json
new file mode 100644
index 0000000..a113c32
--- /dev/null
+++ b/test/fixtures/metafields_count.json
@@ -0,0 +1,1 @@
+{"count":2}
diff --git a/test/product_test.py b/test/product_test.py
index c48962a..dcc9ae7 100644
--- a/test/product_test.py
+++ b/test/product_test.py
@@ -28,6 +28,26 @@ class ProductTest(TestCase):
for field in metafields:
self.assertTrue(isinstance(field, shopify.Metafield))
+ def test_get_metafields_for_product_with_params(self):
+ self.fake("products/632910392/metafields.json?limit=2", extension=False, body=self.load_fixture('metafields'))
+
+ metafields = self.product.metafields(limit=2)
+ self.assertEqual(2, len(metafields))
+ for field in metafields:
+ self.assertTrue(isinstance(field, shopify.Metafield))
+
+ def test_get_metafields_for_product_count(self):
+ self.fake("products/632910392/metafields/count", body=self.load_fixture('metafields_count'))
+
+ metafields_count = self.product.metafields_count()
+ self.assertEqual(2, metafields_count)
+
+ def test_get_metafields_for_product_count_with_params(self):
+ self.fake("products/632910392/metafields/count.json?value_type=string", extension=False, body=self.load_fixture('metafields_count'))
+
+ metafields_count = self.product.metafields_count(value_type="string")
+ self.assertEqual(2, metafields_count)
+
def test_update_loaded_variant(self):
self.fake("products/632910392/variants/808950810", method='PUT', code=200, body=self.load_fixture('variant'))
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 2.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"mock>=1.0.1",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pyactiveresource==2.2.2
pytest @ file:///croot/pytest_1738938843180/work
PyYAML==6.0.2
-e git+https://github.com/Shopify/shopify_python_api.git@a78109e725cf9e400f955062399767f36f3a1f44#egg=ShopifyAPI
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: shopify_python_api
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- mock==5.2.0
- pyactiveresource==2.2.2
- pyyaml==6.0.2
- six==1.17.0
prefix: /opt/conda/envs/shopify_python_api
| [
"test/product_test.py::ProductTest::test_get_metafields_for_product_count",
"test/product_test.py::ProductTest::test_get_metafields_for_product_count_with_params",
"test/product_test.py::ProductTest::test_get_metafields_for_product_with_params"
]
| []
| [
"test/product_test.py::ProductTest::test_add_metafields_to_product",
"test/product_test.py::ProductTest::test_add_variant_to_product",
"test/product_test.py::ProductTest::test_get_metafields_for_product",
"test/product_test.py::ProductTest::test_update_loaded_variant"
]
| []
| MIT License | 507 | [
"shopify/mixins.py"
]
| [
"shopify/mixins.py"
]
|
|
networkx__networkx-2092 | d26d5e4de3a7a7fa609f56295dd8537bccffccd9 | 2016-04-18 16:08:11 | 3f4fd85765bf2d88188cfd4c84d0707152e6cd1e | diff --git a/doc/source/reference/algorithms.rst b/doc/source/reference/algorithms.rst
index e9a3b1f99..dd3958cfe 100644
--- a/doc/source/reference/algorithms.rst
+++ b/doc/source/reference/algorithms.rst
@@ -30,7 +30,6 @@ Algorithms
algorithms.distance_regular
algorithms.dominance
algorithms.dominating
- algorithms.efficiency
algorithms.euler
algorithms.flow
algorithms.graphical
diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py
index d6b34f3dc..6c56678e1 100644
--- a/networkx/algorithms/assortativity/correlation.py
+++ b/networkx/algorithms/assortativity/correlation.py
@@ -190,15 +190,13 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None):
Assortativity measures the similarity of connections
in the graph with respect to the given numeric attribute.
- The numeric attribute must be an integer.
-
+
Parameters
----------
G : NetworkX graph
attribute : string
- Node attribute key. The corresponding attribute value must be an
- integer.
+ Node attribute key
nodes: list or iterable (optional)
Compute numeric assortativity only for attributes of nodes in
diff --git a/networkx/algorithms/assortativity/mixing.py b/networkx/algorithms/assortativity/mixing.py
index 09886c749..2c0e4f02b 100644
--- a/networkx/algorithms/assortativity/mixing.py
+++ b/networkx/algorithms/assortativity/mixing.py
@@ -166,15 +166,13 @@ def degree_mixing_matrix(G, x='out', y='in', weight=None,
def numeric_mixing_matrix(G,attribute,nodes=None,normalized=True):
"""Return numeric mixing matrix for attribute.
- The attribute must be an integer.
-
Parameters
----------
G : graph
NetworkX graph object.
attribute : string
- Node attribute key. The corresponding attribute must be an integer.
+ Node attribute key.
nodes: list or iterable (optional)
Build the matrix only with nodes in container. The default is all nodes.
diff --git a/networkx/algorithms/community/__init__.py b/networkx/algorithms/community/__init__.py
index 1d0fbbdf1..aa9a9b1a4 100644
--- a/networkx/algorithms/community/__init__.py
+++ b/networkx/algorithms/community/__init__.py
@@ -1,6 +1,6 @@
from networkx.algorithms.community.asyn_lpa import *
from networkx.algorithms.community.centrality import *
-from networkx.algorithms.community.community_generators import *
+from networkx.algorithms.community.generators import *
from networkx.algorithms.community.kclique import *
from networkx.algorithms.community.kernighan_lin import *
from networkx.algorithms.community.quality import *
diff --git a/networkx/algorithms/community/community_generators.py b/networkx/algorithms/community/generators.py
similarity index 100%
rename from networkx/algorithms/community/community_generators.py
rename to networkx/algorithms/community/generators.py
diff --git a/networkx/algorithms/components/attracting.py b/networkx/algorithms/components/attracting.py
index ae805903a..aa7971b7c 100644
--- a/networkx/algorithms/components/attracting.py
+++ b/networkx/algorithms/components/attracting.py
@@ -1,23 +1,22 @@
# -*- coding: utf-8 -*-
+"""
+Attracting components.
+"""
# Copyright (C) 2004-2016 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
-#
-# Authors: Christopher Ellison
-"""Attracting components."""
import networkx as nx
from networkx.utils.decorators import not_implemented_for
-
-__all__ = ['number_attracting_components',
+__authors__ = "\n".join(['Christopher Ellison'])
+__all__ = ['number_attracting_components',
'attracting_components',
- 'is_attracting_component',
+ 'is_attracting_component',
'attracting_component_subgraphs',
]
-
@not_implemented_for('undirected')
def attracting_components(G):
"""Generates a list of attracting components in `G`.
@@ -40,15 +39,10 @@ def attracting_components(G):
attractors : generator of sets
A generator of sets of nodes, one for each attracting component of G.
- Raises
- ------
- NetworkXNotImplemented :
- If the input graph is undirected.
-
See Also
--------
number_attracting_components
- is_attracting_component
+ is_attracting_component
attracting_component_subgraphs
"""
@@ -58,7 +52,6 @@ def attracting_components(G):
if cG.out_degree(n) == 0:
yield scc[n]
-
@not_implemented_for('undirected')
def number_attracting_components(G):
"""Returns the number of attracting components in `G`.
@@ -73,11 +66,6 @@ def number_attracting_components(G):
n : int
The number of attracting components in G.
- Raises
- ------
- NetworkXNotImplemented :
- If the input graph is undirected.
-
See Also
--------
attracting_components
@@ -103,11 +91,6 @@ def is_attracting_component(G):
attracting : bool
True if `G` has a single attracting component. Otherwise, False.
- Raises
- ------
- NetworkXNotImplemented :
- If the input graph is undirected.
-
See Also
--------
attracting_components
@@ -138,14 +121,9 @@ def attracting_component_subgraphs(G, copy=True):
A list of node-induced subgraphs of the attracting components of `G`.
copy : bool
- If copy is True, graph, node, and edge attributes are copied to the
+ If copy is True, graph, node, and edge attributes are copied to the
subgraphs.
- Raises
- ------
- NetworkXNotImplemented :
- If the input graph is undirected.
-
See Also
--------
attracting_components
diff --git a/networkx/algorithms/components/biconnected.py b/networkx/algorithms/components/biconnected.py
index 53a96cafc..762e10b60 100644
--- a/networkx/algorithms/components/biconnected.py
+++ b/networkx/algorithms/components/biconnected.py
@@ -1,19 +1,21 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2011-2016 by
+"""
+Biconnected components and articulation points.
+"""
+# Copyright (C) 2011-2013 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
-#
-# Authors: Jordi Torrents ([email protected])
-# Dan Schult ([email protected])
-# Aric Hagberg ([email protected])
-"""Biconnected components and articulation points."""
from itertools import chain
import networkx as nx
from networkx.utils.decorators import not_implemented_for
+__author__ = '\n'.join(['Jordi Torrents <[email protected]>',
+ 'Dan Schult <[email protected]>',
+ 'Aric Hagberg <[email protected]>'])
+
__all__ = [
'biconnected_components',
'biconnected_component_edges',
@@ -28,7 +30,7 @@ def is_biconnected(G):
"""Return True if the graph is biconnected, False otherwise.
A graph is biconnected if, and only if, it cannot be disconnected by
- removing only one node (and all edges incident on that node). If
+ removing only one node (and all edges incident on that node). If
removing a node increases the number of disconnected components
in the graph, that node is called an articulation point, or cut
vertex. A biconnected graph has no articulation points.
@@ -63,20 +65,16 @@ def is_biconnected(G):
articulation_points
biconnected_component_edges
biconnected_component_subgraphs
- components.is_strongly_connected
- components.is_weakly_connected
- components.is_connected
- components.is_semiconnected
Notes
-----
The algorithm to find articulation points and biconnected
components is implemented using a non-recursive depth-first-search
(DFS) that keeps track of the highest level that back edges reach
- in the DFS tree. A node `n` is an articulation point if, and only
+ in the DFS tree. A node `n` is an articulation point if, and only
if, there exists a subtree rooted at `n` such that there is no
back edge from any successor of `n` that links to a predecessor of
- `n` in the DFS tree. By keeping track of all the edges traversed
+ `n` in the DFS tree. By keeping track of all the edges traversed
by the DFS we can obtain the biconnected components because all
edges of a bicomponent will be traversed consecutively between
articulation points.
@@ -89,7 +87,7 @@ def is_biconnected(G):
"""
bcc = list(biconnected_components(G))
- if not bcc: # No bicomponents (it could be an empty graph)
+ if not bcc: # No bicomponents (it could be an empty graph)
return False
return len(bcc[0]) == len(G)
@@ -101,9 +99,9 @@ def biconnected_component_edges(G):
Biconnected components are maximal subgraphs such that the removal of a
node (and all edges incident on that node) will not disconnect the
- subgraph. Note that nodes may be part of more than one biconnected
- component. Those nodes are articulation points, or cut vertices.
- However, each edge belongs to one, and only one, biconnected component.
+ subgraph. Note that nodes may be part of more than one biconnected
+ component. Those nodes are articulation points, or cut vertices. However,
+ each edge belongs to one, and only one, biconnected component.
Notice that by convention a dyad is considered a biconnected component.
@@ -149,10 +147,10 @@ def biconnected_component_edges(G):
The algorithm to find articulation points and biconnected
components is implemented using a non-recursive depth-first-search
(DFS) that keeps track of the highest level that back edges reach
- in the DFS tree. A node `n` is an articulation point if, and only
+ in the DFS tree. A node `n` is an articulation point if, and only
if, there exists a subtree rooted at `n` such that there is no
back edge from any successor of `n` that links to a predecessor of
- `n` in the DFS tree. By keeping track of all the edges traversed
+ `n` in the DFS tree. By keeping track of all the edges traversed
by the DFS we can obtain the biconnected components because all
edges of a bicomponent will be traversed consecutively between
articulation points.
@@ -176,7 +174,7 @@ def biconnected_components(G):
Biconnected components are maximal subgraphs such that the removal of a
node (and all edges incident on that node) will not disconnect the
subgraph. Note that nodes may be part of more than one biconnected
- component. Those nodes are articulation points, or cut vertices. The
+ component. Those nodes are articulation points, or cut vertices. The
removal of articulation points will increase the number of connected
components of the graph.
@@ -226,9 +224,9 @@ def biconnected_components(G):
See Also
--------
- is_biconnected
- articulation_points
- biconnected_component_edges
+ is_biconnected,
+ articulation_points,
+ biconnected_component_edges,
biconnected_component_subgraphs
Notes
@@ -236,10 +234,10 @@ def biconnected_components(G):
The algorithm to find articulation points and biconnected
components is implemented using a non-recursive depth-first-search
(DFS) that keeps track of the highest level that back edges reach
- in the DFS tree. A node `n` is an articulation point if, and only
+ in the DFS tree. A node `n` is an articulation point if, and only
if, there exists a subtree rooted at `n` such that there is no
back edge from any successor of `n` that links to a predecessor of
- `n` in the DFS tree. By keeping track of all the edges traversed
+ `n` in the DFS tree. By keeping track of all the edges traversed
by the DFS we can obtain the biconnected components because all
edges of a bicomponent will be traversed consecutively between
articulation points.
@@ -254,7 +252,6 @@ def biconnected_components(G):
for comp in _biconnected_dfs(G, components=True):
yield set(chain.from_iterable(comp))
-
@not_implemented_for('directed')
def biconnected_component_subgraphs(G, copy=True):
"""Return a generator of graphs, one graph for each biconnected component
@@ -262,8 +259,8 @@ def biconnected_component_subgraphs(G, copy=True):
Biconnected components are maximal subgraphs such that the removal of a
node (and all edges incident on that node) will not disconnect the
- subgraph. Note that nodes may be part of more than one biconnected
- component. Those nodes are articulation points, or cut vertices. The
+ subgraph. Note that nodes may be part of more than one biconnected
+ component. Those nodes are articulation points, or cut vertices. The
removal of articulation points will increase the number of connected
components of the graph.
@@ -315,9 +312,9 @@ def biconnected_component_subgraphs(G, copy=True):
See Also
--------
- is_biconnected
- articulation_points
- biconnected_component_edges
+ is_biconnected,
+ articulation_points,
+ biconnected_component_edges,
biconnected_components
Notes
@@ -325,10 +322,10 @@ def biconnected_component_subgraphs(G, copy=True):
The algorithm to find articulation points and biconnected
components is implemented using a non-recursive depth-first-search
(DFS) that keeps track of the highest level that back edges reach
- in the DFS tree. A node `n` is an articulation point if, and only
+ in the DFS tree. A node `n` is an articulation point if, and only
if, there exists a subtree rooted at `n` such that there is no
back edge from any successor of `n` that links to a predecessor of
- `n` in the DFS tree. By keeping track of all the edges traversed
+ `n` in the DFS tree. By keeping track of all the edges traversed
by the DFS we can obtain the biconnected components because all
edges of a bicomponent will be traversed consecutively between
articulation points.
@@ -355,7 +352,7 @@ def articulation_points(G):
An articulation point or cut vertex is any node whose removal (along with
all its incident edges) increases the number of connected components of
- a graph. An undirected connected graph without articulation points is
+ a graph. An undirected connected graph without articulation points is
biconnected. Articulation points belong to more than one biconnected
component of a graph.
@@ -392,9 +389,9 @@ def articulation_points(G):
See Also
--------
- is_biconnected
- biconnected_components
- biconnected_component_edges
+ is_biconnected,
+ biconnected_components,
+ biconnected_component_edges,
biconnected_component_subgraphs
Notes
@@ -402,10 +399,10 @@ def articulation_points(G):
The algorithm to find articulation points and biconnected
components is implemented using a non-recursive depth-first-search
(DFS) that keeps track of the highest level that back edges reach
- in the DFS tree. A node `n` is an articulation point if, and only
+ in the DFS tree. A node `n` is an articulation point if, and only
if, there exists a subtree rooted at `n` such that there is no
back edge from any successor of `n` that links to a predecessor of
- `n` in the DFS tree. By keeping track of all the edges traversed
+ `n` in the DFS tree. By keeping track of all the edges traversed
by the DFS we can obtain the biconnected components because all
edges of a bicomponent will be traversed consecutively between
articulation points.
@@ -428,8 +425,8 @@ def _biconnected_dfs(G, components=True):
for start in G:
if start in visited:
continue
- discovery = {start: 0} # time of first discovery of node during search
- low = {start: 0}
+ discovery = {start:0} # "time" of first discovery of node during search
+ low = {start:0}
root_children = 0
visited.add(start)
edge_stack = []
@@ -441,31 +438,31 @@ def _biconnected_dfs(G, components=True):
if grandparent == child:
continue
if child in visited:
- if discovery[child] <= discovery[parent]: # back edge
- low[parent] = min(low[parent], discovery[child])
+ if discovery[child] <= discovery[parent]: # back edge
+ low[parent] = min(low[parent],discovery[child])
if components:
- edge_stack.append((parent, child))
+ edge_stack.append((parent,child))
else:
low[child] = discovery[child] = len(discovery)
visited.add(child)
stack.append((parent, child, iter(G[child])))
if components:
- edge_stack.append((parent, child))
+ edge_stack.append((parent,child))
except StopIteration:
stack.pop()
if len(stack) > 1:
if low[parent] >= discovery[grandparent]:
if components:
- ind = edge_stack.index((grandparent, parent))
+ ind = edge_stack.index((grandparent,parent))
yield edge_stack[ind:]
- edge_stack = edge_stack[:ind]
+ edge_stack=edge_stack[:ind]
else:
yield grandparent
low[grandparent] = min(low[parent], low[grandparent])
- elif stack: # length 1 so grandparent is root
+ elif stack: # length 1 so grandparent is root
root_children += 1
if components:
- ind = edge_stack.index((grandparent, parent))
+ ind = edge_stack.index((grandparent,parent))
yield edge_stack[ind:]
if not components:
# root node is articulation point if it has more than 1 child
diff --git a/networkx/algorithms/components/connected.py b/networkx/algorithms/components/connected.py
index 5e90da716..dc17d34da 100644
--- a/networkx/algorithms/components/connected.py
+++ b/networkx/algorithms/components/connected.py
@@ -1,19 +1,20 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2004-2016 by
+"""
+Connected components.
+"""
+# Copyright (C) 2004-2013 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
-#
-# Authors: Eben Kenah
-# Aric Hagberg ([email protected])
-# Christopher Ellison
-"""Connected components."""
import networkx as nx
from networkx.utils.decorators import not_implemented_for
from ...utils import arbitrary_element
+__authors__ = "\n".join(['Eben Kenah',
+ 'Aric Hagberg <[email protected]>'
+ 'Christopher Ellison'])
__all__ = [
'number_connected_components',
'connected_components',
@@ -37,11 +38,6 @@ def connected_components(G):
comp : generator of sets
A generator of sets of nodes, one for each component of G.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
Examples
--------
Generate a sorted list of connected components, largest first.
@@ -58,8 +54,7 @@ def connected_components(G):
See Also
--------
- components.strongly_connected_components
- components.weakly_connected_components
+ strongly_connected_components
Notes
-----
@@ -91,11 +86,6 @@ def connected_component_subgraphs(G, copy=True):
comp : generator
A generator of graphs, one for each connected component of G.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
Examples
--------
>>> G = nx.path_graph(4)
@@ -103,15 +93,13 @@ def connected_component_subgraphs(G, copy=True):
>>> graphs = list(nx.connected_component_subgraphs(G))
If you only want the largest connected component, it's more
- efficient to use max instead of sort:
+ efficient to use max than sort.
>>> Gc = max(nx.connected_component_subgraphs(G), key=len)
See Also
--------
connected_components
- components.strongly_connected_component_subgraphs
- components.weakly_connected_component_subgraphs
Notes
-----
@@ -142,8 +130,6 @@ def number_connected_components(G):
See Also
--------
connected_components
- components.number_weakly_connected_components
- components.number_strongly_connected_components
Notes
-----
@@ -167,11 +153,6 @@ def is_connected(G):
connected : bool
True if the graph is connected, false otherwise.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
Examples
--------
>>> G = nx.path_graph(4)
@@ -180,10 +161,6 @@ def is_connected(G):
See Also
--------
- components.is_strongly_connected
- components.is_weakly_connected
- components.is_semiconnected
- components.is_biconnected
connected_components
Notes
@@ -214,11 +191,6 @@ def node_connected_component(G, n):
comp : set
A set of nodes in the component of G containing node n.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
See Also
--------
connected_components
diff --git a/networkx/algorithms/components/semiconnected.py b/networkx/algorithms/components/semiconnected.py
index a9766024a..07cc05dd5 100644
--- a/networkx/algorithms/components/semiconnected.py
+++ b/networkx/algorithms/components/semiconnected.py
@@ -1,19 +1,18 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2004-2016 by
-# Aric Hagberg <[email protected]>
-# Dan Schult <[email protected]>
-# Pieter Swart <[email protected]>
-# All rights reserved.
-# BSD license.
-#
-# Authors: ysitu ([email protected])
-"""Semiconnectedness."""
+"""
+Semiconnectedness.
+"""
+
+__author__ = """ysitu <[email protected]>"""
+# Copyright (C) 2014 ysitu <[email protected]>
+# All rights reserved.
+# BSD license.
+
import networkx as nx
from networkx.utils import not_implemented_for, pairwise
__all__ = ['is_semiconnected']
-
@not_implemented_for('undirected')
def is_semiconnected(G):
"""Return True if the graph is semiconnected, False otherwise.
@@ -34,7 +33,7 @@ def is_semiconnected(G):
Raises
------
NetworkXNotImplemented :
- If the input graph is undirected.
+ If the input graph is not directed.
NetworkXPointlessConcept :
If the graph is empty.
@@ -50,10 +49,8 @@ def is_semiconnected(G):
See Also
--------
- components.is_strongly_connected
- components.is_weakly_connected
- components.is_connected
- components.is_biconnected
+ is_strongly_connected,
+ is_weakly_connected
"""
if len(G) == 0:
raise nx.NetworkXPointlessConcept(
diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py
index 9fcfd1de5..98e57f5de 100644
--- a/networkx/algorithms/components/strongly_connected.py
+++ b/networkx/algorithms/components/strongly_connected.py
@@ -1,19 +1,20 @@
# -*- coding: utf-8 -*-
+"""Strongly connected components.
+"""
# Copyright (C) 2004-2016 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
-#
-# Authors: Eben Kenah
-# Aric Hagberg ([email protected])
-# Christopher Ellison
-# Ben Edwards ([email protected])
-"""Strongly connected components."""
import networkx as nx
from networkx.utils.decorators import not_implemented_for
+__authors__ = "\n".join(['Eben Kenah',
+ 'Aric Hagberg ([email protected])'
+ 'Christopher Ellison',
+ 'Ben Edwards ([email protected])'])
+
__all__ = ['number_strongly_connected_components',
'strongly_connected_components',
'strongly_connected_component_subgraphs',
@@ -40,7 +41,7 @@ def strongly_connected_components(G):
Raises
------
- NetworkXNotImplemented :
+ NetworkXNotImplemented:
If G is undirected.
Examples
@@ -60,13 +61,12 @@ def strongly_connected_components(G):
See Also
--------
- components.connected_components
- components.weakly_connected_components
- kosaraju_strongly_connected_components
+ connected_components,
+ weakly_connected_components
Notes
-----
- Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
+ Uses Tarjan's algorithm with Nuutila's modifications.
Nonrecursive version of algorithm.
References
@@ -157,7 +157,8 @@ def kosaraju_strongly_connected_components(G, source=None):
See Also
--------
- strongly_connected_components
+ connected_components
+ weakly_connected_components
Notes
-----
@@ -197,8 +198,8 @@ def strongly_connected_components_recursive(G):
Raises
------
- NetworkXNotImplemented :
- If G is undirected.
+ NetworkXNotImplemented:
+ If G is undirected
Examples
--------
@@ -221,7 +222,7 @@ def strongly_connected_components_recursive(G):
Notes
-----
- Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
+ Uses Tarjan's algorithm with Nuutila's modifications.
References
----------
@@ -283,11 +284,6 @@ def strongly_connected_component_subgraphs(G, copy=True):
comp : generator of graphs
A generator of graphs, one for each strongly connected component of G.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
Examples
--------
Generate a sorted list of strongly connected components, largest first.
@@ -305,9 +301,8 @@ def strongly_connected_component_subgraphs(G, copy=True):
See Also
--------
- strongly_connected_components
- components.connected_component_subgraphs
- components.weakly_connected_component_subgraphs
+ connected_component_subgraphs
+ weakly_connected_component_subgraphs
"""
for comp in strongly_connected_components(G):
@@ -331,16 +326,9 @@ def number_strongly_connected_components(G):
n : integer
Number of strongly connected components
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
See Also
--------
- strongly_connected_components
- components.number_connected_components
- components.number_weakly_connected_components
+ connected_components
Notes
-----
@@ -363,17 +351,8 @@ def is_strongly_connected(G):
connected : bool
True if the graph is strongly connected, False otherwise.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
See Also
--------
- components.is_weakly_connected
- components.is_semiconnected
- components.is_connected
- components.is_biconnected
strongly_connected_components
Notes
@@ -407,18 +386,18 @@ def condensation(G, scc=None):
Returns
-------
C : NetworkX DiGraph
- The condensation graph C of G. The node labels are integers
+ The condensation graph C of G. The node labels are integers
corresponding to the index of the component in the list of
- strongly connected components of G. C has a graph attribute named
+ strongly connected components of G. C has a graph attribute named
'mapping' with a dictionary mapping the original nodes to the
- nodes in C to which they belong. Each node in C also has a node
+ nodes in C to which they belong. Each node in C also has a node
attribute 'members' with the set of original nodes in G that
form the SCC that the node in C represents.
Raises
------
NetworkXNotImplemented:
- If G is undirected.
+ If G is not directed
Notes
-----
diff --git a/networkx/algorithms/components/weakly_connected.py b/networkx/algorithms/components/weakly_connected.py
index ff2b06421..05df0166a 100644
--- a/networkx/algorithms/components/weakly_connected.py
+++ b/networkx/algorithms/components/weakly_connected.py
@@ -1,17 +1,19 @@
# -*- coding: utf-8 -*-
+"""Weakly connected components.
+"""
# Copyright (C) 2004-2016 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
-#
-# Authors: Aric Hagberg ([email protected])
-# Christopher Ellison
-"""Weakly connected components."""
+
import networkx as nx
from networkx.utils.decorators import not_implemented_for
+__authors__ = "\n".join(['Aric Hagberg ([email protected])'
+ 'Christopher Ellison'])
+
__all__ = [
'number_weakly_connected_components',
'weakly_connected_components',
@@ -35,11 +37,6 @@ def weakly_connected_components(G):
A generator of sets of nodes, one for each weakly connected
component of G.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
Examples
--------
Generate a sorted list of weakly connected components, largest first.
@@ -51,14 +48,13 @@ def weakly_connected_components(G):
[4, 3]
If you only want the largest component, it's more efficient to
- use max instead of sort:
+ use max instead of sort.
>>> largest_cc = max(nx.weakly_connected_components(G), key=len)
See Also
--------
- components.connected_components
- components.strongly_connected_components
+ strongly_connected_components
Notes
-----
@@ -87,16 +83,9 @@ def number_weakly_connected_components(G):
n : integer
Number of weakly connected components
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
See Also
--------
- weakly_connected_components
- components.number_connected_components
- components.number_strongly_connected_components
+ connected_components
Notes
-----
@@ -123,11 +112,6 @@ def weakly_connected_component_subgraphs(G, copy=True):
comp : generator
A generator of graphs, one for each weakly connected component of G.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
Examples
--------
Generate a sorted list of weakly connected components, largest first.
@@ -139,15 +123,14 @@ def weakly_connected_component_subgraphs(G, copy=True):
[4, 3]
If you only want the largest component, it's more efficient to
- use max instead of sort:
+ use max instead of sort.
>>> Gc = max(nx.weakly_connected_component_subgraphs(G), key=len)
See Also
--------
- weakly_connected_components
- components.strongly_connected_component_subgraphs
- components.connected_component_subgraphs
+ strongly_connected_components
+ connected_components
Notes
-----
@@ -179,18 +162,11 @@ def is_weakly_connected(G):
connected : bool
True if the graph is weakly connected, False otherwise.
- Raises
- ------
- NetworkXNotImplemented:
- If G is undirected.
-
See Also
--------
- components.is_strongly_connected
- components.is_semiconnected
- components.is_connected
- components.is_biconnected
- weakly_connected_components
+ is_strongly_connected
+ is_semiconnected
+ is_connected
Notes
-----
diff --git a/networkx/algorithms/dominance.py b/networkx/algorithms/dominance.py
index 0f0901e21..9d3b84375 100644
--- a/networkx/algorithms/dominance.py
+++ b/networkx/algorithms/dominance.py
@@ -126,17 +126,12 @@ def dominance_frontiers(G, start):
"""
idom = nx.immediate_dominators(G, start)
- df = {u: [] for u in idom}
-
+ df = {u: set() for u in idom}
for u in idom:
- if len(G.pred[u]) - int(u in G.pred[u]) >= 2:
- p = set()
+ if len(G.pred[u]) >= 2:
for v in G.pred[u]:
- while v != idom[u] and v not in p:
- p.add(v)
- v = idom[v]
- p.discard(u)
- for v in p:
- df[v].append(u)
-
+ if v in idom:
+ while v != idom[u]:
+ df[v].add(u)
+ v = idom[v]
return df
diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py
index 02b3740fb..516fb4f4d 100644
--- a/networkx/generators/geometric.py
+++ b/networkx/generators/geometric.py
@@ -60,9 +60,9 @@ def random_geometric_graph(n, radius, dim=2, pos=None, metric=None):
A metric on vectors of numbers (represented as lists or
tuples). This must be a function that accepts two lists (or
tuples) as input and yields a number as output. The function
- must also satisfy the four requirements of a `metric`_.
- Specifically, if *d* is the function and *x*, *y*,
- and *z* are vectors in the graph, then *d* must satisfy
+ must also satisfy the four requirements of a
+ `metric`_. Specifically, if *d* is the function and *x*, *y*,
+ and *x* are vectors in the graph, then *d* must satisfy
1. *d*(*x*, *y*) ≥ 0,
2. *d*(*x*, *y*) = 0 if and only if *x* = *y*,
@@ -72,7 +72,7 @@ def random_geometric_graph(n, radius, dim=2, pos=None, metric=None):
If this argument is not specified, the Euclidean distance metric is
used.
- .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
+ .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
Returns
-------
@@ -193,9 +193,9 @@ def geographical_threshold_graph(n, theta, alpha=2, dim=2, pos=None,
A metric on vectors of numbers (represented as lists or
tuples). This must be a function that accepts two lists (or
tuples) as input and yields a number as output. The function
- must also satisfy the four requirements of a `metric`_.
- Specifically, if *d* is the function and *x*, *y*,
- and *z* are vectors in the graph, then *d* must satisfy
+ must also satisfy the four requirements of a
+ `metric`_. Specifically, if *d* is the function and *x*, *y*,
+ and *x* are vectors in the graph, then *d* must satisfy
1. *d*(*x*, *y*) ≥ 0,
2. *d*(*x*, *y*) = 0 if and only if *x* = *y*,
@@ -205,7 +205,7 @@ def geographical_threshold_graph(n, theta, alpha=2, dim=2, pos=None,
If this argument is not specified, the Euclidean distance metric is
used.
- .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
+ .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
Returns
-------
@@ -326,9 +326,9 @@ def waxman_graph(n, alpha=0.4, beta=0.1, L=None, domain=(0, 0, 1, 1),
A metric on vectors of numbers (represented as lists or
tuples). This must be a function that accepts two lists (or
tuples) as input and yields a number as output. The function
- must also satisfy the four requirements of a `metric`_.
- Specifically, if *d* is the function and *x*, *y*,
- and *z* are vectors in the graph, then *d* must satisfy
+ must also satisfy the four requirements of a
+ `metric`_. Specifically, if *d* is the function and *x*, *y*,
+ and *x* are vectors in the graph, then *d* must satisfy
1. *d*(*x*, *y*) ≥ 0,
2. *d*(*x*, *y*) = 0 if and only if *x* = *y*,
@@ -338,7 +338,7 @@ def waxman_graph(n, alpha=0.4, beta=0.1, L=None, domain=(0, 0, 1, 1),
If this argument is not specified, the Euclidean distance metric is
used.
- .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
+ .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
Returns
-------
| Possible bug in dominance package
While working with the ```dominance_frontiers``` algorithm I noticed a strange line of code which I think leads to incorrect results.
My question is: why try to discard element ```u```?
```
p.discard(u)
```
Given the following example it produces wrong results.
```
import networkx as nx
from networkx.algorithms.dominance import *
g = nx.DiGraph()
g.add_edges_from([
('b0','b1'),
('b1', 'b2'),
('b2', 'b3'),
('b3','b1'),
('b1','b5'),
('b5', 'b6'),
('b5', 'b8'),
('b6', 'b7'),
('b8', 'b7'),
('b7', 'b3'),
('b3', 'b4')
    ])
df = dominance_frontiers(g, 'b0')
print(df)
```
It yields:
```
{'b0': [],
'b1': [],
'b2': ['b3'],
'b3': ['b1'],
'b4': [],
'b5': ['b3'],
'b6': ['b7'],
'b7': ['b3'],
'b8': ['b7']}
```
However, I expect to see ```b1``` in ```df['b1']```: if I apply the general algorithm to ```b1```, then while walking up from its predecessors, ```b1``` itself is reached before its immediate dominator ```b0```, so it should be added to ```df['b1']```.
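For reference, here is a minimal sketch of the textbook dominance-frontier computation (Cooper, Harvey & Kennedy). The function name is made up for illustration; ```G``` and ```start``` are the directed graph and entry node as in the example above.
```
import networkx as nx

def dominance_frontiers_sketch(G, start):
    """Textbook dominance-frontier computation (sketch only)."""
    idom = nx.immediate_dominators(G, start)
    df = {u: set() for u in idom}
    for u in idom:
        if len(G.pred[u]) < 2:        # frontiers only grow at join points
            continue
        for p in G.pred[u]:
            if p not in idom:         # skip predecessors unreachable from start
                continue
            runner = p
            while runner != idom[u]:  # walk up to, but excluding, idom[u]
                df[runner].add(u)
                runner = idom[runner]
    return df
```
On the graph ```g``` above, the walk from predecessor ```b3``` reaches ```b1``` itself before stopping at ```b0```, so ```b1``` ends up in ```df['b1']```, matching the expectation.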
| networkx/networkx | diff --git a/networkx/algorithms/tests/test_dominance.py b/networkx/algorithms/tests/test_dominance.py
index 94bd80468..4ce020f6c 100644
--- a/networkx/algorithms/tests/test_dominance.py
+++ b/networkx/algorithms/tests/test_dominance.py
@@ -99,28 +99,28 @@ class TestDominanceFrontiers(object):
def test_singleton(self):
G = nx.DiGraph()
G.add_node(0)
- assert_equal(nx.dominance_frontiers(G, 0), {0: []})
+ assert_equal(nx.dominance_frontiers(G, 0), {0: set()})
G.add_edge(0, 0)
- assert_equal(nx.dominance_frontiers(G, 0), {0: []})
+ assert_equal(nx.dominance_frontiers(G, 0), {0: set()})
def test_path(self):
n = 5
G = nx.path_graph(n, create_using=nx.DiGraph())
assert_equal(nx.dominance_frontiers(G, 0),
- {i: [] for i in range(n)})
+ {i: set() for i in range(n)})
def test_cycle(self):
n = 5
G = nx.cycle_graph(n, create_using=nx.DiGraph())
assert_equal(nx.dominance_frontiers(G, 0),
- {i: [] for i in range(n)})
+ {i: set() for i in range(n)})
def test_unreachable(self):
n = 5
assert_greater(n, 1)
G = nx.path_graph(n, create_using=nx.DiGraph())
assert_equal(nx.dominance_frontiers(G, n // 2),
- {i: [] for i in range(n // 2, n)})
+ {i: set() for i in range(n // 2, n)})
def test_irreducible1(self):
# Graph taken from Figure 2 of
@@ -129,9 +129,11 @@ class TestDominanceFrontiers(object):
# Software Practice & Experience, 4:110, 2001.
edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)]
G = nx.DiGraph(edges)
- assert_equal({u: sorted(df)
+ assert_equal({u: df
for u, df in nx.dominance_frontiers(G, 5).items()},
- {1: [2], 2: [1], 3: [2], 4: [1], 5: []})
+ {1: set([2]), 2: set([1]), 3: set([2]),
+ 4: set([1]), 5: set()})
+
def test_irreducible2(self):
# Graph taken from Figure 4 of
@@ -142,18 +144,21 @@ class TestDominanceFrontiers(object):
(6, 4), (6, 5)]
G = nx.DiGraph(edges)
assert_equal(nx.dominance_frontiers(G, 6),
- {1: [2], 2: [1, 3], 3: [2], 4: [2, 3], 5: [1], 6: []})
+ {1: set([2]), 2: set([1, 3]), 3: set([2]), 4: set([2, 3])
+ , 5: set([1]), 6: set([])})
def test_domrel_png(self):
# Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png
edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)]
G = nx.DiGraph(edges)
assert_equal(nx.dominance_frontiers(G, 1),
- {1: [], 2: [], 3: [5], 4: [5], 5: [2], 6: []})
+ {1: set([]), 2: set([2]), 3: set([5]), 4: set([5]),
+ 5: set([2]), 6: set()})
# Test postdominance.
with nx.utils.reversed(G):
assert_equal(nx.dominance_frontiers(G, 6),
- {1: [], 2: [], 3: [2], 4: [2], 5: [2], 6: []})
+ {1: set(), 2: set([2]), 3: set([2]), 4: set([2]),
+ 5: set([2]), 6: set()})
def test_boost_example(self):
# Graph taken from Figure 1 of
@@ -162,10 +167,97 @@ class TestDominanceFrontiers(object):
(5, 7), (6, 4)]
G = nx.DiGraph(edges)
assert_equal(nx.dominance_frontiers(G, 0),
- {0: [], 1: [], 2: [7], 3: [7], 4: [7], 5: [7], 6: [4],
- 7: []})
+ {0: set(), 1: set(), 2: set([7]), 3: set([7]),
+ 4: set([4,7]), 5: set([7]), 6: set([4]), 7: set()})
# Test postdominance.
with nx.utils.reversed(G):
assert_equal(nx.dominance_frontiers(G, 7),
- {0: [], 1: [], 2: [1], 3: [1], 4: [1], 5: [1], 6: [4],
- 7: []})
+ {0: set(), 1: set(), 2: set([1]), 3: set([1]),
+ 4: set([1,4]), 5: set([1]), 6: set([4]), 7: set()})
+
+
+ def test_discard_issue(self):
+ # https://github.com/networkx/networkx/issues/2071
+ g = nx.DiGraph()
+ g.add_edges_from([
+ ('b0','b1'),
+ ('b1', 'b2'),
+ ('b2', 'b3'),
+ ('b3','b1'),
+ ('b1','b5'),
+ ('b5', 'b6'),
+ ('b5', 'b8'),
+ ('b6', 'b7'),
+ ('b8', 'b7'),
+ ('b7', 'b3'),
+ ('b3', 'b4')
+ ]
+ )
+ df = nx.dominance_frontiers(g, 'b0')
+ assert_equal(df, {'b4': set(), 'b5': set(['b3']), 'b6': set(['b7']),
+ 'b7': set(['b3']),
+ 'b0': set(), 'b1': set(['b1']), 'b2': set(['b3']),
+ 'b3': set(['b1']), 'b8': set(['b7'])})
+
+ def test_loop(self):
+ g = nx.DiGraph()
+ g.add_edges_from([('a','b'),('b','c'),('b','a')])
+ df = nx.dominance_frontiers(g, 'a')
+ assert_equal(df, {'a': set(), 'b': set(), 'c': set()})
+
+ def test_missing_immediate_doms(self):
+ # see https://github.com/networkx/networkx/issues/2070
+ g = nx.DiGraph()
+ edges = [
+ ('entry_1', 'b1'),
+ ('b1', 'b2'),
+ ('b2', 'b3'),
+ ('b3', 'exit'),
+ ('entry_2', 'b3')
+ ]
+
+ # entry_1
+ # |
+ # b1
+ # |
+ # b2 entry_2
+ # | /
+ # b3
+ # |
+ # exit
+
+ g.add_edges_from(edges)
+ # formerly raised KeyError on entry_2 when parsing b3
+ # because entry_2 does not have immediate doms (no path)
+ nx.dominance_frontiers(g,'entry_1')
+
+ def test_loops_larger(self):
+ # from
+ # http://ecee.colorado.edu/~waite/Darmstadt/motion.html
+ g = nx.DiGraph()
+ edges = [
+ ('entry', 'exit'),
+ ('entry', '1'),
+ ('1', '2'),
+ ('2', '3'),
+ ('3', '4'),
+ ('4', '5'),
+ ('5', '6'),
+ ('6', 'exit'),
+ ('6', '2'),
+ ('5', '3'),
+ ('4', '4')
+ ]
+
+ g.add_edges_from(edges)
+ df = nx.dominance_frontiers(g,'entry')
+ answer = {'entry': set(),
+ '1': set(['exit']),
+ '2': set(['exit', '2']),
+ '3': set(['exit', '3', '2']),
+ '4': set(['exit', '4','3', '2']),
+ '5': set(['exit', '3', '2']),
+ '6': set(['exit', '2']),
+ 'exit': set()}
+ for n in df:
+ assert_equal(set(df[n]),set(answer[n]))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 12
} | help | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgdal-dev graphviz"
],
"python": "3.6",
"reqs_path": [
"requirements/default.txt",
"requirements/test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
decorator==5.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/networkx/networkx.git@d26d5e4de3a7a7fa609f56295dd8537bccffccd9#egg=networkx
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: networkx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- decorator==5.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/networkx
| [
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_singleton",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_path",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_cycle",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_unreachable",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_irreducible1",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_irreducible2",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_domrel_png",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_boost_example",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_discard_issue",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_loop",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_missing_immediate_doms",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_loops_larger"
]
| []
| [
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_exceptions",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_singleton",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_path",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_cycle",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_unreachable",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_irreducible1",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_irreducible2",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_domrel_png",
"networkx/algorithms/tests/test_dominance.py::TestImmediateDominators::test_boost_example",
"networkx/algorithms/tests/test_dominance.py::TestDominanceFrontiers::test_exceptions"
]
| []
| BSD 3-Clause | 508 | [
"networkx/algorithms/components/semiconnected.py",
"networkx/algorithms/community/community_generators.py",
"networkx/algorithms/dominance.py",
"doc/source/reference/algorithms.rst",
"networkx/algorithms/community/__init__.py",
"networkx/algorithms/components/connected.py",
"networkx/generators/geometric.py",
"networkx/algorithms/assortativity/mixing.py",
"networkx/algorithms/assortativity/correlation.py",
"networkx/algorithms/components/weakly_connected.py",
"networkx/algorithms/components/attracting.py",
"networkx/algorithms/components/biconnected.py",
"networkx/algorithms/components/strongly_connected.py"
]
| [
"networkx/algorithms/components/semiconnected.py",
"networkx/algorithms/dominance.py",
"networkx/algorithms/components/attracting.py",
"doc/source/reference/algorithms.rst",
"networkx/algorithms/community/__init__.py",
"networkx/algorithms/components/connected.py",
"networkx/generators/geometric.py",
"networkx/algorithms/assortativity/mixing.py",
"networkx/algorithms/assortativity/correlation.py",
"networkx/algorithms/components/weakly_connected.py",
"networkx/algorithms/community/generators.py",
"networkx/algorithms/components/biconnected.py",
"networkx/algorithms/components/strongly_connected.py"
]
|
|
zalando-stups__zign-14 | 46f296b8952b518c9f93d398ef890d5d4001a37a | 2016-04-19 17:40:40 | e2641d7972e25769830f527c1bd81f6729f4d3ea | diff --git a/zign/api.py b/zign/api.py
index d98ed9a..065afae 100644
--- a/zign/api.py
+++ b/zign/api.py
@@ -100,6 +100,11 @@ def get_named_token(scope, realm, name, user, password, url=None,
if existing_token:
return existing_token
+ if name and not realm:
+ access_token = get_service_token(name, scope)
+ if access_token:
+ return {'access_token': access_token}
+
config = get_config()
url = url or config.get('url')
@@ -153,6 +158,21 @@ def is_user_scope(scope: str):
return scope in set(['uid', 'cn'])
+def get_service_token(name: str, scopes: list):
+ '''Get service token (tokens lib) if possible, otherwise return None'''
+ tokens.manage(name, scopes)
+ try:
+ access_token = tokens.get(name)
+ except tokens.ConfigurationError:
+ # will be thrown if configuration is missing (e.g. OAUTH2_ACCESS_TOKEN_URL)
+ access_token = None
+ except tokens.InvalidCredentialsError:
+ # will be thrown if $CREDENTIALS_DIR/*.json cannot be read
+ access_token = None
+
+ return access_token
+
+
def get_token(name: str, scopes: list):
'''Get an OAuth token, either from Token Service
or directly from OAuth provider (using the Python tokens library)'''
@@ -163,14 +183,7 @@ def get_token(name: str, scopes: list):
if token:
return token['access_token']
- tokens.manage(name, scopes)
- try:
- access_token = tokens.get(name)
- except tokens.ConfigurationError:
- access_token = None
- except tokens.InvalidCredentialsError:
- access_token = None
-
+ access_token = get_service_token(name, scopes)
if access_token:
return access_token
| Transparently get service tokens via "tokens" library if possible
We already have the `zign.api.get_token` function and we should consider getting service tokens transparently also when using `zign token` directly. | zalando-stups/zign | diff --git a/tests/test_api.py b/tests/test_api.py
index ad3cc20..e6e2f6c 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,4 +1,5 @@
import pytest
+import time
import tokens
import zign.api
@@ -72,3 +73,18 @@ def test_get_token_fallback_success(monkeypatch):
monkeypatch.setattr('zign.api.get_new_token', lambda *args, **kwargs: {'access_token': 'tt77'})
assert zign.api.get_token('mytok', ['myscope']) == 'tt77'
+
+
+def test_get_named_token_existing(monkeypatch):
+ existing = {'mytok': {'access_token': 'tt77', 'creation_time': time.time() - 10, 'expires_in': 3600}}
+ monkeypatch.setattr('zign.api.get_tokens', lambda: existing)
+ tok = zign.api.get_named_token(scope=['myscope'], realm=None, name='mytok', user='myusr', password='mypw')
+ assert tok['access_token'] == 'tt77'
+
+
+def test_get_named_token_services(monkeypatch):
+ response = MagicMock(status_code=401)
+ monkeypatch.setattr('requests.get', MagicMock(return_value=response))
+ monkeypatch.setattr('tokens.get', lambda x: 'svcmytok123')
+ tok = zign.api.get_named_token(scope=['myscope'], realm=None, name='mytok', user='myusr', password='mypw')
+ assert tok['access_token'] == 'svcmytok123'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | backports.tarfile==1.2.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
cryptography==44.0.2
dnspython==2.7.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.9.0
keyring==25.6.0
keyrings.alt==5.0.2
more-itertools==10.6.0
packaging==24.2
pluggy==1.5.0
pycparser==2.22
pytest==8.3.5
pytest-cov==6.0.0
PyYAML==6.0.2
requests==2.32.3
SecretStorage==3.3.3
stups-cli-support==1.1.22
stups-tokens==1.1.19
-e git+https://github.com/zalando-stups/zign.git@46f296b8952b518c9f93d398ef890d5d4001a37a#egg=stups_zign
tomli==2.2.1
urllib3==2.3.0
zipp==3.21.0
| name: zign
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-tarfile==1.2.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- cryptography==44.0.2
- dnspython==2.7.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jeepney==0.9.0
- keyring==25.6.0
- keyrings-alt==5.0.2
- more-itertools==10.6.0
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pytest==8.3.5
- pytest-cov==6.0.0
- pyyaml==6.0.2
- requests==2.32.3
- secretstorage==3.3.3
- stups-cli-support==1.1.22
- stups-tokens==1.1.19
- tomli==2.2.1
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/zign
| [
"tests/test_api.py::test_get_named_token_services"
]
| [
"tests/test_api.py::test_get_new_token_auth_fail",
"tests/test_api.py::test_get_new_token_server_error"
]
| [
"tests/test_api.py::test_get_new_token_invalid_json",
"tests/test_api.py::test_get_new_token_missing_access_token",
"tests/test_api.py::test_get_token_existing",
"tests/test_api.py::test_get_token_configuration_error",
"tests/test_api.py::test_get_token_service_success",
"tests/test_api.py::test_get_token_fallback_success",
"tests/test_api.py::test_get_named_token_existing"
]
| []
| Apache License 2.0 | 509 | [
"zign/api.py"
]
| [
"zign/api.py"
]
|
|
PyCQA__pycodestyle-499 | eae54ff0e4c50ccc4507e95a2f8689fefb89e70e | 2016-04-20 13:29:51 | 4f5d398f9256727ad8fd7f67c45ea60a8fad5a4a | IanLee1521: Can you also update the error list page in the docs to say this is in the default list? | diff --git a/docs/intro.rst b/docs/intro.rst
index e26daf7..2ce1eb6 100644
--- a/docs/intro.rst
+++ b/docs/intro.rst
@@ -392,7 +392,7 @@ This is the current list of error and warning codes:
+------------+----------------------------------------------------------------------+
| **W5** | *Line break warning* |
+------------+----------------------------------------------------------------------+
-| W503 | line break occurred before a binary operator |
+| W503 (*) | line break occurred before a binary operator |
+------------+----------------------------------------------------------------------+
+------------+----------------------------------------------------------------------+
| **W6** | *Deprecation warning* |
diff --git a/pep8.py b/pep8.py
index 8ec21e1..499c370 100755
--- a/pep8.py
+++ b/pep8.py
@@ -65,7 +65,7 @@ except ImportError:
__version__ = '1.8.0-dev'
DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox'
-DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704'
+DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704,W503'
try:
if sys.platform == 'win32':
USER_CONFIG = os.path.expanduser(r'~\.pep8')
| Update handling of breaking around binary operators: W503 vs W504
Based on the discussion that started on python-dev [1] and ended with an update to PEP-8 [2][3], the logic for allowing (preferring really) breaks to occur before rather than after binary operators.
[1] https://mail.python.org/pipermail/python-ideas/2016-April/039774.html
[2] http://bugs.python.org/issue26763
[3] https://hg.python.org/peps/rev/3857909d7956
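For illustration, a small hedged example of the two styles in question (variable names are illustrative only):
```
# Break *after* the binary operator -- the form a W504-style check would flag:
income = (gross_wages +
          taxable_interest)

# Break *before* the binary operator -- currently reported as W503,
# but the style the updated PEP 8 now prefers:
income = (gross_wages
          + taxable_interest)
```
One way to handle this, and what the patch here does, is to add W503 to the default ignore list so the preferred break-before style is no longer reported by default.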
| PyCQA/pycodestyle | diff --git a/testsuite/test_api.py b/testsuite/test_api.py
index 1cb0d4b..0b83c4e 100644
--- a/testsuite/test_api.py
+++ b/testsuite/test_api.py
@@ -181,7 +181,7 @@ class APITestCase(unittest.TestCase):
self.assertEqual(options.select, ())
self.assertEqual(
options.ignore,
- ('E121', 'E123', 'E126', 'E226', 'E24', 'E704')
+ ('E121', 'E123', 'E126', 'E226', 'E24', 'E704', 'W503')
)
options = parse_argv('--doctest').options
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"tox",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
distlib==0.3.9
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.18.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
-e git+https://github.com/PyCQA/pycodestyle.git@eae54ff0e4c50ccc4507e95a2f8689fefb89e70e#egg=pep8
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
pyproject-api==1.9.0
pytest @ file:///croot/pytest_1738938843180/work
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.30.0
| name: pycodestyle
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- distlib==0.3.9
- filelock==3.18.0
- platformdirs==4.3.7
- pyproject-api==1.9.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.30.0
prefix: /opt/conda/envs/pycodestyle
| [
"testsuite/test_api.py::APITestCase::test_styleguide_ignore_code"
]
| []
| [
"testsuite/test_api.py::APITestCase::test_check_nullbytes",
"testsuite/test_api.py::APITestCase::test_check_unicode",
"testsuite/test_api.py::APITestCase::test_register_ast_check",
"testsuite/test_api.py::APITestCase::test_register_invalid_check",
"testsuite/test_api.py::APITestCase::test_register_logical_check",
"testsuite/test_api.py::APITestCase::test_register_physical_check",
"testsuite/test_api.py::APITestCase::test_styleguide",
"testsuite/test_api.py::APITestCase::test_styleguide_check_files",
"testsuite/test_api.py::APITestCase::test_styleguide_checks",
"testsuite/test_api.py::APITestCase::test_styleguide_continuation_line_outdented",
"testsuite/test_api.py::APITestCase::test_styleguide_excluded",
"testsuite/test_api.py::APITestCase::test_styleguide_init_report",
"testsuite/test_api.py::APITestCase::test_styleguide_options",
"testsuite/test_api.py::APITestCase::test_styleguide_unmatched_triple_quotes"
]
| []
| Expat License | 510 | [
"pep8.py",
"docs/intro.rst"
]
| [
"pep8.py",
"docs/intro.rst"
]
|
dask__dask-1113 | 6e7fea680dc7eaf6f96452e00625e0466c110532 | 2016-04-22 23:07:23 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | diff --git a/dask/array/core.py b/dask/array/core.py
index b85b5c2c0..a6e10cc55 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -1377,6 +1377,12 @@ class Array(Base):
out._chunks = chunks
return out
+ def copy(self):
+ """
+ Copy array. This is a no-op for dask.arrays, which are immutable
+ """
+ return self
+
def to_imperative(self):
""" Convert Array into dask Values
| Add a dummy .copy() method on dask arrays?
There's no need to copy the data in dask arrays, because they're (currently) immutable. Still, it's common to copy numpy arrays to be on the safe side, so it might be nice to add this for duck-array compatibility. | dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index f75b85ee5..1351bff05 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -2003,3 +2003,8 @@ def test_atop_names():
def test_A_property():
x = da.ones(5, chunks=(2,))
assert x.A is x
+
+
+def test_copy():
+ x = da.ones(5, chunks=(2,))
+ assert x.copy() is x
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
boto3==1.23.10
botocore==1.26.10
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@6e7fea680dc7eaf6f96452e00625e0466c110532#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.9.5
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs==0.4.2
s3transfer==0.5.2
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- boto3==1.23.10
- botocore==1.26.10
- distributed==1.9.5
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- s3fs==0.4.2
- s3transfer==0.5.2
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_copy"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_unravel",
"dask/array/tests/test_array_core.py::test_reshape",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_to_hdf5",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_h5py_newaxis",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_h5py_tokenize",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_imperative",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_A_property"
]
| []
| BSD 3-Clause "New" or "Revised" License | 511 | [
"dask/array/core.py"
]
| [
"dask/array/core.py"
]
|
|
tornadoweb__tornado-1699 | d71026ab2e1febfedb9d5e0589107349c5019fde | 2016-04-23 23:40:24 | c20c44d776d3bd9b2c002db5aaa9e3b5284a3043 | diff --git a/tornado/escape.py b/tornado/escape.py
index e6636f20..7a3b0e03 100644
--- a/tornado/escape.py
+++ b/tornado/escape.py
@@ -37,6 +37,11 @@ else:
import htmlentitydefs
import urllib as urllib_parse
+try:
+ import typing # noqa
+except ImportError:
+ pass
+
_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
_XHTML_ESCAPE_DICT = {'&': '&', '<': '<', '>': '>', '"': '"',
@@ -180,6 +185,7 @@ _UTF8_TYPES = (bytes, type(None))
def utf8(value):
+ # type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None]
"""Converts a string argument to a byte string.
If the argument is already a byte string or None, it is returned unchanged.
diff --git a/tornado/http1connection.py b/tornado/http1connection.py
index b04cff13..8194f914 100644
--- a/tornado/http1connection.py
+++ b/tornado/http1connection.py
@@ -30,7 +30,7 @@ from tornado import httputil
from tornado import iostream
from tornado.log import gen_log, app_log
from tornado import stack_context
-from tornado.util import GzipDecompressor
+from tornado.util import GzipDecompressor, PY3
class _QuietException(Exception):
@@ -372,7 +372,14 @@ class HTTP1Connection(httputil.HTTPConnection):
self._expected_content_remaining = int(headers['Content-Length'])
else:
self._expected_content_remaining = None
- lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()])
+ # TODO: headers are supposed to be of type str, but we still have some
+ # cases that let bytes slip through. Remove these native_str calls when those
+ # are fixed.
+ header_lines = (native_str(n) + ": " + native_str(v) for n, v in headers.get_all())
+ if PY3:
+ lines.extend(l.encode('latin1') for l in header_lines)
+ else:
+ lines.extend(header_lines)
for line in lines:
if b'\n' in line:
raise ValueError('Newline in header: ' + repr(line))
diff --git a/tornado/httputil.py b/tornado/httputil.py
index d0901565..866681ad 100644
--- a/tornado/httputil.py
+++ b/tornado/httputil.py
@@ -59,6 +59,12 @@ except ImportError:
# on the class definition itself; must go through an assignment.
SSLError = _SSLError # type: ignore
+try:
+ import typing
+except ImportError:
+ pass
+
+
# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line
# terminator and ignore any preceding CR.
_CRLF_RE = re.compile(r'\r?\n')
@@ -124,8 +130,8 @@ class HTTPHeaders(collections.MutableMapping):
Set-Cookie: C=D
"""
def __init__(self, *args, **kwargs):
- self._dict = {}
- self._as_list = {}
+ self._dict = {} # type: typing.Dict[str, str]
+ self._as_list = {} # type: typing.Dict[str, typing.List[str]]
self._last_key = None
if (len(args) == 1 and len(kwargs) == 0 and
isinstance(args[0], HTTPHeaders)):
@@ -139,6 +145,7 @@ class HTTPHeaders(collections.MutableMapping):
# new public methods
def add(self, name, value):
+ # type: (str, str) -> None
"""Adds a new value for the given key."""
norm_name = _normalized_headers[name]
self._last_key = norm_name
@@ -155,6 +162,7 @@ class HTTPHeaders(collections.MutableMapping):
return self._as_list.get(norm_name, [])
def get_all(self):
+ # type: () -> typing.Iterable[typing.Tuple[str, str]]
"""Returns an iterable of all (name, value) pairs.
If a header has multiple values, multiple pairs will be
@@ -203,6 +211,7 @@ class HTTPHeaders(collections.MutableMapping):
self._as_list[norm_name] = [value]
def __getitem__(self, name):
+ # type: (str) -> str
return self._dict[_normalized_headers[name]]
def __delitem__(self, name):
diff --git a/tornado/log.py b/tornado/log.py
index ac1bb95e..1d10d379 100644
--- a/tornado/log.py
+++ b/tornado/log.py
@@ -77,8 +77,8 @@ class LogFormatter(logging.Formatter):
* Robust against str/bytes encoding problems.
This formatter is enabled automatically by
- `tornado.options.parse_command_line` or `tornado.options.parse_config_file`
- (unless ``--logging=none`` is used).
+ `tornado.options.parse_command_line` (unless ``--logging=none`` is
+ used).
"""
DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s'
DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S'
diff --git a/tornado/util.py b/tornado/util.py
index 4283d4e8..d49a84f4 100644
--- a/tornado/util.py
+++ b/tornado/util.py
@@ -33,12 +33,13 @@ else:
# Aliases for types that are spelled differently in different Python
# versions. bytes_type is deprecated and no longer used in Tornado
# itself but is left in case anyone outside Tornado is using it.
-unicode_type = type(u'')
bytes_type = bytes
if PY3:
+ unicode_type = str
basestring_type = str
else:
- # The name basestring doesn't exist in py3 so silence flake8.
+ # The names unicode and basestring don't exist in py3 so silence flake8.
+ unicode_type = unicode # noqa
basestring_type = basestring # noqa
diff --git a/tornado/web.py b/tornado/web.py
index 8f2acfcc..c9ff2b2d 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -104,6 +104,11 @@ else:
try:
import typing # noqa
+
+ # The following types are accepted by RequestHandler.set_header
+ # and related methods.
+ _HeaderTypes = typing.Union[bytes, unicode_type,
+ numbers.Integral, datetime.datetime]
except ImportError:
pass
@@ -164,6 +169,7 @@ class RequestHandler(object):
self._auto_finish = True
self._transforms = None # will be set in _execute
self._prepared_future = None
+ self._headers = None # type: httputil.HTTPHeaders
self.path_args = None
self.path_kwargs = None
self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
@@ -318,6 +324,7 @@ class RequestHandler(object):
return self._status_code
def set_header(self, name, value):
+ # type: (str, _HeaderTypes) -> None
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
@@ -327,6 +334,7 @@ class RequestHandler(object):
self._headers[name] = self._convert_header_value(value)
def add_header(self, name, value):
+ # type: (str, _HeaderTypes) -> None
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
@@ -343,13 +351,25 @@ class RequestHandler(object):
if name in self._headers:
del self._headers[name]
- _INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
+ _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]")
def _convert_header_value(self, value):
- if isinstance(value, bytes):
- pass
- elif isinstance(value, unicode_type):
- value = value.encode('utf-8')
+ # type: (_HeaderTypes) -> str
+
+ # Convert the input value to a str. This type check is a bit
+ # subtle: The bytes case only executes on python 3, and the
+ # unicode case only executes on python 2, because the other
+ # cases are covered by the first match for str.
+ if isinstance(value, str):
+ retval = value
+ elif isinstance(value, bytes): # py3
+ # Non-ascii characters in headers are not well supported,
+ # but if you pass bytes, use latin1 so they pass through as-is.
+ retval = value.decode('latin1')
+ elif isinstance(value, unicode_type): # py2
+ # TODO: This is inconsistent with the use of latin1 above,
+ # but it's been that way for a long time. Should it change?
+ retval = escape.utf8(value)
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
@@ -359,9 +379,9 @@ class RequestHandler(object):
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request.
- if RequestHandler._INVALID_HEADER_CHAR_RE.search(value):
- raise ValueError("Unsafe header value %r", value)
- return value
+ if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval):
+ raise ValueError("Unsafe header value %r", retval)
+ return retval
_ARG_DEFAULT = object()
@@ -2696,6 +2716,7 @@ class OutputTransform(object):
pass
def transform_first_chunk(self, status_code, headers, chunk, finishing):
+ # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes]
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
@@ -2736,10 +2757,12 @@ class GZipContentEncoding(OutputTransform):
return ctype.startswith('text/') or ctype in self.CONTENT_TYPES
def transform_first_chunk(self, status_code, headers, chunk, finishing):
+ # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes]
+ # TODO: can/should this type be inherited from the superclass?
if 'Vary' in headers:
- headers['Vary'] += b', Accept-Encoding'
+ headers['Vary'] += ', Accept-Encoding'
else:
- headers['Vary'] = b'Accept-Encoding'
+ headers['Vary'] = 'Accept-Encoding'
if self._gzipping:
ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
self._gzipping = self._compressible_type(ctype) and \
| When using AsyncHTTPClient: AttributeError: 'GZipContentEncoding' object has no attribute '_gzip_file'
File "D:\Python3.4.3\lib\site-packages\tornado-4.3-py3.4-win32.egg\tornado\web.py", line 781, in render
self.finish(html)
File "D:\Python3.4.3\lib\site-packages\tornado-4.3-py3.4-win32.egg\tornado\web.py", line 932, in finish
self.flush(include_footers=True)
File "D:\Python3.4.3\lib\site-packages\tornado-4.3-py3.4-win32.egg\tornado\web.py", line 891, in flush
chunk = transform.transform_chunk(chunk, include_footers)
File "D:\Python3.4.3\lib\site-packages\tornado-4.3-py3.4-win32.egg\tornado\web.py", line 2763, in transform_chunk
self._gzip_file.write(chunk)
AttributeError: 'GZipContentEncoding' object has no attribute '_gzip_file' | tornadoweb/tornado | diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index fac23a21..7e417854 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -1506,8 +1506,8 @@ class ErrorHandlerXSRFTest(WebTestCase):
class GzipTestCase(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
- if self.get_argument('vary', None):
- self.set_header('Vary', self.get_argument('vary'))
+ for v in self.get_arguments('vary'):
+ self.add_header('Vary', v)
# Must write at least MIN_LENGTH bytes to activate compression.
self.write('hello world' + ('!' * GZipContentEncoding.MIN_LENGTH))
@@ -1516,8 +1516,7 @@ class GzipTestCase(SimpleHandlerTestCase):
gzip=True,
static_path=os.path.join(os.path.dirname(__file__), 'static'))
- def test_gzip(self):
- response = self.fetch('/')
+ def assert_compressed(self, response):
# simple_httpclient renames the content-encoding header;
# curl_httpclient doesn't.
self.assertEqual(
@@ -1525,17 +1524,18 @@ class GzipTestCase(SimpleHandlerTestCase):
'Content-Encoding',
response.headers.get('X-Consumed-Content-Encoding')),
'gzip')
+
+
+ def test_gzip(self):
+ response = self.fetch('/')
+ self.assert_compressed(response)
self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
def test_gzip_static(self):
# The streaming responses in StaticFileHandler have subtle
# interactions with the gzip output so test this case separately.
response = self.fetch('/robots.txt')
- self.assertEqual(
- response.headers.get(
- 'Content-Encoding',
- response.headers.get('X-Consumed-Content-Encoding')),
- 'gzip')
+ self.assert_compressed(response)
self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
def test_gzip_not_requested(self):
@@ -1545,9 +1545,16 @@ class GzipTestCase(SimpleHandlerTestCase):
def test_vary_already_present(self):
response = self.fetch('/?vary=Accept-Language')
- self.assertEqual(response.headers['Vary'],
- 'Accept-Language, Accept-Encoding')
-
+ self.assert_compressed(response)
+ self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
+ ['Accept-Language', 'Accept-Encoding'])
+
+ def test_vary_already_present_multiple(self):
+ # Regression test for https://github.com/tornadoweb/tornado/issues/1670
+ response = self.fetch('/?vary=Accept-Language&vary=Cookie')
+ self.assert_compressed(response)
+ self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
+ ['Accept-Language', 'Cookie', 'Accept-Encoding'])
@wsgi_safe
class PathArgsInPrepareTest(WebTestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 6
} | 4.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"futures",
"mock",
"monotonic",
"trollius",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
futures==2.2.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@d71026ab2e1febfedb9d5e0589107349c5019fde#egg=tornado
trollius==2.1.post2
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- futures==2.2.0
- mock==5.2.0
- monotonic==1.6
- six==1.17.0
- trollius==2.1.post2
prefix: /opt/conda/envs/tornado
| [
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple"
]
| []
| [
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_cookie_special_char",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_error",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_root_static_path",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_compressed_files",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::StatusReasonTest::test_status",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip",
"tornado/test/web_test.py::HTTPErrorTest::test_copy",
"tornado/test/web_test.py::ApplicationTest::test_listen",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse"
]
| []
| Apache License 2.0 | 512 | [
"tornado/web.py",
"tornado/log.py",
"tornado/http1connection.py",
"tornado/httputil.py",
"tornado/escape.py",
"tornado/util.py"
]
| [
"tornado/web.py",
"tornado/log.py",
"tornado/http1connection.py",
"tornado/httputil.py",
"tornado/escape.py",
"tornado/util.py"
]
|
|
falconry__falcon-762 | 3584dfd4d3653f2165cbd1e6832ea5d250a5d319 | 2016-04-25 12:43:40 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | kgriffs: This looks good, thanks! Would you mind rebasing? Once that's done, this is ready to merge.
yohanboniface: Done :)
codecov-io: ## [Current coverage][cc-pull] is **98.99%**
> Merging [#762][cc-pull] into [master][cc-base-branch] will not change coverage
```diff
@@ master #762 diff @@
==========================================
Files 29 29
Lines 1777 1777
Methods 0 0
Messages 0 0
Branches 296 296
==========================================
Hits 1759 1759
Misses 5 5
Partials 13 13
```
[][cc-pull]
> Powered by [Codecov](https://codecov.io?src=pr). Last updated by dd75405
[cc-base-branch]: https://codecov.io/gh/falconry/falcon/branch/master?src=pr
[cc-pull]: https://codecov.io/gh/falconry/falcon/pull/762?src=pr | diff --git a/falcon/request.py b/falcon/request.py
index 5b4634a..6516154 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -43,8 +43,8 @@ SimpleCookie = http_cookies.SimpleCookie
DEFAULT_ERROR_LOG_FORMAT = (u'{0:%Y-%m-%d %H:%M:%S} [FALCON] [ERROR]'
u' {1} {2}{3} => ')
-TRUE_STRINGS = ('true', 'True', 'yes')
-FALSE_STRINGS = ('false', 'False', 'no')
+TRUE_STRINGS = ('true', 'True', 'yes', '1')
+FALSE_STRINGS = ('false', 'False', 'no', '0')
WSGI_CONTENT_HEADERS = ('CONTENT_TYPE', 'CONTENT_LENGTH')
@@ -865,8 +865,8 @@ class Request(object):
The following boolean strings are supported::
- TRUE_STRINGS = ('true', 'True', 'yes')
- FALSE_STRINGS = ('false', 'False', 'no')
+ TRUE_STRINGS = ('true', 'True', 'yes', '1')
+ FALSE_STRINGS = ('false', 'False', 'no', '0')
Args:
name (str): Parameter name, case-sensitive (e.g., 'detailed').
| Also convert '1' and '0' to booleans from the query string
I see that [tests](https://github.com/falconry/falcon/blob/master/tests/test_query_params.py#L194) specifically exclude `0` and `1` from valid values to be converted to boolean by `Request.get_param_as_bool` method.
I'd like to discuss this situation :)
I see no reason not to convert those values, as they seem unambiguous to me, and there are a bunch of situations where 1 and 0 are de facto booleans (database outputs…), but we only deal with strings in the query string.
What is the rationale behind this?
Thanks! :) | falconry/falcon | diff --git a/tests/test_query_params.py b/tests/test_query_params.py
index 285a6aa..a9b44e0 100644
--- a/tests/test_query_params.py
+++ b/tests/test_query_params.py
@@ -201,8 +201,8 @@ class _TestQueryParams(testing.TestBase):
req.get_param_as_int, 'pos', min=0, max=10)
def test_boolean(self):
- query_string = ('echo=true&doit=false&bogus=0&bogus2=1&'
- 't1=True&f1=False&t2=yes&f2=no&blank')
+ query_string = ('echo=true&doit=false&bogus=bar&bogus2=foo&'
+ 't1=True&f1=False&t2=yes&f2=no&blank&one=1&zero=0')
self.simulate_request('/', query_string=query_string)
req = self.resource.req
@@ -226,6 +226,8 @@ class _TestQueryParams(testing.TestBase):
self.assertEqual(req.get_param_as_bool('t2'), True)
self.assertEqual(req.get_param_as_bool('f1'), False)
self.assertEqual(req.get_param_as_bool('f2'), False)
+ self.assertEqual(req.get_param_as_bool('one'), True)
+ self.assertEqual(req.get_param_as_bool('zero'), False)
self.assertEqual(req.get_param('blank'), None)
store = {}
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@3584dfd4d3653f2165cbd1e6832ea5d250a5d319#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_query_params.py::_TestQueryParams::test_boolean",
"tests/test_query_params.py::PostQueryParams::test_boolean",
"tests/test_query_params.py::GetQueryParams::test_boolean"
]
| []
| [
"tests/test_query_params.py::_TestQueryParams::test_allowed_names",
"tests/test_query_params.py::_TestQueryParams::test_bad_percentage",
"tests/test_query_params.py::_TestQueryParams::test_blank",
"tests/test_query_params.py::_TestQueryParams::test_boolean_blank",
"tests/test_query_params.py::_TestQueryParams::test_get_date_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_date_store",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::_TestQueryParams::test_int",
"tests/test_query_params.py::_TestQueryParams::test_int_neg",
"tests/test_query_params.py::_TestQueryParams::test_list_transformer",
"tests/test_query_params.py::_TestQueryParams::test_list_type",
"tests/test_query_params.py::_TestQueryParams::test_list_type_blank",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::_TestQueryParams::test_none",
"tests/test_query_params.py::_TestQueryParams::test_param_property",
"tests/test_query_params.py::_TestQueryParams::test_percent_encoded",
"tests/test_query_params.py::_TestQueryParams::test_required_1_get_param",
"tests/test_query_params.py::_TestQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::_TestQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::_TestQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParams::test_allowed_names",
"tests/test_query_params.py::PostQueryParams::test_bad_percentage",
"tests/test_query_params.py::PostQueryParams::test_blank",
"tests/test_query_params.py::PostQueryParams::test_boolean_blank",
"tests/test_query_params.py::PostQueryParams::test_explicitly_disable_auto_parse",
"tests/test_query_params.py::PostQueryParams::test_get_date_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_date_store",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::PostQueryParams::test_int",
"tests/test_query_params.py::PostQueryParams::test_int_neg",
"tests/test_query_params.py::PostQueryParams::test_list_transformer",
"tests/test_query_params.py::PostQueryParams::test_list_type",
"tests/test_query_params.py::PostQueryParams::test_list_type_blank",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::PostQueryParams::test_non_ascii",
"tests/test_query_params.py::PostQueryParams::test_none",
"tests/test_query_params.py::PostQueryParams::test_param_property",
"tests/test_query_params.py::PostQueryParams::test_percent_encoded",
"tests/test_query_params.py::PostQueryParams::test_required_1_get_param",
"tests/test_query_params.py::PostQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::PostQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::PostQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::PostQueryParams::test_simple",
"tests/test_query_params.py::GetQueryParams::test_allowed_names",
"tests/test_query_params.py::GetQueryParams::test_bad_percentage",
"tests/test_query_params.py::GetQueryParams::test_blank",
"tests/test_query_params.py::GetQueryParams::test_boolean_blank",
"tests/test_query_params.py::GetQueryParams::test_get_date_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_date_store",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::GetQueryParams::test_int",
"tests/test_query_params.py::GetQueryParams::test_int_neg",
"tests/test_query_params.py::GetQueryParams::test_list_transformer",
"tests/test_query_params.py::GetQueryParams::test_list_type",
"tests/test_query_params.py::GetQueryParams::test_list_type_blank",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::GetQueryParams::test_none",
"tests/test_query_params.py::GetQueryParams::test_param_property",
"tests/test_query_params.py::GetQueryParams::test_percent_encoded",
"tests/test_query_params.py::GetQueryParams::test_required_1_get_param",
"tests/test_query_params.py::GetQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::GetQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::GetQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::GetQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default"
]
| []
| Apache License 2.0 | 513 | [
"falcon/request.py"
]
| [
"falcon/request.py"
]
|
dask__dask-1126 | 5d94595499ebf0dd7b7bfcccca328f7d0fca7b7e | 2016-04-27 16:57:30 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | diff --git a/dask/callbacks.py b/dask/callbacks.py
index 55e7a5a1b..61aba8676 100644
--- a/dask/callbacks.py
+++ b/dask/callbacks.py
@@ -44,10 +44,14 @@ class Callback(object):
"""
def __init__(self, start=None, pretask=None, posttask=None, finish=None):
- self._start = start
- self._pretask = pretask
- self._posttask = posttask
- self._finish = finish
+ if start:
+ self._start = start
+ if pretask:
+ self._pretask = pretask
+ if posttask:
+ self._posttask = posttask
+ if finish:
+ self._finish = finish
@property
def _callback(self):
| Callback __init__() overrides subclass callback definitions
The `__init__` method in `Callback` is:
```
def __init__(self, start=None, pretask=None, posttask=None, finish=None):
self._start = start
self._pretask = pretask
self._posttask = posttask
self._finish = finish
```
which overrides callbacks defined in subclasses, e.g.:
```
class TaskCounter(Callback):
def _pretask(self, key, dask, state):
count(key, dask, state)
```
which doesn't work, while:
```
class TaskCounter(Callback):
def __init__(self):
pass
def _pretask(self, key, dask, state):
count(key, dask, state)
```
does. I'm not quite sure what the desired behavior here is, but the example in the docs:
```
class PrintKeys(Callback):
def _pretask(self, key, dask, state):
"""Print the key of every task as it's started"""
print("Computing: {0}!".format(repr(key)))
```
doesn't work. | dask/dask | diff --git a/dask/tests/test_callbacks.py b/dask/tests/test_callbacks.py
new file mode 100644
index 000000000..b4f4b8396
--- /dev/null
+++ b/dask/tests/test_callbacks.py
@@ -0,0 +1,14 @@
+from dask.async import get_sync
+from dask.callbacks import Callback
+
+def test_callback():
+ flag = [False]
+
+ class MyCallback(Callback):
+ def _start(self, dsk):
+ flag[0] = True
+
+ with MyCallback():
+ get_sync({'x': 1}, 'x')
+
+ assert flag[0] is True
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
boto3==1.23.10
botocore==1.26.10
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@5d94595499ebf0dd7b7bfcccca328f7d0fca7b7e#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.9.5
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs==0.4.2
s3transfer==0.5.2
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- boto3==1.23.10
- botocore==1.26.10
- distributed==1.9.5
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- s3fs==0.4.2
- s3transfer==0.5.2
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/tests/test_callbacks.py::test_callback"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 514 | [
"dask/callbacks.py"
]
| [
"dask/callbacks.py"
]
|
|
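The dask__dask-1126 record above hinges on a classic Python pitfall: `__init__` assigning `None` creates instance attributes that shadow methods defined on a subclass. A minimal standalone sketch of the problem and of the guard-style fix the patch applies (toy classes, not dask's own code):

```python
class BrokenCallback(object):
    def __init__(self, pretask=None):
        self._pretask = pretask            # always assigned -> shadows subclass method


class FixedCallback(object):
    def __init__(self, pretask=None):
        if pretask:                        # only assign when a hook was actually passed
            self._pretask = pretask


class BrokenCounter(BrokenCallback):
    def _pretask(self, key):
        print("computing", key)


class FixedCounter(FixedCallback):
    def _pretask(self, key):
        print("computing", key)


print(callable(BrokenCounter()._pretask))  # False: the instance attribute None hides the method
print(callable(FixedCounter()._pretask))   # True: the subclass method stays visible
```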
pre-commit__pre-commit-hooks-111 | 17478a0a50faf20fd8e5b3fefe7435cea410df0b | 2016-04-27 18:19:24 | 17478a0a50faf20fd8e5b3fefe7435cea410df0b | diff --git a/README.md b/README.md
index 091a8a4..6c3a3ec 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,8 @@ Add this to your `.pre-commit-config.yaml`
- `double-quote-string-fixer` - This hook replaces double quoted strings
with single quoted strings.
- `end-of-file-fixer` - Makes sure files end in a newline and only a newline.
-- `fix-encoding-pragma` - Add `# -*- coding: utf-8 -*-` to the top of python files
+- `fix-encoding-pragma` - Add `# -*- coding: utf-8 -*-` to the top of python files.
+ - To remove the coding pragma pass `--remove` (useful in a python3-only codebase)
- `flake8` - Run flake8 on your python files.
- `name-tests-test` - Assert that files in tests/ end in `_test.py`.
- Use `args: ['--django']` to match `test*.py` instead.
diff --git a/pre_commit_hooks/fix_encoding_pragma.py b/pre_commit_hooks/fix_encoding_pragma.py
index 48fc9c7..8586937 100644
--- a/pre_commit_hooks/fix_encoding_pragma.py
+++ b/pre_commit_hooks/fix_encoding_pragma.py
@@ -3,7 +3,7 @@ from __future__ import print_function
from __future__ import unicode_literals
import argparse
-import io
+import collections
expected_pragma = b'# -*- coding: utf-8 -*-\n'
@@ -21,34 +21,72 @@ def has_coding(line):
)
-def fix_encoding_pragma(f):
- first_line = f.readline()
- second_line = f.readline()
- old = f.read()
- f.seek(0)
+class ExpectedContents(collections.namedtuple(
+ 'ExpectedContents', ('shebang', 'rest', 'pragma_status'),
+)):
+ """
+ pragma_status:
+ - True: has exactly the coding pragma expected
+ - False: missing coding pragma entirely
+ - None: has a coding pragma, but it does not match
+ """
+ __slots__ = ()
- # Ok case: the file is empty
- if not (first_line + second_line + old).strip():
- return 0
+ @property
+ def has_any_pragma(self):
+ return self.pragma_status is not False
- # Ok case: we specify pragma as the first line
- if first_line == expected_pragma:
- return 0
+ def is_expected_pragma(self, remove):
+ expected_pragma_status = not remove
+ return self.pragma_status is expected_pragma_status
- # OK case: we have a shebang as first line and pragma on second line
- if first_line.startswith(b'#!') and second_line == expected_pragma:
- return 0
- # Otherwise we need to rewrite stuff!
+def _get_expected_contents(first_line, second_line, rest):
if first_line.startswith(b'#!'):
- if has_coding(second_line):
- f.write(first_line + expected_pragma + old)
- else:
- f.write(first_line + expected_pragma + second_line + old)
- elif has_coding(first_line):
- f.write(expected_pragma + second_line + old)
+ shebang = first_line
+ potential_coding = second_line
else:
- f.write(expected_pragma + first_line + second_line + old)
+ shebang = b''
+ potential_coding = first_line
+ rest = second_line + rest
+
+ if potential_coding == expected_pragma:
+ pragma_status = True
+ elif has_coding(potential_coding):
+ pragma_status = None
+ else:
+ pragma_status = False
+ rest = potential_coding + rest
+
+ return ExpectedContents(
+ shebang=shebang, rest=rest, pragma_status=pragma_status,
+ )
+
+
+def fix_encoding_pragma(f, remove=False):
+ expected = _get_expected_contents(f.readline(), f.readline(), f.read())
+
+ # Special cases for empty files
+ if not expected.rest.strip():
+ # If a file only has a shebang or a coding pragma, remove it
+ if expected.has_any_pragma or expected.shebang:
+ f.seek(0)
+ f.truncate()
+ f.write(b'')
+ return 1
+ else:
+ return 0
+
+ if expected.is_expected_pragma(remove):
+ return 0
+
+ # Otherwise, write out the new file
+ f.seek(0)
+ f.truncate()
+ f.write(expected.shebang)
+ if not remove:
+ f.write(expected_pragma)
+ f.write(expected.rest)
return 1
@@ -56,18 +94,25 @@ def fix_encoding_pragma(f):
def main(argv=None):
parser = argparse.ArgumentParser('Fixes the encoding pragma of python files')
parser.add_argument('filenames', nargs='*', help='Filenames to fix')
+ parser.add_argument(
+ '--remove', action='store_true',
+ help='Remove the encoding pragma (Useful in a python3-only codebase)',
+ )
args = parser.parse_args(argv)
retv = 0
+ if args.remove:
+ fmt = 'Removed encoding pragma from {filename}'
+ else:
+ fmt = 'Added `{pragma}` to {filename}'
+
for filename in args.filenames:
- with io.open(filename, 'r+b') as f:
- file_ret = fix_encoding_pragma(f)
+ with open(filename, 'r+b') as f:
+ file_ret = fix_encoding_pragma(f, remove=args.remove)
retv |= file_ret
if file_ret:
- print('Added `{0}` to {1}'.format(
- expected_pragma.strip(), filename,
- ))
+ print(fmt.format(pragma=expected_pragma, filename=filename))
return retv
| Add a --remove option to fix-encoding-pragma
The encoding pragma is unnecessary in Python 3. | pre-commit/pre-commit-hooks | diff --git a/tests/fix_encoding_pragma_test.py b/tests/fix_encoding_pragma_test.py
index e000a33..a9502a2 100644
--- a/tests/fix_encoding_pragma_test.py
+++ b/tests/fix_encoding_pragma_test.py
@@ -10,32 +10,46 @@ from pre_commit_hooks.fix_encoding_pragma import main
def test_integration_inserting_pragma(tmpdir):
- file_path = tmpdir.join('foo.py').strpath
+ path = tmpdir.join('foo.py')
+ path.write_binary(b'import httplib\n')
- with open(file_path, 'wb') as file_obj:
- file_obj.write(b'import httplib\n')
+ assert main((path.strpath,)) == 1
- assert main([file_path]) == 1
-
- with open(file_path, 'rb') as file_obj:
- assert file_obj.read() == (
- b'# -*- coding: utf-8 -*-\n'
- b'import httplib\n'
- )
+ assert path.read_binary() == (
+ b'# -*- coding: utf-8 -*-\n'
+ b'import httplib\n'
+ )
def test_integration_ok(tmpdir):
- file_path = tmpdir.join('foo.py').strpath
- with open(file_path, 'wb') as file_obj:
- file_obj.write(b'# -*- coding: utf-8 -*-\nx = 1\n')
- assert main([file_path]) == 0
+ path = tmpdir.join('foo.py')
+ path.write_binary(b'# -*- coding: utf-8 -*-\nx = 1\n')
+ assert main((path.strpath,)) == 0
+
+
+def test_integration_remove(tmpdir):
+ path = tmpdir.join('foo.py')
+ path.write_binary(b'# -*- coding: utf-8 -*-\nx = 1\n')
+
+ assert main((path.strpath, '--remove')) == 1
+
+ assert path.read_binary() == b'x = 1\n'
+
+
+def test_integration_remove_ok(tmpdir):
+ path = tmpdir.join('foo.py')
+ path.write_binary(b'x = 1\n')
+ assert main((path.strpath, '--remove')) == 0
@pytest.mark.parametrize(
'input_str',
(
b'',
- b'# -*- coding: utf-8 -*-\n',
+ (
+ b'# -*- coding: utf-8 -*-\n'
+ b'x = 1\n'
+ ),
(
b'#!/usr/bin/env python\n'
b'# -*- coding: utf-8 -*-\n'
@@ -59,20 +73,32 @@ def test_ok_inputs(input_str):
b'import httplib\n',
),
(
- b'#!/usr/bin/env python\n',
+ b'#!/usr/bin/env python\n'
+ b'x = 1\n',
b'#!/usr/bin/env python\n'
b'# -*- coding: utf-8 -*-\n'
+ b'x = 1\n',
),
(
- b'#coding=utf-8\n',
+ b'#coding=utf-8\n'
+ b'x = 1\n',
b'# -*- coding: utf-8 -*-\n'
+ b'x = 1\n',
),
(
b'#!/usr/bin/env python\n'
- b'#coding=utf8\n',
+ b'#coding=utf8\n'
+ b'x = 1\n',
b'#!/usr/bin/env python\n'
- b'# -*- coding: utf-8 -*-\n',
+ b'# -*- coding: utf-8 -*-\n'
+ b'x = 1\n',
),
+ # These should each get truncated
+ (b'#coding: utf-8\n', b''),
+ (b'# -*- coding: utf-8 -*-\n', b''),
+ (b'#!/usr/bin/env python\n', b''),
+ (b'#!/usr/bin/env python\n#coding: utf8\n', b''),
+ (b'#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n', b''),
)
)
def test_not_ok_inputs(input_str, output):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==1.3.2
attrs==22.2.0
autopep8==2.0.4
certifi==2021.5.30
cfgv==3.3.1
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==2.5.5
identify==2.4.4
importlib-metadata==4.2.0
importlib-resources==5.2.3
iniconfig==1.1.1
logilab-common==1.9.7
mccabe==0.4.0
mock==5.2.0
mypy-extensions==1.0.0
nodeenv==1.6.0
packaging==21.3
pep8==1.7.1
platformdirs==2.4.0
pluggy==1.0.0
pre-commit==2.17.0
-e git+https://github.com/pre-commit/pre-commit-hooks.git@17478a0a50faf20fd8e5b3fefe7435cea410df0b#egg=pre_commit_hooks
py==1.11.0
pycodestyle==2.10.0
pyflakes==1.0.0
pylint==1.3.1
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
simplejson==3.20.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: pre-commit-hooks
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- astroid==1.3.2
- attrs==22.2.0
- autopep8==2.0.4
- cfgv==3.3.1
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==2.5.5
- identify==2.4.4
- importlib-metadata==4.2.0
- importlib-resources==5.2.3
- iniconfig==1.1.1
- logilab-common==1.9.7
- mccabe==0.4.0
- mock==5.2.0
- mypy-extensions==1.0.0
- nodeenv==1.6.0
- packaging==21.3
- pep8==1.7.1
- platformdirs==2.4.0
- pluggy==1.0.0
- pre-commit==2.17.0
- py==1.11.0
- pycodestyle==2.10.0
- pyflakes==1.0.0
- pylint==1.3.1
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- simplejson==3.20.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- typing-extensions==4.1.1
- virtualenv==20.16.2
- zipp==3.6.0
prefix: /opt/conda/envs/pre-commit-hooks
| [
"tests/fix_encoding_pragma_test.py::test_integration_remove",
"tests/fix_encoding_pragma_test.py::test_integration_remove_ok",
"tests/fix_encoding_pragma_test.py::test_not_ok_inputs[#!/usr/bin/env",
"tests/fix_encoding_pragma_test.py::test_not_ok_inputs[#coding:",
"tests/fix_encoding_pragma_test.py::test_not_ok_inputs[#"
]
| []
| [
"tests/fix_encoding_pragma_test.py::test_integration_inserting_pragma",
"tests/fix_encoding_pragma_test.py::test_integration_ok",
"tests/fix_encoding_pragma_test.py::test_ok_inputs[]",
"tests/fix_encoding_pragma_test.py::test_ok_inputs[#",
"tests/fix_encoding_pragma_test.py::test_ok_inputs[#!/usr/bin/env",
"tests/fix_encoding_pragma_test.py::test_not_ok_inputs[import",
"tests/fix_encoding_pragma_test.py::test_not_ok_inputs[#coding=utf-8\\nx"
]
| []
| MIT License | 515 | [
"README.md",
"pre_commit_hooks/fix_encoding_pragma.py"
]
| [
"README.md",
"pre_commit_hooks/fix_encoding_pragma.py"
]
|
|
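The `--remove` flag added in the pre-commit-hooks record above boils down to a symmetric add/strip of one pragma line. A simplified standalone sketch of that core logic (function name and simplifications are mine; it skips the shebang-only truncation and alternate `#coding=` spellings the real hook handles):

```python
PRAGMA = b'# -*- coding: utf-8 -*-\n'


def fix_pragma(contents, remove=False):
    """Return (new_contents, changed) for one Python source blob."""
    lines = contents.splitlines(True)
    shebang = b''
    if lines and lines[0].startswith(b'#!'):       # keep a shebang in place
        shebang, lines = lines[0], lines[1:]
    has_pragma = bool(lines) and lines[0] == PRAGMA
    if remove and has_pragma:
        return shebang + b''.join(lines[1:]), True
    if not remove and not has_pragma:
        return shebang + PRAGMA + b''.join(lines), True
    return contents, False


print(fix_pragma(b'import os\n'))                     # pragma inserted
print(fix_pragma(PRAGMA + b'x = 1\n', remove=True))   # pragma stripped
```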
marshmallow-code__apispec-70 | fe3f7a02358e65a7942d3d5d4d81c6a17844c6ce | 2016-04-28 23:37:34 | 0aa7b1be3af715eace95c0e730f98ac6f5a03c64 | diff --git a/apispec/ext/flask.py b/apispec/ext/flask.py
index 19c029f..a6c1e97 100644
--- a/apispec/ext/flask.py
+++ b/apispec/ext/flask.py
@@ -27,6 +27,11 @@ function to `add_path`. Inspects URL rules and view docstrings.
from __future__ import absolute_import
import re
+try:
+ from urllib.parse import urljoin
+except ImportError:
+ from urlparse import urljoin
+
from flask import current_app
from apispec.compat import iteritems
@@ -61,6 +66,8 @@ def path_from_view(spec, view, **kwargs):
"""Path helper that allows passing a Flask view function."""
rule = _rule_for_view(view)
path = flaskpath2swagger(rule.rule)
+ app_root = current_app.config['APPLICATION_ROOT'] or '/'
+ path = urljoin(app_root.rstrip('/') + '/', path.lstrip('/'))
operations = utils.load_operations_from_docstring(view.__doc__)
path = Path(path=path, operations=operations)
return path
| Flask extension does not support application root
The Flask extension only outputs the route path, but `flask.url_for()` takes into account that the path should be prefixed with the configured `APPLICATION_ROOT` path. [apispec/ext/flask.py#L58](https://github.com/marshmallow-code/apispec/blob/1076180/apispec/ext/flask.py#L58)
- http://flask.pocoo.org/docs/0.10/config/#builtin-configuration-values
- https://github.com/pallets/flask/blob/9f1be8e/flask/helpers.py#L309
- https://github.com/pallets/flask/blob/2bf477c/flask/app.py#L1761 | marshmallow-code/apispec | diff --git a/tests/test_ext_flask.py b/tests/test_ext_flask.py
index 0f6f292..eeb4a03 100644
--- a/tests/test_ext_flask.py
+++ b/tests/test_ext_flask.py
@@ -105,3 +105,36 @@ class TestPathHelpers:
spec.add_path(view=get_pet)
assert '/pet/{pet_id}' in spec._paths
+
+ def test_path_includes_app_root(self, app, spec):
+
+ app.config['APPLICATION_ROOT'] = '/app/root'
+
+ @app.route('/partial/path/pet')
+ def get_pet():
+ return 'pet'
+
+ spec.add_path(view=get_pet)
+ assert '/app/root/partial/path/pet' in spec._paths
+
+ def test_path_with_args_includes_app_root(self, app, spec):
+
+ app.config['APPLICATION_ROOT'] = '/app/root'
+
+ @app.route('/partial/path/pet/{pet_id}')
+ def get_pet(pet_id):
+ return 'representation of pet {pet_id}'.format(pet_id=pet_id)
+
+ spec.add_path(view=get_pet)
+ assert '/app/root/partial/path/pet/{pet_id}' in spec._paths
+
+ def test_path_includes_app_root_with_right_slash(self, app, spec):
+
+ app.config['APPLICATION_ROOT'] = '/app/root/'
+
+ @app.route('/partial/path/pet')
+ def get_pet():
+ return 'pet'
+
+ spec.add_path(view=get_pet)
+ assert '/app/root/partial/path/pet' in spec._paths
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/marshmallow-code/apispec.git@fe3f7a02358e65a7942d3d5d4d81c6a17844c6ce#egg=apispec
backports.tarfile==1.2.0
blinker==1.9.0
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
cryptography==44.0.2
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
flake8==2.5.4
Flask==3.1.0
id==1.5.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
invoke==2.2.0
itsdangerous==2.2.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.9.0
Jinja2==3.1.6
keyring==25.6.0
markdown-it-py==3.0.0
MarkupSafe==3.0.2
marshmallow==3.26.1
mccabe==0.4.0
mdurl==0.1.2
mock==5.2.0
more-itertools==10.6.0
nh3==0.2.21
packaging==24.2
pep8==1.7.1
platformdirs==4.3.7
pluggy==1.5.0
pycparser==2.22
pyflakes==1.0.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
PyYAML==6.0.2
readme_renderer==44.0
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
SecretStorage==3.3.3
tomli==2.2.1
tornado==6.4.2
tox==4.25.0
twine==6.1.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
Werkzeug==3.1.3
zipp==3.21.0
| name: apispec
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-tarfile==1.2.0
- blinker==1.9.0
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- cryptography==44.0.2
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==2.5.4
- flask==3.1.0
- id==1.5.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- invoke==2.2.0
- itsdangerous==2.2.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jeepney==0.9.0
- jinja2==3.1.6
- keyring==25.6.0
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- marshmallow==3.26.1
- mccabe==0.4.0
- mdurl==0.1.2
- mock==5.2.0
- more-itertools==10.6.0
- nh3==0.2.21
- packaging==24.2
- pep8==1.7.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycparser==2.22
- pyflakes==1.0.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pyyaml==6.0.2
- readme-renderer==44.0
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- secretstorage==3.3.3
- tomli==2.2.1
- tornado==6.4.2
- tox==4.25.0
- twine==6.1.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- werkzeug==3.1.3
- zipp==3.21.0
prefix: /opt/conda/envs/apispec
| [
"tests/test_ext_flask.py::TestPathHelpers::test_path_includes_app_root",
"tests/test_ext_flask.py::TestPathHelpers::test_path_with_args_includes_app_root",
"tests/test_ext_flask.py::TestPathHelpers::test_path_includes_app_root_with_right_slash"
]
| [
"tests/test_ext_flask.py::TestPathHelpers::test_integration_with_docstring_introspection"
]
| [
"tests/test_ext_flask.py::TestPathHelpers::test_path_from_view",
"tests/test_ext_flask.py::TestPathHelpers::test_path_with_multiple_methods",
"tests/test_ext_flask.py::TestPathHelpers::test_path_is_translated_to_swagger_template"
]
| []
| MIT License | 516 | [
"apispec/ext/flask.py"
]
| [
"apispec/ext/flask.py"
]
|
|
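The apispec patch above fixes the reported path by joining `APPLICATION_ROOT` and the Flask rule with `urljoin`, normalising the slashes on both sides. The same joining logic, pulled out as a standalone sketch (the helper name is mine):

```python
try:
    from urllib.parse import urljoin      # Python 3
except ImportError:                        # Python 2 fallback, as in the patch
    from urlparse import urljoin


def prefix_with_app_root(path, app_root):
    """Prepend the application root to a rule path without doubling slashes."""
    app_root = app_root or '/'
    return urljoin(app_root.rstrip('/') + '/', path.lstrip('/'))


print(prefix_with_app_root('/pet/{pet_id}', '/app/root'))    # /app/root/pet/{pet_id}
print(prefix_with_app_root('/pet/{pet_id}', '/app/root/'))   # trailing slash handled
print(prefix_with_app_root('/pet/{pet_id}', None))           # no root configured
```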
auth0__auth0-python-40 | 013851d48025ec202464721c23d65156cd138565 | 2016-04-29 12:24:26 | 9a1050760af2337e0a32d68038e6b1df62ca45f9 | diff --git a/auth0/v2/authentication/users.py b/auth0/v2/authentication/users.py
index e031a75..bec1b4d 100644
--- a/auth0/v2/authentication/users.py
+++ b/auth0/v2/authentication/users.py
@@ -46,5 +46,5 @@ class Users(AuthenticationBase):
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
- headers={'Content-Type: application/json'}
+ headers={'Content-Type': 'application/json'}
)
| authentication.Users(client_domain).tokeninfo fails
I've got a traceback:
```python
In [78]: user_authentication.tokeninfo(id_token)
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-78-decf4417ce18> in <module>()
----> 1 user_authentication.tokeninfo(id_token)
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/auth0/v2/authentication/users.pyc in tokeninfo(self, jwt)
47 url='https://%s/tokeninfo' % self.domain,
48 data={'id_token': jwt},
---> 49 headers={'Content-Type: application/json'}
50 )
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/auth0/v2/authentication/base.pyc in post(self, url, data, headers)
8 def post(self, url, data={}, headers={}):
9 response = requests.post(url=url, data=json.dumps(data),
---> 10 headers=headers)
11 return self._process_response(response)
12
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/requests/api.pyc in post(url, data, json, **kwargs)
107 """
108
--> 109 return request('post', url, data=data, json=json, **kwargs)
110
111
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/requests/api.pyc in request(method, url, **kwargs)
48
49 session = sessions.Session()
---> 50 response = session.request(method=method, url=url, **kwargs)
51 # By explicitly closing the session, we avoid leaving sockets open which
52 # can trigger a ResourceWarning in some cases, and look like a memory leak
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/requests/sessions.pyc in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
452 hooks = hooks,
453 )
--> 454 prep = self.prepare_request(req)
455
456 proxies = proxies or {}
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/requests/sessions.pyc in prepare_request(self, request)
386 auth=merge_setting(auth, self.auth),
387 cookies=merged_cookies,
--> 388 hooks=merge_hooks(request.hooks, self.hooks),
389 )
390 return p
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/requests/models.pyc in prepare(self, method, url, headers, files, data, params, auth, cookies, hooks, json)
292 self.prepare_method(method)
293 self.prepare_url(url, params)
--> 294 self.prepare_headers(headers)
295 self.prepare_cookies(cookies)
296 self.prepare_body(data, files, json)
/home/ale/.virtualenvs/auth0/lib/python2.7/site-packages/requests/models.pyc in prepare_headers(self, headers)
400
401 if headers:
--> 402 self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
403 else:
404 self.headers = CaseInsensitiveDict()
AttributeError: 'set' object has no attribute 'items'
``` | auth0/auth0-python | diff --git a/auth0/v2/test/authentication/test_users.py b/auth0/v2/test/authentication/test_users.py
index c842f55..446301d 100644
--- a/auth0/v2/test/authentication/test_users.py
+++ b/auth0/v2/test/authentication/test_users.py
@@ -27,5 +27,5 @@ class TestUsers(unittest.TestCase):
mock_post.assert_called_with(
url='https://my.domain.com/tokeninfo',
data={'id_token': 'jwtoken'},
- headers={'Content-Type: application/json'}
+ headers={'Content-Type': 'application/json'}
)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/auth0/auth0-python.git@013851d48025ec202464721c23d65156cd138565#egg=auth0_python
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==1.3.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.8.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: auth0-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==1.3.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.8.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/auth0-python
| [
"auth0/v2/test/authentication/test_users.py::TestUsers::test_tokeninfo"
]
| []
| [
"auth0/v2/test/authentication/test_users.py::TestUsers::test_userinfo"
]
| []
| MIT License | 517 | [
"auth0/v2/authentication/users.py"
]
| [
"auth0/v2/authentication/users.py"
]
|
|
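The auth0 traceback above comes down to one character: putting the colon inside the string turns the intended `headers` dict literal into a set, and `requests` then fails when it calls `.items()` on it. A pure-Python demonstration, no HTTP involved:

```python
broken = {'Content-Type: application/json'}    # a set containing one string
fixed = {'Content-Type': 'application/json'}   # a dict with one header

print(type(broken).__name__, type(fixed).__name__)   # set dict

try:
    broken.items()        # what requests does internally with the headers argument
except AttributeError as exc:
    print(exc)            # 'set' object has no attribute 'items'
```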
4degrees__clique-26 | a89507304acce5931f940c34025a6547fa8227b5 | 2016-04-30 17:21:04 | a89507304acce5931f940c34025a6547fa8227b5 | diff --git a/source/clique/collection.py b/source/clique/collection.py
index 0c3b296..db9276c 100644
--- a/source/clique/collection.py
+++ b/source/clique/collection.py
@@ -251,15 +251,25 @@ class Collection(object):
else:
data['padding'] = '%d'
- if self.indexes:
+ if '{holes}' in pattern:
data['holes'] = self.holes().format('{ranges}')
+ if '{range}' in pattern or '{ranges}' in pattern:
indexes = list(self.indexes)
- if len(indexes) == 1:
+ indexes_count = len(indexes)
+
+ if indexes_count == 0:
+ data['range'] = ''
+
+ elif indexes_count == 1:
data['range'] = '{0}'.format(indexes[0])
+
else:
- data['range'] = '{0}-{1}'.format(indexes[0], indexes[-1])
+ data['range'] = '{0}-{1}'.format(
+ indexes[0], indexes[-1]
+ )
+ if '{ranges}' in pattern:
separated = self.separate()
if len(separated) > 1:
ranges = [collection.format('{range}')
@@ -270,11 +280,6 @@ class Collection(object):
data['ranges'] = ', '.join(ranges)
- else:
- data['holes'] = ''
- data['range'] = ''
- data['ranges'] = ''
-
return pattern.format(**data)
def is_contiguous(self):
| collection.format hits maximum recursion depth for collections with lots of holes.
The following code gives an example.
```python
paths = ["name.{0:04d}.jpg".format(x) for x in range(2000)[::2]]
collection = clique.assemble(paths)[0][0]
collection.format("{head}####{tail}")
``` | 4degrees/clique | diff --git a/test/unit/test_collection.py b/test/unit/test_collection.py
index ce4daa7..11cb01e 100644
--- a/test/unit/test_collection.py
+++ b/test/unit/test_collection.py
@@ -2,6 +2,7 @@
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
+import sys
import inspect
import pytest
@@ -242,7 +243,6 @@ def test_remove_non_member():
(PaddedCollection, '{range}', '1-12'),
(PaddedCollection, '{ranges}', '1-3, 7, 9-12'),
(PaddedCollection, '{holes}', '4-6, 8'),
-
])
def test_format(CollectionCls, pattern, expected):
'''Format collection according to pattern.'''
@@ -250,6 +250,25 @@ def test_format(CollectionCls, pattern, expected):
assert collection.format(pattern) == expected
+def test_format_sparse_collection():
+ '''Format sparse collection without recursion error.'''
+ recursion_limit = sys.getrecursionlimit()
+ recursion_error_occurred = False
+
+ try:
+ collection = PaddedCollection(
+ indexes=set(range(0, recursion_limit * 2, 2))
+ )
+ collection.format()
+ except RuntimeError as error:
+ if 'maximum recursion depth exceeded' in str(error):
+ recursion_error_occurred = True
+ else:
+ raise
+
+ assert not recursion_error_occurred
+
+
@pytest.mark.parametrize(('collection', 'expected'), [
(PaddedCollection(indexes=set([])), True),
(PaddedCollection(indexes=set([1])), True),
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=2.3.5"
],
"pre_install": null,
"python": "2.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/4degrees/clique.git@a89507304acce5931f940c34025a6547fa8227b5#egg=Clique
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: clique
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/clique
| [
"test/unit/test_collection.py::test_format_sparse_collection"
]
| [
"test/unit/test_collection.py::test_change_property[head-diff_head.-^diff\\\\_head\\\\.(?P<index>(?P<padding>0*)\\\\d+?)\\\\.tail$-diff_head.1.tail]",
"test/unit/test_collection.py::test_change_property[tail-.diff_tail-^head\\\\.(?P<index>(?P<padding>0*)\\\\d+?)\\\\.diff\\\\_tail$-head.1.diff_tail]"
]
| [
"test/unit/test_collection.py::test_change_property[padding-4-^head\\\\.(?P<index>(?P<padding>0*)\\\\d+?)\\\\.tail$-head.0001.tail]",
"test/unit/test_collection.py::test_unsettable_indexes",
"test/unit/test_collection.py::test_str",
"test/unit/test_collection.py::test_repr",
"test/unit/test_collection.py::test_iterator[unpadded-collection]",
"test/unit/test_collection.py::test_iterator[padded-collection]",
"test/unit/test_collection.py::test_contains[valid",
"test/unit/test_collection.py::test_contains[different",
"test/unit/test_collection.py::test_contains[non-member",
"test/unit/test_collection.py::test_comparisons[equal]",
"test/unit/test_collection.py::test_comparisons[different",
"test/unit/test_collection.py::test_not_implemented_comparison",
"test/unit/test_collection.py::test_match[different",
"test/unit/test_collection.py::test_match[unpadded-collection:unpadded",
"test/unit/test_collection.py::test_match[unpadded-collection:padded",
"test/unit/test_collection.py::test_match[padded-collection:padded",
"test/unit/test_collection.py::test_match[padded-collection:unpadded",
"test/unit/test_collection.py::test_add[unpadded-collection:unpadded",
"test/unit/test_collection.py::test_add[unpadded-collection:padded",
"test/unit/test_collection.py::test_add[padded-collection:padded",
"test/unit/test_collection.py::test_add[padded-collection:unpadded",
"test/unit/test_collection.py::test_add_duplicate",
"test/unit/test_collection.py::test_remove",
"test/unit/test_collection.py::test_remove_non_member",
"test/unit/test_collection.py::test_format[PaddedCollection-{head}-/head.]",
"test/unit/test_collection.py::test_format[PaddedCollection-{padding}-%04d]",
"test/unit/test_collection.py::test_format[UnpaddedCollection-{padding}-%d]",
"test/unit/test_collection.py::test_format[PaddedCollection-{tail}-.ext]",
"test/unit/test_collection.py::test_format[PaddedCollection-{range}-1-12]",
"test/unit/test_collection.py::test_format[PaddedCollection-{ranges}-1-3,",
"test/unit/test_collection.py::test_format[PaddedCollection-{holes}-4-6,",
"test/unit/test_collection.py::test_is_contiguous[empty]",
"test/unit/test_collection.py::test_is_contiguous[single]",
"test/unit/test_collection.py::test_is_contiguous[contiguous",
"test/unit/test_collection.py::test_is_contiguous[non-contiguous]",
"test/unit/test_collection.py::test_holes[empty]",
"test/unit/test_collection.py::test_holes[single",
"test/unit/test_collection.py::test_holes[contiguous",
"test/unit/test_collection.py::test_holes[missing",
"test/unit/test_collection.py::test_holes[range",
"test/unit/test_collection.py::test_holes[multiple",
"test/unit/test_collection.py::test_is_compatible[compatible]",
"test/unit/test_collection.py::test_is_compatible[incompatible",
"test/unit/test_collection.py::test_compatible_merge[both",
"test/unit/test_collection.py::test_compatible_merge[complimentary]",
"test/unit/test_collection.py::test_compatible_merge[duplicates]",
"test/unit/test_collection.py::test_incompatible_merge[incompatible",
"test/unit/test_collection.py::test_separate[empty]",
"test/unit/test_collection.py::test_separate[single",
"test/unit/test_collection.py::test_separate[contiguous",
"test/unit/test_collection.py::test_separate[non-contiguous",
"test/unit/test_collection.py::test_escaping_expression"
]
| []
| Apache License 2.0 | 518 | [
"source/clique/collection.py"
]
| [
"source/clique/collection.py"
]
|
|
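The clique fix above makes `Collection.format` compute `{range}`, `{ranges}` and `{holes}` only when the pattern actually references them, so formatting `'{head}####{tail}'` on a very sparse collection no longer recurses through `holes().format(...)`. A toy standalone formatter illustrating that guard (it does not import clique):

```python
def format_collection(indexes, pattern):
    """Only build the fields the pattern actually uses."""
    data = {'head': 'name.', 'tail': '.jpg', 'padding': '%04d'}
    if '{range}' in pattern:
        data['range'] = '{0}-{1}'.format(min(indexes), max(indexes)) if indexes else ''
    if '{holes}' in pattern:   # previously computed unconditionally -> deep recursion on sparse sets
        missing = sorted(set(range(min(indexes), max(indexes))) - set(indexes))
        data['holes'] = ', '.join(str(i) for i in missing)
    return pattern.format(**data)


sparse = set(range(0, 4000, 2))            # every other frame: thousands of holes
print(format_collection(sparse, '{head}{padding}{tail}'))            # no hole bookkeeping done
print(format_collection({1, 2, 3, 7}, '{range} (missing {holes})'))  # 1-7 (missing 4, 5, 6)
```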
sigmavirus24__github3.py-606 | 1a7455c6c5098603b33c15576624e63ce6751bf7 | 2016-05-02 17:13:26 | 05ed0c6a02cffc6ddd0e82ce840c464e1c5fd8c4 | diff --git a/github3/orgs.py b/github3/orgs.py
index 0b8cda6a..62b3652c 100644
--- a/github3/orgs.py
+++ b/github3/orgs.py
@@ -255,6 +255,11 @@ class Organization(BaseAccount):
# Roles available to members in an organization.
members_roles = frozenset(['all', 'admin', 'member'])
+ def _all_events_url(self, username):
+ url_parts = list(self._uri)
+ url_parts[2] = 'users/{}/events{}'.format(username, url_parts[2])
+ return self._uri.__class__(*url_parts).geturl()
+
def _update_attributes(self, org):
super(Organization, self)._update_attributes(org)
self.type = self.type or 'Organization'
@@ -453,8 +458,39 @@ class Organization(BaseAccount):
url = self._build_url('public_members', username, base_url=self._api)
return self._boolean(self._get(url), 204, 404)
+ def all_events(self, number=-1, etag=None, username=None):
+ r"""Iterate over all org events visible to the authenticated user.
+
+ :param int number: (optional), number of events to return. Default: -1
+ iterates over all events available.
+ :param str etag: (optional), ETag from a previous request to the same
+ endpoint
+ :param str username: (required), the username of the currently
+ authenticated user.
+ :returns: generator of :class:`Event <github3.events.Event>`\ s
+ """
+ url = self._all_events_url(username)
+ return self._iter(int(number), url, Event, etag=etag)
+
def events(self, number=-1, etag=None):
- r"""Iterate over events for this org.
+ r"""Iterate over public events for this org (deprecated).
+
+ :param int number: (optional), number of events to return. Default: -1
+ iterates over all events available.
+ :param str etag: (optional), ETag from a previous request to the same
+ endpoint
+ :returns: generator of :class:`Event <github3.events.Event>`\ s
+
+ Deprecated: Use ``public_events`` instead.
+ """
+
+ warnings.warn(
+ 'This method is deprecated. Please use ``public_events`` instead.',
+ DeprecationWarning)
+ return self.public_events(number, etag=etag)
+
+ def public_events(self, number=-1, etag=None):
+ r"""Iterate over public events for this org.
:param int number: (optional), number of events to return. Default: -1
iterates over all events available.
| Organization.events should be public_events
I just discovered that `Organization.events` lists only the public events of an organization, and excludes the events to private repositories. The resource that includes private events is:
```
GET /users/:username/events/orgs/:org
```
Source: https://developer.github.com/v3/activity/events/#list-events-for-an-organization
It seems there should be two functions, which could either be named:
- `events` -- all events (must be a member of the organization)
- `public_events` -- all public events
or
- `all_events` -- all events (must be a member of the organization)
- `events` -- all public events
The latter doesn't seem like a good fit, however, as `all_events` corresponds to public events at the top-level: https://github3py.readthedocs.io/en/latest/api.html?highlight=all_events#github3.all_events
However, using the former requires a breaking change to what currently exists in the alpha version. Since 1.0 is in alpha that could be okay, but it may break code for people who aren't locking their dependencies to the precise alpha version.
I'm happy to implement the function when I get a chance; I would just like to know which of these naming schemes is preferred (or another one altogether). Thanks!
| sigmavirus24/github3.py | diff --git a/LATEST_VERSION_NOTES.rst b/LATEST_VERSION_NOTES.rst
index 73d8fa34..2d523618 100644
--- a/LATEST_VERSION_NOTES.rst
+++ b/LATEST_VERSION_NOTES.rst
@@ -1,5 +1,11 @@
.. vim: set tw=100
+Unreleased
+~~~~~~~~~~
+
+- Add ``Organization#all_events``.
+- Deprecate ``Organization#events`` in favor of ``Organization#public_events``.
+
1.0.0a4: 2016-02-19
~~~~~~~~~~~~~~~~~~~
diff --git a/tests/cassettes/Organization_all_events.json b/tests/cassettes/Organization_all_events.json
new file mode 100644
index 00000000..06304ffa
--- /dev/null
+++ b/tests/cassettes/Organization_all_events.json
@@ -0,0 +1,1 @@
+{"recorded_with": "betamax/0.5.1", "http_interactions": [{"request": {"headers": {"User-Agent": "github3.py/1.0.0a4", "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "Accept": "application/vnd.github.v3.full+json", "Accept-Charset": "utf-8", "Authorization": "token <AUTH_TOKEN>", "Connection": "keep-alive"}, "body": {"encoding": "utf-8", "string": ""}, "uri": "https://api.github.com/orgs/praw-dev", "method": "GET"}, "response": {"headers": {"Server": "GitHub.com", "Content-Type": "application/json; charset=utf-8", "Access-Control-Expose-Headers": "ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "X-OAuth-Scopes": "repo", "Cache-Control": "private, max-age=60, s-maxage=60", "X-GitHub-Media-Type": "github.v3; param=full; format=json", "Content-Encoding": "gzip", "Transfer-Encoding": "chunked", "X-Accepted-OAuth-Scopes": "admin:org, read:org, repo, user, write:org", "X-RateLimit-Limit": "5000", "X-RateLimit-Reset": "1462322134", "X-RateLimit-Remaining": "4989", "X-Frame-Options": "deny", "Access-Control-Allow-Origin": "*", "Last-Modified": "Sat, 23 Jan 2016 18:15:03 GMT", "Status": "200 OK", "ETag": "W/\"ea404ad7fb8166ea4147395dc5d3ca08\"", "X-GitHub-Request-Id": "CEA95BCA:FD17:F5AFD:57293868", "Vary": "Accept, Authorization, Cookie, X-GitHub-OTP", "Content-Security-Policy": "default-src 'none'", "X-Served-By": "139317cebd6caf9cd03889139437f00b", "Date": "Tue, 03 May 2016 23:46:49 GMT", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "X-XSS-Protection": "1; mode=block", "X-Content-Type-Options": "nosniff"}, "body": {"encoding": "utf-8", "base64_string": "H4sIAAAAAAAAA51Sy27bMBD8FYPX2tErVmwCRRIgPfTUoAhQoBdhSTESEYokSMqGY/jfu7TkRHEucW/kcme4M7N7okwjNaHEOtguarEhcyJrQrNyXa5WqznpncLXNgTraZKAlVeNDG3PrrjpEuMan0yQTljjq69DkiMAvxQbocNFyAGB0NaYl4uQR0CU6X0vLkIOCIR2omPCXYQdIftkOByQxfZMSV79B9lH5JQTNhDAnUdwLPoxud4Lx40O6PgxxD4Zw77dfC9wrFp47qQN0sS9wIKGTuDpcRdao2e/RV3LMLt//Dn748Ba4WYPGIYytkNGbEdOC3pHqO6VmhOGGzbwKMNhZP0BLrQx9w5k3K93N4aNoNdv9jTSB09oOifPRimzRd8nN6mRHN/a0Kkz1ZM9nawodwKCqCsI+G2eZvkiXS7S7CnPaZbTYvkXR+lt/aGnxIZFXjxlK5otaVrEnrCz0ZRfrgEtXwddWDUBVGWdRMdFNWrB8cxW45+f66fKu8ha+peq99Age1YWN2gER93AjINgRu1MKoXKq5N9jL0yt+PiWxR610RTY7TRVQWY4v6U4bMTAqveAkf+9U25LPPr9Rr7zkY+HP4BnIZbmhsEAAA=", "string": ""}, "status": {"code": 200, "message": "OK"}, "url": "https://api.github.com/orgs/praw-dev"}, "recorded_at": "2016-05-03T23:46:49"}, {"request": {"headers": {"User-Agent": "github3.py/1.0.0a4", "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "Accept": "application/vnd.github.v3.full+json", "Accept-Charset": "utf-8", "Authorization": "token <AUTH_TOKEN>", "Connection": "keep-alive"}, "body": {"encoding": "utf-8", "string": ""}, "uri": "https://api.github.com/user", "method": "GET"}, "response": {"headers": {"Server": "GitHub.com", "Content-Type": "application/json; charset=utf-8", "Access-Control-Expose-Headers": "ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "X-OAuth-Scopes": "repo", "Cache-Control": "private, max-age=60, s-maxage=60", "X-GitHub-Media-Type": "github.v3; param=full; format=json", "Content-Encoding": "gzip", "Transfer-Encoding": "chunked", "X-Accepted-OAuth-Scopes": "", "X-RateLimit-Limit": "5000", "X-RateLimit-Reset": "1462322134", "X-RateLimit-Remaining": "4988", "X-Frame-Options": "deny", "Access-Control-Allow-Origin": "*", 
"Last-Modified": "Thu, 21 Apr 2016 23:10:46 GMT", "Status": "200 OK", "ETag": "W/\"7b4d32554fa33ad7750b8c67afabac34\"", "X-GitHub-Request-Id": "CEA95BCA:FD17:F5B52:57293869", "Vary": "Accept, Authorization, Cookie, X-GitHub-OTP", "Content-Security-Policy": "default-src 'none'", "X-Served-By": "474556b853193c38f1b14328ce2d1b7d", "Date": "Tue, 03 May 2016 23:46:49 GMT", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "X-XSS-Protection": "1; mode=block", "X-Content-Type-Options": "nosniff"}, "body": {"encoding": "utf-8", "base64_string": "H4sICEk6KVcAA2ZvbwCrVsrJT8/MU7JSSkrKT1WqBQAOtiqcEAAAAA==", "string": ""}, "status": {"code": 200, "message": "OK"}, "url": "https://api.github.com/user"}, "recorded_at": "2016-05-03T23:46:49"}, {"request": {"headers": {"User-Agent": "github3.py/1.0.0a4", "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "Accept": "application/vnd.github.v3.full+json", "Accept-Charset": "utf-8", "Authorization": "token <AUTH_TOKEN>", "Connection": "keep-alive"}, "body": {"encoding": "utf-8", "string": ""}, "uri": "https://api.github.com/users/bboe/events/orgs/praw-dev?per_page=100", "method": "GET"}, "response": {"headers": {"X-Poll-Interval": "60", "Server": "GitHub.com", "Content-Type": "application/json; charset=utf-8", "Access-Control-Expose-Headers": "ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "X-Accepted-OAuth-Scopes": "", "Cache-Control": "private, max-age=60, s-maxage=60", "X-RateLimit-Remaining": "4987", "Date": "Tue, 03 May 2016 23:46:50 GMT", "Last-Modified": "Tue, 03 May 2016 14:40:11 GMT", "X-OAuth-Scopes": "repo", "Transfer-Encoding": "chunked", "X-RateLimit-Limit": "5000", "X-RateLimit-Reset": "1462322134", "Link": "<https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=2>; rel=\"next\", <https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=3>; rel=\"last\"", "X-Frame-Options": "deny", "Access-Control-Allow-Origin": "*", "X-Served-By": "593010132f82159af0ded24b4932e109", "Status": "200 OK", "ETag": "W/\"aaec7c9c419bf77af766f3f8a5cd3be1\"", "X-GitHub-Media-Type": "github.v3; param=full; format=json", "X-GitHub-Request-Id": "CEA95BCA:FD17:F5B64:57293869", "Vary": "Accept, Authorization, Cookie, X-GitHub-OTP", "Content-Security-Policy": "default-src 'none'", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "Content-Encoding": "gzip", "X-XSS-Protection": "1; mode=block", "X-Content-Type-Options": "nosniff"}, "body": {"encoding": "utf-8", "base64_string": 
"H4sIAAAAAAAAA+y97ZbjxpUt+Cro9I8q2ZkkwW+yLUu6arldd7rbtdTl0bqj8mKBBJiEkyRogMwUlUuz5iHmMeYt5k3mSWafiMBHACAIRKCkSinKclUmidgIBICIE/ucs8/3zze+ezO/GczG41l/Zo97N7c3x/PBw2d/DsKHbx69/REfOatjEN7M+dH2YNif2rc32+De3+PAf0RHb/ujF+K4+9B5dI5OuGCo+OAUbvHD5ng8RPNu1zn4nXv/uDktO6tg1z1FXhh1M81F41wjhhiJhtRmFeyP6BfH6PLufHHz0+1N6B2CuJfT4WQ2mtze7J0dXc0hdJ7uXO+xSz9c6xjhROzItAngD855Gzi4suebNQbHAy4fkdFkNhkP7al0NpxkfdpuF6ID8WXGHQie9hgyAJQMI41ePMo6Y/L4+UD/pmyOu+1CviWZe5i5e+tguw2ecEtzR1fe9W7SisaLIfj7ewUEtHruBseNFy7oGfmJLtyPjs06w1o8d+kfPMKEEeHZCz23UYdEG3SH7vFPz116nhjYaRmtQv9w9IN9s45JLYEUhPfO3v/RaY6ElhEA2CPe6KpYC7T0aE5o1pQ3ecaj7z86qzMNReitPP8RA6sAl2sLNDFl/Q03nobZP3oLx93R7LR2tpFH7y6d+og3ln1wiympzlMdv6yul9w3zCVvv/3qu1vL2VvOKgz2552F2cB6f/P2fNwEe+tbz3X9o/XV2zfWd6FzOHjh+5tby48sxzrwIw7O6sG596zjxjlaDj3yEYOI/N1h6wF15UWRdQwsPHiAehURWAcXRrPOzfwYnnABlTMrn8DyUw41v3LjShriDUUznP3BOyu0plbPXfwtXqgV3nFnGYQOVhQFOKn5czf7Kz1WR8/ZKaCyZmi+CQKVEWLN0NyPopNX64kuG2fWOurGL8v+tFvyeazOK1IGyNuhX04U+fd7z1MYmaTpM1uuaYyXobNfbVTA4pbPXf4Tu2POvUK3qBV1ZRssFVpj/eqyps/daOPwpeK4UOsJYVFLCSr01ordopYJ1DFUumesS9Q0AcKKdMTtU+hT3LL7LEZq6+zvT5i9FLCSprhztEbeO7AaFXDStgAiYzD0lyfV6SRtTb3iCzTeO5VupY1TKLbWV6+zZa9udr1nF7nb+dcW3TIc0VB6OJXB6LnKA9Lv1+2BS12jls/ddKbjE6jAbD5qYgaN+5RFFnsGhdsat+w+/+HgHDc0W+AEByf0mndQNOw+Lx1YJZ1O53njOczO3Hmh0jvF2wHACVcbGFTN+/Qct8Q6v3OOzFxdU5dcmK+031EYs6QpYPhNad4v3i57Dw/YTil0hjXL4uz8rRcdg73KHJa2zSLug6O/9ld1zPGyV0Fq/vxF5O9X3i0Mw1s8ZUd/5eO5w/aG7glMJ09lDHg7dBm7YcIJva2HR1BhNOOWz12+QXK9wzY4K84Bmcb0WoUeTHR34RxhZPd79viuN7rrDd7Zw/mwN7ft/wPHnA5u+TGD+ag37/fpmMMp2sgwwzt78q7fmw8m89GIDsFUJp5I/ARmomRHy2x/Yh1weBRt0sO/TA+eSya2OHi1xaOVe96v4T/m14hLDdCZTbDzDliBM8QK6ypGz8X+1w1WUQe7vC712/8Rh9mj6XgoLbmr4AReZ967vXlyjrDlsNylH8XL9M18jzcH53OiBX8Z090TPkrfcbEjoeOe/AdfOoo6GiWf8B1IeqqdH4YBNuzEEvGTBQdvL86W6RLfglB/M99L/We/uN7aOW2PC25dYnx2Drgp2hIeTsutv+J7p59oO5j5vd6DhxFNCLDxbDydgu2JqZuYKaIHqxEDRpvxhGdCazWyh3cHDNhPtxk+bzqZgpYC6FU+b2j3J6NZP72e0PmH5z1ii+bsp8PGF8VpvTyG2qWJrv3i7N5oMphVsXvS1V6k+PJjwni+ZPT1Rkid68v3qpoayR/diPWTh0mZ+iuD0eH/ZDwtElCGao8JzOFmiUS8oI3pQBmuKScot2YWDTpRh6YomRsSwkOPHcz3SaIX01mwNYpQPp/wKrxMnrD0UmqThcXWjRnDEght2rAEsy3usAS6MYFYgqHKIpZAtUAllqCq8YklQJqkYgmiDrNYAteUXiyBILJEnWO8AKhMNF7AU2MbL4CpU44lgHq8YwmgOvlYAqbFQJbgZUlMrFUNacgSwAxCilebi7wMyCCA2IBDLAHLk4i0G9dDpLctjxrTgHWopwud1OYnS3BbIilLkPWYylJAxnNq0ZUlsCqcZQlMO8RlCbAae1kCpEFhlqBp8pgliC2RmSXIH4PRLDmNDq1ZAqfJbZYg1iY4B735aPbzEJzFbl5jOS+2uEJ1XmwXVfGdxVZYcAzpGVOwGdL2Y5GeycP4wkjP/mQ46uGvdLv/58tBjL1pfzAapaTnMDrYoWq8XNJYjcQb8M78sjQnIkDHsx4cBZmQSYxkNoiRX+ZFfjMZBR7AGI+w3pioE5tJd6oZzeSwRlSmGAplDlNqr0NeCiAt1lJgtEdXxoC6PKXAaUpQimbNmUnRsJ2AxaQXH5uLFCd60SSkfA212cdMs8a0Y7atNt+YBWuLaMxiNmYYs41VqcUsRgucYhZOjUzMImiyiFkoHfpQukkNwxKzbfUIwzySMlOYB1KjCPMo6txgFkmPFMwiqbOBuf6IIEZGiTULRMwC6fF/Upe0ghBLkBQYP/nKWOSiagBi/jnS4fjy3dIm97KALbF6+YeC+qgWeJi7Wm0eL4unQuBl27fD3GUR1Si74g1UCjfMwmiSdNIoJ2GKWqGGWciPQctl8XX4uCyOJhEnPRlpfCJN0sUQw+FdHyGGIwoxHFwKMYyP6U3mw7F+iGGmf9eot+KhVzi3YoNKsi1zuGHZktDDjxpaWHjgXhrLNhqO7NmUqLJroYUDezIYjTORhf/mPPruvwdbd+eQG7FZtCQPHspBKJFLol+/MN+GCM3xFAzkZb4te60XWbfcgBD3loy71uioM2+5LlXzb7mDG7Fw0gApc3ElKDqMnASnxctJSO2xczKsLkcnoTVl6qTGzfk6qXk7rF2uRx+bu5NO96IZvLIrqc3jFRo3ZvOKCNqcXhGyLWaviNyY3ytCqLJ8RaQWuL4iqBrjV8TR5P2KgDrsXxGtaexgEUGPCSzHU+YDy+HUWMFyLHVusIinxxAW8dR5wiKWVtBgEU6PMyzi6YUMXsRT4A+LWHnar2HAYBFQN16wiBhHG2qlMxdhW+IVi8B6wYJlePqxgkVUFaaxiNIO31jEVWMdizgacYJFME0GsgjYUpRgEfhjsJHFs+hwkkU0TWayCHg1QhB0Uf+dPZv37LnN0ptLUqBxTO8dyMneYD4Y6vOThV5eYykvNbjCVV5qVslYFhoZ3vJn4y2TB3FAD9kL4y2hbjiye7NMdOAbUlz6miuYfFMmddifjKe92TgNEnyibHlV5jJprMTKib4UOMuBPSLtxnKlw2MQbCk3s4ZamCR3yNvJmocQgaTkj3nsRAEsW7goLV4JvyuMmwGpTjLux4eKz7lOvkZJZ5uIYxSbp33p5heMJnkklcAxEK
[Recorded HTTP fixture data condensed: the base64-encoded gzip body of the first recorded interaction, GET https://api.github.com/users/bboe/events/orgs/praw-dev?per_page=100 ("status": {"code": 200, "message": "OK"}, "recorded_at": "2016-05-03T23:46:50"), is omitted here. The cassette then records a second interaction: GET https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=2, sent with User-Agent github3.py/1.0.0a4, Accept application/vnd.github.v3.full+json and an Authorization header of the form "token <AUTH_TOKEN>", answered 200 OK with Content-Type application/json; charset=utf-8, Content-Encoding gzip, rate-limit headers (X-RateLimit-Limit 5000, X-RateLimit-Remaining 4986, X-RateLimit-Reset 1462322134) and pagination Link headers pointing at page=3 (rel="next", rel="last") and page=1 (rel="first", rel="prev").]
[The remaining recorded response headers and the second response body are likewise omitted from this excerpt; the body is stored in the cassette as "body": {"encoding": "utf-8", "base64_string": "H4sIAAAAAAAAA..."}, i.e. a gzip-compressed JSON payload encoded as base64.]
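Each recorded body above is stored as a base64 string wrapping a gzip-compressed JSON payload (the "H4sI" prefix is the base64 form of the gzip magic bytes, and the recorded response headers declare Content-Encoding: gzip). As a minimal sketch, assuming a cassette layout like the one shown, such a blob can be decoded offline with only the Python standard library; decode_cassette_body and cassette_body below are illustrative names, not part of the fixture.

import base64
import gzip
import json

def decode_cassette_body(base64_string):
    """Decode a recorded body: base64 -> gzip -> JSON.

    Assumes the body was recorded as UTF-8 JSON behind
    Content-Encoding: gzip, as the headers above indicate.
    """
    compressed = base64.b64decode(base64_string)  # undo the base64 layer
    raw = gzip.decompress(compressed)             # undo the gzip layer
    return json.loads(raw.decode("utf-8"))        # parse the JSON payload

# Usage sketch (hypothetical): copy the value of "base64_string" from one
# of the recorded interactions above into `cassette_body`, then:
# events = decode_cassette_body(cassette_body)
# print(len(events))

Using gzip.decompress keeps the sketch dependency-free; in normal use the HTTP client or the cassette-replay library would handle decompression itself.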
VNYJ2pcS36ziP9+c3LFVJEEHhs8G2MMjnv1cPyEYmM83kFar7BD0SNBXqe29uClOf2FgFuhHd+igz+ziM22wPhQBP+RUIgIuTMDSZTpyvKCQzNDxGxPuDaY7Cf1XtgCmgzTW3lsxQui5ClkjFmhypVcSScOIidwMdOdpx/y+CFo6j53ylfOUufLVVjGvEcsWzY8KwPoLnDDoD/LxfzZ6Q6I1vRWiyfRrV0Og/YRMuRtgKivUvk3/H8wHZDu2HpfJvnW5UfZer4NVNAflqHJZgwC+ZGRBjkKsGeqEOciHPDlFbypgyOckNClHTAdoZk1ecuUTVtcwwJHTICdZ1xy0YXbvZDU0xSOVvdaQqthGCA3dPST0qro2XPZzecak9fFjEkwgy0bTtEHkyRxncFjQVItck2c8q0InD2y/x5EX/7n1a+Xa2Wa8oI03SbDWaVj9tVJj8uKeuy4FZhoS9A1dIr8cMGmyJa/7F/5kejO+cnp/FBo9yPqh2izIl0oojxLH5GHsf5lVewve5JGJL9UYsalP5RSxkv/2MncmgCSC3JNAOkbmieAlI3HScHpNET8ySQYjsCW+D/6HTGDWlmvefOl/FHbL5VrmEVAn81KzILH6rQwOpBvQ3NiOH5aM6p94ExbA0lydbxnFP9GVHR6Ba9uZUb9KxOXW1/JFomz/jr34tdRN/xJO0s9wB5ltu8ur1YGa56y2QbA9nJFtY/3n3zzmK4s/7Pv1Y1Lp+enq4LAhid75mox5uVWoG1Va3e0idv/k37jf7xriCJsa/dXVudJdpobN88gTsmX6inG3zumrYLtCalRuQIfElIBIfRVfHJNLv7v1u1ftbEqCpfqThDo2c9qM09nubWPPuorL9++PAOgvBZcm1poOoHmS1pJD9cW99urKcKzXqENAt7A7Lt6DYMfIvwLvoRDy0iyrEWVXe0HJTrAhNrhgzs6+/X8Izh///4K0zFYqyzmxssFLDjRGiu6eGLQb/VU/5WPsls83ZDfBDoOCaMWHUWMD+lJs5ZpngrRrHrLHVEjxjDq3hiNTNP8yucgwe2a5ZptTYcHUN5aUgY2YeB3cgy1fbh1wXhw2QlIvFYPhNXAhkJOyux2GsmM5GsW51CW/sKJjNRlunwu6pemgg/L4vHk9VEeTDAAYsisjeTmahZpbTRZ2RxTmZig2Jizxr/7ZmJRByBZfJLG4ka620YiYVtOpmEvwuTsIgVx9Y9BBmqri03PPQZQtpcCjc4f70/dcT+iwabSoFvm/L0H87vpkfKLLXBKw2rVH08xErKEjGN4jG9UJKi5Et/jKTsxvkJLD9khI1Qm/64SKP1EExEixl00GkJ42EhhbihOIiWYoqB6EbmB5tuNg72UfbAHPc4g04Q8qAPlVfQCf2Z8pz5cC8XH4sIU7rF5ts7kDQRhH1t/dcCV/fNFlds3PfhVs2LKzcIutQiyeF62d3i6Y4PQQnIrO72LvjfWsly8QUiWQEBwKl9j1s1/slBXKVBgYKntr7yp6ARy+iSX1dVQRmWuaIe4G2ruYyLNgrhsYlazwltaL2wqIxUX/+/Ta3n5Va7ioChAArIKRQWGEq0zeYJNc638T3Yc4uCSpo0F93DPggKv7oj+oKfLehqoubLlQ6/qZCXhuHU2lupdHjxzlTGJR3vUwanNWAMWFi6EwRhOCR8YT2hvIx8pj9iXgEs1BN0bX1Jk6csjYYQzdrGWqklQRLFrN1L8oYvrGUR+FuiI8VIUmr5bD1JdFijEhVgXU/QXxRqS6XkP0NDcIuv9LsQkxFv85wCie+WmKU31j0gXfTgjfXtFw/FM2qgrZfqS8UxocUa4x3For1AvANVDPb9YefwDjjJhSuaBPbcxnrMFv+kS8u6PPPoFLwiJV7PG7Rue2XECxu90bzXgVl16DNzbE3ox+tF4Sf0Y0I/VlmJ9BTwDuCe/GZykmU/4WReLswAHlMD8TfsJBuMfqBe4BkjtKOLjOlaNRP68ft0iH0K9MPXxSbPX95N0I9C3rjoB2Qeoh8NK64o0FlZicNsuv44SKND56ez8UEjPKRq1x8T2ZMwBBepRQ3CRmop4+EjO5FDMZJakukxWDc0x0rqpuPgJY2ejI2ZFEfRecyk+EyJmZRBFOLat57uM8AHBGhs6G5fhoPhdp8sVU6wR3E7r4vu0OW6vIhTzAohCQS0IIsn10DFg8JN/p9b3PGLYAN9GQcsW9S1ARaAIA6a0AI0OCINfaHXwq1MsRe7/oA+O5tb/75akrtQB2b8e1UxyL72azgAcNDGymWGPAN0RKM1+nXAJQDjFLAMsozkE+oGxdv1mpAQEIts4812rf5ofavHSx2Yo+yNBjmAEpTRHVZRkEgHoiC1meAOCgPJKJQXv/1TM5M8rbONMg+ZYN5lhkwgCSVsFK4+E1Jb+nZ33NyVidoTO2g073XOVL7m/Qrrnym8lqzYKby2wLCnuAmULpzCa2e7OJEJORhqMk3htZQw0A6vHYwcIOVpIHJQowsTcvD7RA5+bhdw6bagLs215AYhKAm6lETmnu2Bfgr51FXJlge53jxQAFxPu7DZvpdhWHXpMzuVqCRCozSyHXpgmjpbGrke6cnSLc250NBMPdvDZqY/NNPs0XlspvlJI3BmNy29M3n2RQyBZ3ayBuEzOzHjATQNmUPNjZ0oU4hm19Ico9m1HQekafbFHKUxq6G8exfBEdjsLrOM8sEwaB97xbdVhNu1WxoXd9lrPri8y568sQq87Ik1LvGy175vkZc9MWVmbqU1fWor70ksRGEZ1+VauoRc7gkZWOplT9qQYi/7X5thuZe95sMKvhwR1rvkyxFZ/Yq+HBHUv+zLnrBhhV/2hPUv/XLYq/7FX/ZkDSv/st+xXfEY6F9ZDqZzCebjwnRzcnB3r8J8MMRBRWCOrK+yL3VZmSp6vMdGUzUdVAhmr4vlnnpMdJnl1OdYrJr2K8p8+J0MLgezJ7JPQZg9EeOUhNkT2q8ozNGvtFdZmD1JAwvD7E/6OKVh9qR+iuIwe6/Y5+Coqrz0UOGBBWL2l0uHEjEulYhBgvw5f2uHEjEOVZERoAwMyOUH06AcPH5ChncjHLvdxdeKxBz99CtlYo62OVsopt2C4lV6VHB2iT0biaBFKbVGbeSqAnLjUXV8V0wHv9cKzu3Fd2mAIALNQUncpYIzB7c581wAXxUg+JX6iMSHteyJBzaa9wO9yg790nCgI0D3CEYf+UBMISVm0KzjXA30JBrYmAkNBtYzPWxe+oOBjQ6dxwIbHzSCAusp6Y0E7kkYAgTWogbhgLWU8WDAncihKGAtyRQErBuaY4B103EgwEZPPjUCWL/qogHA/VF0xv9aDY3hv3brwehfW9xY4F9bqjH2127eF/prSxkB+WsL7Af8tWUMxP3awobAfntfmCHq1249DPQ7lNUb8zsU1Q/yO5TTH/FryxoG+LVl9cf7DvrUH+5rixqG9u11axDYd1RWD6xvf3yDoL7DdTUE6TvsGmJ8h1V8boscCefb76fmQcr7wXz7spD8vVY3L5HM1c/X19cv90omRCxUgHVdwJa2x
D4gX1vCOBhfW2Y/iO/Yl9kL4WsLGgjw7c33OPheW+ingPfabxiC7rUlDQT39tZJN2xPzAQIMsVp/okxsb1WD1+D9o59+BVk71iTs8Beq8GE6322EtDA9RoL79JwPeHZAeqdYr2URaL+guiPb5BBQ4zSoElYakppzYvEhON7jTg/0K3gn6dtT1iv0bwXfFX255dG9Zhjc6RUnkb1qnGeRPUaE9Ga50Gz0h/Ta3TnPKbX+KARpldPSG9Mb0/CEEyvFjUI06uljIfp7UQOxfRqSaaYXt3QHNOrm46D6TV68qkxvfpVF43p7Y+iM6bXamiM6bVbD8b02uLGwvTaUo0xvXbzvpheW8oImF5bYD9Mry1jIKbXFjYE09v7wgwxvXbrYZjeoazemN6hqH6Y3qGc/pheW9YwTK8tqz+md9Cn/pheW9QwTG+vW4MwvaOyemB6++MbhOkdrqshmN5h1wZjem2RI2F6+/0cguntyxqO6bUl9sH02hLGwfTaMvthese+zF6YXlvQQExvb77HwfTaQj8Fptd+wxBMry1pIKa3t046YXqME6Zn6/rbx/lRxsT0Wj18DdM79uFXML1jTc5ieq0GE6b3GTG9cuE5F0jqgho4DkL1porQLa41Yolp1L9ZbfP724LsjIfcC4IAoXllNVSQBOyVhOZ49FpNaIPapnU9VBGI1EZRaJUEvu/4gRBuzJUKPGkLP0zwI3eSVBGpfml9gQ+hKnvsuaEQLExCmwfcSwRzPZkmiR8JZseeH8cJc1OWRATkbjf3BZKrHmRGrL5R9FO0fo7Vn+7oAdF74mMliPkV/cH6aqlozh5ADCXvqqqDy+ifikBhTaGEGdEIsS4Er4uFof7xfNxy0J0HuSsHrUSoHMe3XdvxROzZsVKCe7Hrcp+ncRImzPNFGFAs6yjz8h0Vg6bC2Kj8RUW6VlTh6w4UUrtldDUDQ5V6hVH5WA2D8lu/6TymxjQEKSqOR5HneH4aCU8JmUruplEi/CT1pJSpjxz/0abhvaLlYyHQd4kCQSvUJFvuKufSg3EnpOvoGhPiBSqOIzuUHjweKaoNhzHUDYpnB6mdBCBJ9yW31Vjrgqp0lhqxVkkCwjFSGl06nLjVrZTsyXEnpesId5OCjYdhndjcSROPceakYcqkzwNfgtAodBLhcWkntAONoizvQV8fg3cOBPeJAo38eiOzhWbUX1p/A2kEfqSidwnRy2Vr6yH7MVuMO0mdR7ybpNDz3Ai1qqVIbIFfbEc5kZtghgIv8N3EjWXkK48C3MedpDlmpCh5CCa7qvj0mPtK55HtJkM4KASbxGgZQ5+44IkbiMix05CxhKW2r3gYx5471mT8JfvRQiVuFILAwkiRSFAsHSqkULL+japDnYe3mxHfd6WbOF4Uy0ApO4hsHMA8iQM7UKFQIFN1fOHafKwZoY3lvd5RromFv6hgma4zXRRD7zHYXLYLXQd6XN3pPNLd5ASxp0LXUdx1XBYkCmWgozQUUnoud7wgVCoKBTabsSbn/W5dWBsqJIK9BZU07nPaYQqPY0UfqR+Pung6D3Y3PwZm4yh7ywfMiQXSSznPNqipgmyse1RWwaJZZfFHXT0FJkyxur5C9JlVWHzjTlPnMXcubF+n5F1iYXtc2IQveAAVKIMw3uF2cjwII+AMd5Eqs4pq+KBVn8odZdN+QQbUi88ceLF3YbM9KmVeXNhwc9u7r+HJa9c1g6tXfV1jwk3d0MbODtsx9WMpgoBJOw5E6Ae+l4RR6jvMp43+8LoG0yq2BQ5MFoU4MnFsxg4PpRNgA/QUE6mAhWGzEe3x+oD8/Bth58E27Irud+HfzEbYeRGabYQ8nNm6usGFRaNhI+TCthsb4de6OvHFbYUuF4L7IYLlGnFhdV1hKsyFL+dseaUjYIBu1kaxik3ukV2za+Jwxq+35QkCvzQePKBWl1rfFs53oD7F7/hDm6rqb8unt3OEDMyp7NjDdpHhhkaVt4qSZFRaTZc6d/5AxhJtxJBZvocMUN2pbTTP4grnOM6xqQ9pbs9cDedf3toEUXbYyID+s6KycNPaJJj15NrUViWt9sbaLJfj8KUkZu6FbnMsRKlQzMol23ufa5NrW34u7oyIty0sP/vA8kNpoAPLr971KIsGu2ACeEtK20kZd3A/Dx3XD7mUPu6pXBGLd+rFfhAJfD+16ecEEpabGwVAD5UrhRT4pAt81ZEqlACGJGNOYMdoc2j6ASjxmXRDIPV+wp04DoE2KhcogYxjWwIsYSr1ODUexbT5+3ZFeMnuSlepaHmba995UznPe2DTpPI3FT7deYQNODbxfdjCmO0kDBNfBC6sYSUSFdooJKxcnwEkcAI610aZlK+2DygtuaQqlrqspD40x58J1XVYDQy2+3ocZSa+U+s7YK9blAuh8hcECPyL8FGDc/mgoZHCUNEb9/eL7xevraZRsIHWauqsoIZmMc4Ldqmmh6MR1fq8mM/fF1/dZH+csD92NT6pRGyCyStIMq9mAucHrf7bcvX3KYCu16sOL7oRlD1DrmTwPXMeMteoBlgtqJCTZGm6l955rr6v7gJaXlM7dAOQZnzfp71uSOPoVWtdD6IMwdPT0ZxqiiMl/3FBmI8/wgOEyjxXs/Lg2WQbWIizq077jL5vICNpDwHTiTMFNjYA0eqfNFOiaecTZsoPGSXL6CrdvRNlGq2HJMloMYMSZLSE8ZJjCnFDE2O0FNOkGN3IPCFGNxsnGabsgXkiTFk8DBp45n7OwhnTIPrxcLviDl8dpI0aGFeCce8ts9/y9kd0lGpyKzfQ8HMfAWMcxSvf5vdSm+hB4ntxGqQijKQnELKBSBAGWCUJmIKzDfenkIWEFFVpChW/3EMVplk9KG3ULknzrYih1hZfWbp44Vo9ZupJ95eCmEeQS2IOBA+RW/WtGUI7rL/15q4nouxwlSHQr6t1fkFnkxkHq77BYfuHG5HCmKovbNbEGeh5BTsUy8ngDfsHTPUGOht1AYaQgsWw7B4lCBD3R64f5mXRZBJVsr8W9ZNvytZ/7H/QNLpz/rBpfNDowKlVoPehsydhyMFTixp0+NRSxjuAdiKHHkK1JNODqG5ofhjVTcc5kBo9OXEo/SeSdhZl2WwoUo4Ki7cyecgWpREIAJlGUaWaH0WWNMT45qpJmFi/mH4o/758WrQtxOpDkwI/QRX3N6xjddrqaS22gEmB7zebVQ5qX3l2tn7nCmxWPOWY6g5wSlF+c32h7OVYw87Ste7JQd+N86QPJQzOlT4UOVa+9KFk45zpQxF986YPJY2QO30otF/+9KGcgTnUhwKH5FEfSqPsZZyL0XwZ9bzHDMunPuwRyeudU31cXL+86uOy+udWH8obll99KK9/jvWhLLKUC3J3LI9mpvR5++FE1ALujIvNOou2SBah9VZWOIngieknryFgJ65zwZQjw21Y8XrAOnW6X9/KxnWRE6oPMEggqcS+0GGX+WPwQAH8ln/pN/KRcrAPv55hJVSOyRuei30otU8+9qGUcXKyD+X2y8s+lFN8yb1ysw+FDczPPhRYg38tvtBWknW/tf0p
8rQPuz8kV/tQ2sB87UOByWs52+It8z4IPmNgxNPFUY6AyEIjxECZ/ZmruRp1QNl5hJj20MKqx0/tGisHvXwtb/tUg1dyt081O5u/fdAIw93VW1nAd1dFsgQU02JcUeXq3TMSOxckVea3hWpWMXj0ZKf0ZQIiPXzKPmb1XYkerJCtiELbpTuuuAbFyy0oAGeINX/IKKOvmP6ix1SFu3xZo86LbqdbNP6uf6+KwuhfEiSNbuebg5jEn4EBEd3tUXS3juApYnvqXwt01yA2Z0J3y/COCd3tAuk0S/GR4QqIp8YxXzEPaaXlmvNRh9P8zsGhCd0l/PBIJAcW1eSeKXVkcs/sbRq/IvfMhO6+uSKMtt/tYUJ3+6IpE7pLaGi/VTehu0ecdqcgSg08r5UCcIBbBdXaGCm65aUUV8Oz/b7MCd1F0H6/qWta8RO6u6iglBNe7VMKMqG7hJIg8Gsd32ePqt9SnNDdfLNcqJ4H2oTuPmpEQUemTujumiJAcW7/aXevnh0griUcPKG7RZbfL4ju3oJdhhBi8BeqeUr/vS/gXMODqM66AWJMMbBHJB0BWnaKU6bMUNa6PtOOtN8L9HrlSETGC2TVcdG9B1aesMLfxVjrwMhWuHn/3lbzdkb6UOGHjuVyYuBNHyq7nBaSBKGVcd5fqnn4ObkoinSGyseif5MRpS8VrpH6wS2Jp6ym7eLjAjGpMBzKTIjomfo85S+dN6AKJGzKXzoXefp7y1/a7bCajndvW8SzMkoGGZ4w04kfjwjwKGeD3I4g6yh/xQdBrE1ZSZqCE5mDpNndGUwuOI3YDoMQW9FEO9FwzVUW9VmGaNcJAywjI8KxKgeppJ0wSEKqaSfCFOTUThQwn9leAmZNz+Oho/wgCHmqfBCRiRDsz5QHVy/+miC6MynDSAwLryXwjkG12OIJ6DzAXzXrRD9q6NZEdF5ahoQJrAzRuTiuJhGG3CXqkIkwYflQpCvvYhFObHg1YQLF70yECTpl8wBOqAkXPgNhAn0PWMITXcJ8+fRqqPjBhQEtMHl1Ctt03Riaqfp7u26MRpdQUyo06BIacEER+tp40KZCcF3GUi9IeALCAyd0QcgPow9s874bcRWxSCBd1nM0u3ieZ3cLtUMjJiqEGuy6DCoEA3u/JxWCwRumYNkpWFb7WE4A9HvRalOwbANUWKuJCqHrysHJNXGZoERQFxO1vpBMwbJTsOxRfu6JCuHw0j5RIfRnKpuoEDpGA1AEqWZSaKV+9w/f1RGpCP5tiSMyg/PnxKlgBRI3USGctklOzdtEhTBRIZy8Ap1aNFOw7BQsezx049SKmagQWvVxbiYqhBafbo0h4xgvz3/8dDyntA51nKgQCt/6Z6VCQAWSE1QI+MtEhTC7AtxzlqGvfa+fwCGwgkzgEHEo6KMT66G4kZ6/Bf16wSEaBUUAE5v0RHTbiLGovfUdff01zDNxmUxcJq/ll+/tB0PcMxMVwkSFcEgGfepeNxHdnj+oT83bRHR7dFM/NV0TuttvmU3o7oTuTuiuGVQ5Ed0ar5gJ3Z3Q3TNEtxO6u4NGG5SyFecRnLa/FqLbcdDdiQphG+n8mlPm7ESFAKjzRPYRVZytyxGOS4XQOTi8SYVQ8kY3uBAKr8sxLgSY28SW0CJDKDIffiXJ3diJuWYlv8CUR9/jjZTHr9cKDBRTeegT2Y4F9UyzbORtmStaIA+0RmW+UesDpnIiE8jjdabrjeBo+tvy6e0cPom5RWt4u8hiSX+xjkZnarL7dfUqnWFhwj3AgpntX+TyZIHww1YJ8/z+4hZn4GAMYOpfyAfidmkFmr7m1jxy1J1jHGBeyB1YGwXjAH5IsnyTLWJUyM1+wsvxpFjCFNN3Q8lAxXnhkMNUV0m9YsJN3dBOwBIQh6kfSxEETNox0sf8wPeSMEp9h/mUOVYTDsRSKSScyZgHoevZXHIeJb6Ugevavh1J37e9wGX0RR4SDhi8UG5RuEBX8lYPMqMkkyj6KVo/x+pPd/SArAO8o5zor+gP1ldLRTP2oPIcFQvQ5Msksd4rIvq4Jk2ynrLNvZWuM7VIcksuEutBWekWc0bEH2OwD1TH7k3nkZok3Hsz2y6LL1/c6cOYI7jAN3bJ1CKfU72hTRyUNYUyOwfqjSevqbeBqu74RDzl2jZP0tSN/Egl3PaUE3kJdgjbT7jDYi8MpEccMYfq7TIvZq6v0siVTMVCcS/BrhC5TpSqNPFjhg9gi0Hj0dT7YQnmnvwaARDQ8kKnl4/Kypek2pTeOqpadx7hjlCExQK5uWkgeJDGIWee7UtXKe4roQJhhzGSdJnN47Em5b0CHwAMOxgbibJWar2R2QIlWq3N0vqbPiLuir1vc6+ytfWQ/ZiNvfd1HXFjknB8xIntBpESjitiGTqpH3Ap7CgNozhBUe3IsR06u0ZZOfUkzTEjFpgyMWGJFc+RID3yimFdR7abDAO9HWUy/pL9aGUPq+V6g4WRwqItls5a5binxZvtWo2qQ52HZ3I0uiCimfGLtHz9wA4C156OxqoUDlnNZyzfIPRFKHD+GXFtQWbD8g27H3M7y5d2JNAp8EjYgS9TO+B27PLEgzmsUkdGcaTZto4ejbZkDjZ8l8sQZyNs7ch3mUwd1/WYK1wibLCDWKRjbXDf0SG4nCfWcruxlqm1kvFH2MSjKnLnMe22NoN5H2Vre6/ogmApqlCWX6+e6RBUP8ZKX83pwagT0nl0JjubA4t/ZutieZdm9PvY2lzNPTcZ/bV76+zO5rLQr1kEe93p4+671M7o993AsW3pytCPPNuLY65syVPu88ROU9sWuOS7KWGHh0a/wQtHUWm60y+jf6oYtYK1vY9rEJjJ8MujXGfLbW6VVwL0toJEKjres2HlR/CX+jrfeZAmmm3PbG/Gw8vUbN8JwxZ/3oTWndVsx3d9dxBaF3bX0lqzg9BLocBCCo/ZLv6XKBYD4rNxI+ewXRjMkigO5VHNNnjh5Wp250EaajYvC9xe3pntC09DvtOZ3e3MdjzPHYbDG2hprdmJ7XsKoFeC+0OSKlfJ2IUc6Sd25EV+EogkkqEgrO3wzDZ44eVqdudBGmo2u1AHsOeEwnZoQUya3UmzbaygsLbG7QMIHk9eg+ANtLTWbBEo3/MdzlicRJ6UISAGDjbvNBUyFp6d2MoWgaQgiUPNxp/DgGNHUknIY+lBkIxj2PSCu5ET2zj+hRDJuDhDttiou3Xh1NbGt7UBdphbeL60ADOXFrmVZEirgaU+7q2785B3MITLU88LWRCkSiQxd4Ty4bVIJWY+YlymrpPEDgAbTPEo+9/XS2Roxps9lPX4tGGyHsfFaTqPdjdBBut2lAmiS93ft9EDwroAzJRuiu/INTEqQNN5WJ2PBPsDh6VnzwS7xGuc53gs4Jde8OHzeWUDGF3CHwY9G2zv9ZHgRa7gzHdwBOAwkDZz4IaFn0zAlWgL+Bad1I4czz96JNjwrEUihvvVYzKEYxa3QS7gb4skVyp2kwBhHrGkiI1RdLnEXMmzBl9jCboSpo+g1nwzNubaeXS7zc3gGxhlQmhz+xqRX6B
Y+wsK88Hh2IKtKu8jfI/LONOn6LhO687jNdn1UPmBzeyLjISELcRtHYUwGcKdDGHhM/CKDwKvDHawetdLEUqnROz5PlOelziShTBo4T72JXeFjII4cfxY27JHDOHuW+YoSv6+QqGLeLPvyAy+/n7x/eLfrS/n8xKxhn99rRD3+dOzFT1bCZps5xt86t+tH25TtYnvf7Cy3IrlfI5ohKd7tcBOYcnNZp1FW0R0ZPnii42ly8XpRv93CWMRAn/4Z47Sx0kWb37Qz98rsiETCEWQ+Ab/QnYRAhx+uLb+LDfSyu+XW3gGI2Vt1nKRg4zmAR8upn33Nt3X7xeWlavNqEZY56VgsB0xT3vJLtMI4xzXOzq7p+2o23aEggasDo3r5SUz2Frq7ciO/JT2HJTYojt54gfKjcMowM0Rfn3ELUlf4LZ+3Etm8MJRtqNG5Cto18t7Vdvu2N23isAnvVu1L2LXGUrgrZcPlDo96h7QeTpM9gB75oYz9yJjgNwwROTIFB7bDpinxAf5PF9SsPqLzo68JVIkzlDRzmeAuwfFABno8w6bQ/Crk0qOCHeKaHWRtJA6iN5M4QJwBKKBYKcIFR8Pj0XD0OOIH/IZAuZ9NwYW5yRO4NmuRCyoL1KZKMkoEmyUPaC8iH357tvbd19++OvfKfil+mVUbe48sN0dzGDyR5mLbx9Wc0U3sCofAJZPgo0Nm2MROjzqhHQencn2xpHZg0vXJeJMLiz5UFcgKk2cP1O11Sn3rCbi3AsdqNwKBWCOacODg+yz/pli9geG63swExd6Vro+96dExpPJ0UcX0yO7ZteE8TWWEsjj8eDXlcVYrE1/ZrsXutHBzUqBxpd8lzvKOtni58AATWPpqB7tuQgpjvCkEHW5i6juQ2/rsXzGYumiM2VCoxNILpgbBQCKFIwqmFUsdCMZOVKFUopEIg0tsCnXZhciFSGWm7kx4qFSLgIP2ZBSJsD5pStYjOTGmKkojI8nNEbwqaLUMqCplJGHVTgMSZE2CxDHGcCVKGweyVRSfNUoZsxfYbXMFUIei5Mht/7VZuLfrMdMWpIsG+1g3UU4t42aMg+/zxdXB0J2HvDO1MNcBAoVCrmN8oT4KUwjFQWeQAA9ixKHh5JC1FQ01hx9tX1YkamrNzx95UWpd+Q5wbWILKiisvt8eWfBGEQm6B7UPsYcdR7wbo4MFu4o6+g7tb5DctgWyORa/e+WfBL/Ijykxi4frPoOVq0ywgUGLbwRClh3niATixpGkHe5FrXn+C0fxnz+vvgqLy5r/nOdN3UB66LOKjacxfYhAsPCTCDMh7ThttQGwht67ZOax+tGkHVKUIUnPBfBPAAruxWwqy8EZRlpkpNkabrHXQgyx/uSIOewlhV1AT24pnZorutQ92k/YgFrGkZzqsHysqH8/Pp7OFHCuseus1+TlIoSl1+GE2jYWD7CCbLeZ4PUD3MqiICZJSHxElFNi41mIdre6LZ/fPwPAmcQ6lSIoG+4I1t92Y3zq6D8UE1Xvd/HVVb2r+gUOpnf6KLLvQmuG61xOL7cLBGthamB5J9poPDXv1JMqqBN1mL0p1/0fwDTUfuyBGHnYYxXslB3aHC90amgtdxghYuq0EhxN3NnjNPdbLtKKGv69thnvJmjM9YaBa33xVQf0SxQ56Xoj7SLXisH9jkCf2ykYPoiQZlrPwH5gHLCJFA8Ct2UIVgn/WxFr72acItu2I+ZetL97V9VrzpI6vrUmM224PNqdSSbiW5g+lzJP2HR62IiaNzFBrDBDvHKHnKqq3RKUOubzgZ1dRF8udLVgRoMPLPKmNXTmOIvjQf5vcQDg7fsHzLVIQzh2kGBsOEgCHoeNWXrAYdNozvnD5zGB40Ondro6H3w7EkYcvjUogYdQLWU8Q6hnchtVPOBkVogJ0gusp+KUK/zalycr7UktCQBWmOMGuoWaDmVRiFrEAE0ZPVjm1jeZUTR1lCESYHvjFZWsQVMCnx/nCN7UmBSMA2rkTt/neHqhKtfibN1O552brJB1IpT4ev6HKEftDk4Fb7ub6JPha+nwtfyqc1br2tyr5XaFeWubjHnD9VX70AvuKQQplJRnr9i/p2SVzcvbmcwRH96tZraKVlTaZSpNIpxoYup8PVU+Pq4pXxqm5lKo0ylUY6XRinw6ApI/h0UvqbMU7ipykM7Xm4XwOfh0qqqRv+6S6OgoycKXzeGoMdTZkYdMOtTVYlI5rg/HkF462CgAt+tfy3R3e5hPhO6W1q1E7p7tMTiHqQz2M04obsFOEQGwFT4+khsB+5qk3um3JMm98ze9lOgUC/ASAGtxs+EEewVrz6PPOyJmwpfn5+uU7eUwrKZCl9Pha/3EEHQemR3C40IkqqRflalq/uttKnw9VGr7JRiToWv+y2zCd2d0N0J3TWDKqfC18YrZkJ3J3R3QnepYGMwobtvrkYufO1RxikFGRGudF8Aw1Wg1hGg5TD9Bs21x/JI+47BD1Wlat2VKiZ8BHG7wHDI3Qs57y/+MOz8QPpQ4dUcvNwUKUE/VxMDe2uobLGLw0e/q9CL/lLNQ9Cbha/3ylxfzYpK1sfqXmtSN1wNy2yI6Jn6XEWnTjlMx82wArqacphenR3TsHE9peYh47rZOHhk2YNYZSAyvK0j7UpWhf+isthEFbBRtzJ5aEaY7nZY+BEPtkU8q6kiwTWAxPBM861SzgbVWCVamuJ30F0WqeJgQKMyosjlJNVebaN5Flf1hGJdQv188tKlFQABV4+LWr2Y3prCYsosPgic3WOyqDOLlyu1UAkmb8osPpizKqf5+jNkFtP3gG9hyium9OXSfjDyITZOVQSI1h5I5HmchxOnM3lNeYdns0GmvOJjOcNd8oqrzzTyihs2dXEQNx60coaL55U7pjbG6XDfLBe44RUfKO2D86v8lLOhdX2i+8yBDTKC3DK5dsoGfpVPqKCFOhIrNmUDn0FB6s3+1cyAvdiBKV7sqGdyihdrQG0jhptM8WI5oi2OwJiTAmsgVztWpnixPY0bB5+p53eKF+tn0U3xYqfuSKds6/vl8iMZ1NqhUkKC/ea+dMlUmlC5IbqxY5zq3pQN3NEhRgFZ0XwZ7VJkKXMHXMQ9U1pJHrV+uSlTZEkcXH0DxFHrWpxO5+230HTXpmzgc4jIKXUi+j1d9mip0acyyQJkjT2/ioaAnbgVgf39vtpm1gcEDkINysatBTxIIK27faGVY6TfcKvWO6dttRGXf+kndsoGnrKBj3svT+0LU7zYFC92Ll5MzBxd1YKMgILBFz8dvybX0T7EqkqHAuoL1U3+hGclx+7swJNVNgAKvVB7G9+u2e4OWrHtVO953D9zzjVCx+6XD2ol72qIuqDIv/B4MfD5n8gGxl+qhOYpG/g1X1K1NmmZIM7t9dWIDxo5AuvFP6G7E7rbjQ56FHBoQncndPcwmXDflwI2DPjyQFxOjJ4/v+igNU093qRcPX832Rc5kbXuLJcJ3T2/dk7dUiZ0d0J3l/nhDeBmygY+ERl1SpM0hDqhu30g1InrcUJ32yVjTinZhO4+rCTYkF+IXOzn6+
vrF4ocIzuyyOjtZwRM2cAn42BPLcQJ3Z3Q3Qnd/Q1kA4+D7k7ZwGWVv1P7ZZ1cPGUDU7Bf5Wgpq1z9urKBy5IXdfavYTrwLjtBx/8TTeovmd6ITAxxoRW6be42Cqd+rXM4v3nErMLeQyLfUlcGojJAZcHIvTTsAeUfB5SepITDVyHxT1Cj+6BYWqN+fMFERlayzDeoG1n+Pit/xx8SVRe+giPrb8unt3PEyc0tunBsF1msS2BZc/ms1hYwM+vd+y//23L+QHWrV9ucalGWSagE0uqcy875t5Uj+OLybwPfZV6AGbjkEvIozRi6PtJg5QMVem150DE009q6ZwvHc48z4k7JyG6C4ZGgAmm2iDe39ZNqEaf5DV3sCmDOQT/KuvEiDKSTSh64LFVp4rJApA6zeeq53BGxyz0vESoO0aKuGx+nzPYdxpPIc+3YC7xUxbFScZq6bqBUErgqij1BucBlKNbV7B8vVwUbtcELR6n3/WdKbEfBb0TFYwbmK4TRXa+er63/youH5TNrJeOPCDSwsgUUWibt+vH9ynjXpeM7j7lzCW/xQdgz254xHfFxgYoufIcqlF6yoru273rcCY+rerGueih82ZB2fPk8X0qcxi/6SEBpYSg54wHHKe5ghzFR+5oTvtR7JoTHEz+NPdcTkkXSTbBv2TIMPT9NuBP5TmLHDulwrfdJFDlMCWnLIGU8YsoOHMGY7Tkuao76XLIk4vhij+q9wQtH0fsvkwS6DI4LObciIFxvKwWnw5Z2g2Kex9DzQlKt7Z1HaqjtbOaEl6ntNufi0o/1X0zbWeh5YqC2G2hure2uSjxH+JGIYQOEqWRxpCRLfYHeBKHjBonrMea7R7Xd4IUXru2dR2qi7WLmipnwLlPbReg5/u5sv9Rb5mc83ffKtgy4Z36J++V8rmJ9uVym1mK5eKvyHDd8Ogf1HbMygvEN9b9owv7EGuUzpmvdX5z96cPKCX4DSMjnXqMFEtdYnxqdzQAcPWMZdMRCPusaxTWJ2Re5Rr3AZu6lW02fEQxhInQDQMMmtyK6BzTAEANgY2cmMYdFUeSFPnPCKOFCOo6Ny1SYRoEbx760w8j3XXnUTDJ44Whm0t+3EeJGwf53vc5ykFpZG7AAjXENqi9AnUdlYhLh9gPg3blQVRaBS4byJcMdn1mVmT1IlYGQdFXLHb4B0FPKMOKBR23twI2SIAkTXwUOF3EaRynzAZYcVWWDF16QKncelaEqOzOXXaYqe8K2J1VuezXOYJVMMO4Zohd7p3LSXS1rVU4DJxFhFDhp5MKTAWASmg0HhYyD2Et56oQCTg5+3EVh8MLRVJkO4Vw7AhMkdDwuwa37xrrPkkQt3li5BBXwG0suEmu70n8b9bzuPF4zJRfezL3M66HnMtdu8QDn9xfnJv+c5zV3uR0MOq8NFLZxXqfKjplK7QRgpbSjCH5IjgNbevBMuIl0El96QXz0vDZ44ShK/l49LB9LP2S2QMzAQs7JEYnOVV7bilvb1EVcG96dx2SmyLyqM31xOI9nC1dMitz9tGZeaA8LKEjSzkpZK7JyQi5jlUapFwkWu2HAFA9smztu6IhAgJjdTxQ7foc2eOEoikyOxd1p/V5fpZfr67ttNk9wnc5H1ejOgzPTaAbk9jK9C54I7KDhXXgHx/h0NJ+OFWDMRcTAoKPZQDtrjZYsTEIlcZVOeJgEqYKCp3aYOhxHcyjDNI49FdkUAnIYImTwwsvT6M6DM9BoHs5c/5KiA/7n/weZaWsBgWIDAA==", "string": ""}, "status": {"code": 200, "message": "OK"}, "url": "https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=2"}, "recorded_at": "2016-05-03T23:46:50"}, {"request": {"headers": {"User-Agent": "github3.py/1.0.0a4", "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "Accept": "application/vnd.github.v3.full+json", "Accept-Charset": "utf-8", "Authorization": "token <AUTH_TOKEN>", "Connection": "keep-alive"}, "body": {"encoding": "utf-8", "string": ""}, "uri": "https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=3", "method": "GET"}, "response": {"headers": {"X-Poll-Interval": "60", "Server": "GitHub.com", "Content-Type": "application/json; charset=utf-8", "Access-Control-Expose-Headers": "ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "X-Accepted-OAuth-Scopes": "", "Cache-Control": "private, max-age=60, s-maxage=60", "X-RateLimit-Remaining": "4985", "Date": "Tue, 03 May 2016 23:46:51 GMT", "Last-Modified": "Sat, 12 Mar 2016 19:55:03 GMT", "X-OAuth-Scopes": "repo", "Transfer-Encoding": "chunked", "X-RateLimit-Limit": "5000", "X-RateLimit-Reset": "1462322134", "Link": "<https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=1>; rel=\"first\", <https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=2>; rel=\"prev\"", "X-Frame-Options": "deny", "Access-Control-Allow-Origin": "*", "X-Served-By": "e183f7c661b1bbc2c987b3c4dc7b04e0", "Status": "200 OK", "ETag": "W/\"971a2d5c640517963c8dcd33839b3ce1\"", "X-GitHub-Media-Type": "github.v3; param=full; format=json", "X-GitHub-Request-Id": "CEA95BCA:FD17:F5C19:5729386A", "Vary": "Accept, Authorization, Cookie, 
X-GitHub-OTP", "Content-Security-Policy": "default-src 'none'", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "Content-Encoding": "gzip", "X-XSS-Protection": "1; mode=block", "X-Content-Type-Options": "nosniff"}, "body": {"encoding": "utf-8", "base64_string": "H4sIAAAAAAAAA+y9aXPkxpU1/Fdg+oOscDcL+1LxemRZYz+jCHuskNvjeF7JwcaSIOGuhVOoIkUx9N+fczMTSwKoIra2mzLabjVZhbxIJHI999xzv3u+ypKr9ZXlOY5r2oGvX725Oj7dM3z2zSm/+/0D2x3xURgf94ertbja9g1df3O12d9mO1wXRXuGS24P4UN4DA833CA+OB02+OHueLzP16tVeJ9d32bHu1N0He+3q1PODvlKFpUFGwW4tVwWouvj/e6I6ojyK16LL65+enN1YPf7om6+7QWO9+ZqF27pGe4P4ePbhD2s6IeX6kR2cn5lVQTm78OnzT7EQz1f3aNJ+PMZuuHrumUHzpurPPsRt7LfXCVZfsx28fGm/OTAUlQC/81XdyxMhHEb9aDf8E2oB0nAwsjwEyNI/JQFRphaQWobruUHYZDGscsiy0CJiKX7Az0SPggM24jdxA18x4pSO9BZEkSOybyI2XoYGo4TxxHKoKG32TG/Wn/3fJXfhSisx6FhR4nO3NBl9D/L8V3X0JkRMy/0EtdIYj0yAnrlp+OdeOdsG2b0KqPox+jwFLPf3tIH9BpwmWzo39EX2u/QE9BiW5bn4S1V9ssk0Y4sP2rHvcZ2+enAtP8JNyf2+8Nhf9CyXDuEWc4SLdvhinst3CUaveXD/gHdIws32oa36W1+jVsV7Xu1Ph5O7IUO1vEyV7I9Vr1b4ac3ZcN5epAGno23zliSWngdpodXZNmmG+tx6rumoxsBvdtZGu4dNdq3LEkyDDxq6i16fs4bqPw0P0XbLM+z/S7X8PPHaaq+z101Vah7ccT0IDTChEVGbMcJ8z3TS9E8rm95aWyHse4nczUV9bH8dH+/Pxw1jBHtL/Ehuz9+ie47a6fp/Vj1lug9vGfpNMVoy3k7lP3k9pRtktkHUe+566e/0yRK/TMuRm58YOGRJTfhEXOEqRvuW916a5jvjGDtOGvd+v/RN/aH22JSN9zA9X2/WnKKORqXDVt2YLSa4akDiiVr2Mojq4O1h151bf3EgkRTtVw/v+IP+epWUMcwTcMLgu41NKZFaPjUy4upa6lYGx/0a/2aZk38eiMb7hje4oNtmB/Z4SY6hLv4Dt1E/I4vEpbzEY55Dx//cf/4dsMe2AYLx3Z72mVxSN9om/CJHfg4+ObbL/+m2b+myYDWb9iU96E9Ba9U775prB39dfZNIwhsq+qb/8k2bOmbYlt4tm+i80RZkrCd2j1lj5zem/S1473a3qS/9pPCP2uea5wZDNPC2UmcEKzWmcHgEyGmtdqZoZz45KHBj7ww1Z04NZzUMH03if0wTDzXDB1Tj40IO3gcEWIHvbY8NOi6Z0WOH+uOjfvbnhlGPrZjjm3YrumFbqLDiI07dx0asCAw7HANxizsf6PEjBnTXQMHhiRyYmyBHT2MnITWhdm2Mbdsxw5ZrNm6pf2hGIca+yFmfOJXd3ZpuMlHnAdo4Jdngt4PWW3v0PSGi0OY5UQ4Q6ENkyS00D7MjMzYC33ftOLACuhFzNIuvztt7+kgxc9GWOL40jl/S/R+rFpL9O+Ss7TEn9jhlmn3p81GO7D/PdFZ6Zemo6WH/VYrz+/l9P397vsdbY0H9alxR0ylS/UeqAN3yFg37Ne6bviBW+1CvsEL/Fa8v2WbTP22YysC5E1sdOPNHkgJGm932kbYuK5NYE80BG7kEKDT0kXMrQMS4f2VjOQrk+YpOsy4puvoHNm6O243N6rJGoSnAGWlIWEnydJ0RMlrKodq3IfH+G5MeV6QniPPT6xhoIFAnmsNXlQ2R72p8yNOc1iZy/ew2ccf8ELWcvU5ZsdNAXn1mGn44QNvrIGi0guQ+Oqos6lARR9+Q3v9YUdjBZG9/OYlbJvuN5v9Ixajyw1dGV6VJVA78TNgiYGlUeJ5tT/SOY4s/0QPCpCwfyX41c8r+gdYLpXHqz0cAEdcHD21x5DXoxqP2KX89LyivsQNnaLybNq/QkopWAFIEe6yH/k5tr8VgjZQmHfr3k/Cr0YpHKABL/YuJi5/BlaePYTxEz36gcUse0AjDjTVKAdL8nT+VxogtFM+spsw2ZKrgY80HNijffKEkYhvLwBJdrA2A1omT/fJWbCpWErFoO7Eo4pLtrTn6IaslEtuBMp8IwD3hNmBGTt+mJiMRb5npX5suYGdhH5iBywIrNAA9M9xY0C5tzuGOWSHCRkASLbBxma/Kz+Q8PXlt3RuWism+WLDy1/ZQ8YeeX1ffvu97JKZluEp9S3q9rwSUzH1tGn1Lad33hCywjS3Y3y/MADONUFRetV751w4gDD9hxEj30qxmK4boAO+qn8iOtWA+zSXmRpyyv1YBbI6arEpgNDxy02tOpeXnNqFg5ad2mlALlboQYOWnoaFKctPaWrSElRamW8ZqkzWFzC01OClqLQ0dDkqCw5fksqi8yxLtZooSxqaQy5Nf64t0Pi0a4mipyjcF51IkwTSU9rAdzmr5fd8h1HfJxa1w32XAdxr71i+z3LYj94/lqaWAdzicvzMBjDf2tKBTx7v+i1PnD0ih+4EPxWW/A/lnS+eSs7sSfgid/gwckPDi8LEB/Y00gKVfF7hv/KEFeOoGEb7Qwg2w0iTioln7N4qi7QrPLJwO9IyLwoTd/v92BbjRWlK5rhDnyPQmRe3kvvTYimrtr1TjIqyqF9xxhjZUmXxZ07gonYX3rCxW+ei9PNK/MTfZHg7snpUkqq02UcjLWBWW/HizytstQW2cLwZXyOyR6UVc+TbGXcoInNUujR3PLCxTc+rRsVLYzOdgp5ly23C3e0JNLCRz1oWxxulne5t+OOLUNO5QVWVhzFOMMui05SpqLJAtRObZoCyIx+1ZqAyx/1c47pJHVDiD8xpgONsycJKB55kkPpd0+i0I30XSCAmYvnNuCeXM3GXdclEHd2knMear55/Dbz6TmIa9+FhJGKNGlLh1XMUAh67vr5+Jk8xmeWY1chairIwEh7iOyB74xrxuSiNPcU2PHKsNKWqJTjZEK12ZO3K4jAlXtS4+omydXiJA2XjjPGidVslhDfSYFW+bnW3P2ap5DmNNKyYeP4iB22ZvQmBOqIjHbM4Q78E0EHviQNUI+8iyqLqIGOTrQNYR+iiI60VpZ9XArVP2P1m/zQBuqwZoFmyzUk03+ruO9NY6+ba9M9AyeZbw36nB2vdWzsmXcM5ZpdxYkyBsoPhJ3Dja2ep8hRVHCSI/w6rOYjn4iCA339bFVifKwBEe9ccs33u89Bccy4
VQsXu9lt2z8neArUWzBYPZM3aoh3vTwgjWIP48kgOOlooq4+KhR5Q5zdP4JwT3eouzG/E0Cwoo/RJNeglA5w+fMw+ZNUpDR9QbcB5l+c2cQyq7rbNiHou2lLUeH/PdvJmtVqJMxBVufa98gj8l4Sl4WlzbNEUQQ29oumYQzdNjLdk9AiuT/mr5Oj35+os6K6M1RjtWFzAoSGOxrK1FnR3QXe7li1M3Yt7Rs5Ji3tGUCbKSeMTcs8s6O6C7raDUs/BRwu6O+48vKC7nbHP57oZTloLunshZvxcuy3o7oLunhUaONdpFnSXUJIF3e2/C1jQ3VIWg7P9F3RXDVwvWMBYxhd0twR8f7bo7s0m24HzAXw3ZxuEbD5f3Qk494zizbmFqIy+AWJMJKMOS5cgeB69Q0E3KM7XtI7ygyNfYKtwe85grmJaw26DdD7evEJo5zzulvWpxtuuX9kwXFRn6gsvmfgwWlAvxlstLPQnoZOLQgQ1FD4W/lsYURiTDEEoPrgh8xTddNp92MFzi42DjIeIEImxRDEtUUyrJYqpin7C8Hg5iqmaYeFHbE2L+Iwu4DONiW06KUFRPBhiNkyX/I4Q8BC/I1AzvgPXCk7nlEKGrtYODe3+qiavOJ7Yhwxb1djfLPHEqnzdpXhicmsv8cRcTLDFoSg2Vdf/hHhieg/owks08RJNvEQTv+po4jLiuBZNXNtHC05Y7QMlUlh8vsQAS63WmjBF+xS2xACvr5YY4N7SEw2OF9iHJU2lp/zEwhJbWGJcIaM6aywxwKWKdzE60Dj9oiTL4fdixEyDQbTQPDsJBY1WmozELDRPoTOzsMQWllh///DCEltYYo30EfwUQ7SuJQZ4jIoBKAYyhBg7hiUGuHPtP+dWXmKApRiDhA/GzU0LS2xhiXUnMDo37haW2MISuxADbPvIcEJhwgtL7BOPAYYU/5kYYHxThDEvMcDgBJ0hvKmoxAIO9RcZLvGXJYTw0w0hpA0AUd9IhXFReKyxKBZ0tyKULAP40x3AC7q7oLsLulvluuVg5aLweGY3e+68v6C7oxXmFnR3UXhUs4KfG2RdFLBF4XFReKxnlD/XdxaFx12+KDwSc+vjKzwu6O5rUXicB91dYoBPEY+guTz5Ijx5iQFO2sx22SwU4/cJxABLwdSxQcAigOFTCWosYzGgVFmgtGWyqyLnYo24OixXIslfliAfVpZJ2bOuEK0pYOQry3Mc1/DsoBbU+BXXJf490Y/pTjFk64sHkokiGykkhz2KwOdkRsdRzyFSToro+xcgccU1jKcZQwqiG92HT6QdTu0gNBfqOdLwyY2Mvi1TtQuJ3ZZKL8XRlgkb4cn64/7x7QYyhRuN+vFpJ0W2tU34xA4abqJ98+2Xf9PsX1PWZS70fChuxaV4h4TdlnuFV9dD7cByebZY2crfoCFeXf/0bS9wPGTThTZ5Lf8eH9UjemajV9aTvnuBbfsBNhxCGhs/JEg9CsX14035iejFlNpjRdr9YnKhpJAy57uLjO6GbcRugnzjjhWlmCJYEkSOybyI2XoYGo4TxxFKlDnfwyDwdDO1/Ej3Hd03XIdFlme7qRtHZhCZzHLDCAVRpow0/w5qJnch2mPADWfJ6P2Ocnh/yyjKfX+gLQXXddfCXVJ9CkrbFqc5inrX8POGt+JtrmZAH5e3u0wD3/u5h+Ts9te6vTa8V5mz2/Yc3VRydi+D/eJgt1xPx7wyYbAPGLjlYNdjzzFcK0gtx01Cx/WjGE5pL3UDFse6k1oGZu3UZJ2DfcANZxnsXyaJdqQBf9xrbJefDkz7n3BzYr8naX4ty7VDmCHXuZbtcMU9nwR40p79AwKGsnCjfZSh37sVhgx9b42dqMETQ7y2dV63Pdcz/R7rvGcGWEuhWFLsRB/2pzzfZ5S/ecxutFZ81I5U1ueL5p70n7fmu7rn6jZ0WsQ0AAGXxpqPT1prfpkHolj0o4D5rp1EcYI0E7aLNdvWLfyHGbEfupEem5Frp0F90fewF8BHQZhEYeIEZpDGfhDpju54thVbjm1FLElNOl+0F31dty1ko0495rqhiVvatulbYRSHWAV8lrhuEqcB7xLteYAl4Wa/3/z2KUQgDK3guEpurgrRKC44lec8W8jVN5C8wRqvvU+yw3stPey32jb8gAVde2SRdmD/e8IMQbBd0XKFStXQQ0O5tvd+PDqPyW1Q/1fQbpGiG/+Wn7Gud/Cv3m+eaph11UK/P0LBR/tP3oLUa6tm+hNJcGnEKi7aRPulE7iiwYo7iLPoLYNI1/Hw/e77Xb/GpQlWpFzRzDkb2u3bar1nUuMd8usYxtp5lZsomkndwEMTlyemzeZb0cM7D07LhEpHe2Ad2OtjqhB6GjRaTltk2QPfLsDUSmPiRs4TdPngeUEoCMIWLBOLD4da3cRM1zeMmY86IRUkjCRZmjbiTM5JN5bFrqkQKsCVhgYXnqZPVCQbFbVXGrfQ+yub/ow+0aiJhmMlNdXA2npP76Hs/ZMW//FJ6WvVuRzPXrtwUNaTctoeHc/esDAlKX1palJO69LKfIzHyuTUpPSlpaFpi8qC3L+BUdonCa9AVMui82Q9qdXkTFL6v9Kgog3nkd2EyTbDxMkRfewmon0Clc6rP2Q/sJx2D87197RLwF+x+pva7R7fYPPB91+Ec76/udkymmzzm5v3/ATGP0F6NmRspE+w0aAcqij2eMd2WnTKNgkVPt4xbFRy5EQTdrC7+9U++sfn76+1d3csZ/WSSFqpQU1UizZh/IHOdDjz8Y2gAIK+wpkPO8Q/R/9g8fGznGogtzhUA1EZ3D2L77R7dqAEk6hMbe9IwFCHz9d6a2A74a6R2M/k24nTfQKEvZmyr9xyuHRNTdLpyix0n9VLpCBqiER3Fy9RZJ+u9MC30lBP/SCJg9jwjERPPC9hup4kZuJ4hhXhIM+F+j6aOBRfjPIVloFSk7alSDk4frJhVKqnqDqXY4128YGKz4bblEGexePLmhaCusPtlVK8vQ8cBc5b5QosxrvI7yi7PfqAODRyx1X1YZE3sPfpbVmABT6xWhbgzqDyxhI2WVDm330BrscKWVbgOroHAKjmesLITuk4Iz8q20t6o6C4veyYCVBcBmx3NPoyYGsH7UZjHNjwHfOw2KDmcFVJBUQZeIMttBbGh/3uacv3xd/LXMdyq6t9+c3X2t8O4T02st9fvSEfSKjd82zI2j02x4BMsbMOjxpydO8fc24iz7b3G+yn4xiYKrlSDtx9io0yjNHel8KXB4Cmzafg0c99dj9KQYxQFMPdP7CnEaWp1PMK/70RSb5jqAKH0f4Qwi08wpxS/Bm728oasaePLNyOsMqLoThXfBreQmOFotR2lrvW4ngp0Bt6qD5nVMHXUw2KcrVzxohHmxg+pFZoSvpA1RLJL+HBos0+GvFQFOLDiz6vsNOmJibxBrI5/N1PChdSH4pMEWOkrNXxwNjYKlHR0tCQo49ap/Lo8yxbqozxGd5U48ODWnU63IY/CtlX7kcm4GLkdFKVRjeQ+2KOkvRAnxvVEqgWFa5M3RPiPdoWLw1j0r
M33I4sqPTz0caofzYNDjmkq83VdeQfHAKkmpxJ3KlZTw5a5avnXwOcv6PZAlW/B9Q16nVQwdVzFALHu76+fibP8LCwH7V2HKOi/hYe4rvsYUSdnouShLeFx5+eaQ6iKiU4pBApc8RQKIvCjHgpw9tqQpiP2kQTBZwa7U0JWY77HU2u1Qq92x+zVHJLRzSXUnym0B612lNS+6mWDshTg86Lxxf7uQQu8P0TJ/cNf8e1wjSsODG6gm2dt7r91nDe6chn40qVpSa0y68x7XfAfi19bfsE7XIebwf6664Nf23qL4g1KY9Lvn2azPPCk4fff1udTNZdFwNZ3jXHYVVk1VUkf2iuEecKoDJ3+y27F5SLIrSZUIVrtF4CsD7Zx/k1/CIrqnf2I5gZoI15lqK3GJfZ/ArdI2DYJ/DR18T5kRqNROngBxa6Z5jfiAEpnRDio2qcrwVNk657zD5kylVUWSQxkuEI4hRS3W6bEUlM9B0RckAJVOTdatUSxxCqYO17XuGPrd00gILTxGFrAQnkCC0DFkY5QmXpL8Y7QmvVuewIrV04yBFalBsP6zQsTHGElqYmOUJLK/M5QiuTUx2hpaWhjtCy4HBHaFl0HkdorSZnYJ2ByvwdpEDMYHU0trwl/YDvWmhscQG+W4bt7eV1XYBzZZOK6WIZtmdURxqt9W8+bIehss1h+0mhsnJ30wM1aD5Gb1hWKTgYllVLT4ZlVXNzwbKq1cGwrFp8LCyrWpkBllUNjoNlVRsTYVnV2BRYtvHCBsKyamnCl8bDsm1b82j0c7n/cbBsR5VGw7KqrWmwrGprPCzbqtN4WFY1VQd16RhbIauX9yIdqgVFGuORsGzjEWv7ddRrAJLafD6eCHgsLNvuV1Ng2XbVCMypw1uDYVnV5EywbLOeU2DZpi2O506CZVWLY2BZ1cI8sKxqcxws2/Uy672D46vDx+VEWLbR3vPAsqrRjwHLqneYAsuqlibCso1+UmG6NMs1YFn9re6/NYJ3CIEGMqtfYNzqBMvatgzyOQfLijggTsrFUis7En6ClGcNBVVq+BIs23XxC7BsV5GLsKxSgCDSwbCszaNeCFYS3swC7AycmiC98uFL0GwFwk5GZo0AkHEvbNaupJd4IQWepS8rfJY/GUIfQ7DJW9odlON+Bv2liqZMcXcEc1Jkyp3g2Ran8nMdqwr+QVm+bnUUDl/UOKzH4cBQ4eqcamt2zaUuSjfqq3K6x9VaMNDbTl7ZHCSR1H4tPVq2zWyHxWI7PM7kcHo3dVURHVDQscaKLIEgvr+l0I6rwi8kMcclZgrj9nImitKTNh1zLE1NchWUVuZzFVQmp7oKSktDMcey4HBXQVl0HldBrSZnXAUXYqaqSRguQ3WGw1SEz+QZCl46sCmIeEm6NFdrn1asDZO/geUdI7L7Fv7llPacV2uTJoN7Uq+Ji8mguWNy3+oiRunVhzxTVLkMef6aVsf8910yZku0My1E7WjnckMxFCWu9hOk0sXHIckqPQ0+daG0OHMMLlmrArI4RmxDCAGktiQfqjcBRD1pSGgAEYzV3mYI4fWstRJk7efnFpHfVW3kImxYUNxCCDlmhCqYGuHkx+y4IckLxEJqYjoAvbsKoNynGlXsGmGSCHTMr/9yigSVOyeBhnsER5JsT5MrAHG6D1g4dhlFf0nHo2lAdsTF5DSOMCCLT2AMNCp1uTUbFw9iDtTLjmcPdFiZ4opUzE3aGiiW5tseqGanbhEUa0O3CUrh4VsFpfg824VGjYZvGcQ8d7X+7u/iiEFD/px+Qq9YXh6JCj5WG1ThWwQ9IMCEpOU4GtLkujW2EX3DmGWw+Lt9Ej5pXyM++5jFiMvm0ScU3E2R3se3mJjSEyQMsenRviMJn199/v7vvyrOy5xUJsJXOLfMXG0yuI0OT6uiUH5Nc8MvUfBzLWcMkdsIYBFzI7/LsOmxjGf/xXeQ82SIR8/v9ketCNTWA9jTbE03NNPU7rdVRfF6TolU5KEJljwJaHWuaYxXxMDjNeSsuDIsxO1ZnruCphp0kAz2lrmGjmBy5rwNmJe8dRiElRwjRNy2d32/u/28iLL/an/aUAsiqgdLcZ5FmyctooCeMlQeS0CIlr29faIod7TKzQ0aVOMRPrRWhryh0eiICupcKhAldMB1WrxBtb/AewMRD8xT3OgufKAoe3xzYoiWp7e4J6UlpuGmVCfeeCRCO2xbyvsTXi7tXRTWy6esrqtbrqW7ipzhosTT4QZrKByWe1MC67gWwaLEo6r/XH90JR5qetqP7+MPeAMFPbbYWi46PEJucNAmsoEIiUicEh26fOZpBJkumNLFQ2qjtYZuFn8mmNKiw3PTUsfpo8NTu6amwyPY/zXVndoHi8bOI28CbAkvT2Mda7/08rR9MZVnarjNEisREkP8ZFM5YobbG+6EWTR2ZK4WdS7GW7iM0hRzLy5cFlcpGzQfIrM4bPhZu7E/GCPZQTNZcRZdNHaW3TB281Xw5DJgGyNsHsh0kod1WDRHeSvCv7AWfVLRHL2TjjSfoncwh1JwcDCHWnpyMIdqbq5gDtXq4GAOtfjYYA7Vity1FoNl0dhR0v01XtjAYA619LRgjrat0cEcbVPjgjnadrg8z6Kx087ZqzbVtGAO1VYtEoROmzWHZ58TZ6etRWPnLM1OYAlqq80UzNHuIoQgLBo73WqQXS9iXDBH18scFcyhGpoYzKEa284TzKEa/RjBHOodmqyoIiRj+Mw0MZhDrdaisYOtCC0Wi8YO9+gsGjshcjeJAJAmQ+S0WjR2DkIPE6m6Lk9cAn8oCZiT2HGllflgncrkVFZcaWmok7MsOJwNVxadB9ap1eQMC27R2KEMDEjp/qL7pGjLoe6T8h2UTpfSAiiqg8badG5CWZll2NZCARtT2s9j2A5DZcuOQT98aqjsorFTKqdPkD5XXvFqMCyrFh8Ly6pWZoBlVYOLxg58sF1UkGmwrNrIZGs0LNs2NQ6WbdsZD8uqtkpGyijpc9XWorHTmZaoq4fWQV0sQDIq8/L+qMuOLLho7MgQiBeOc91NuGjsEGRyLgy9q83GwbJdy+EoWFY1NBGWVY3NBMuqRj8GLNucetVg1SGwrGppIiyrGrsMy86tsVMj3GLVlpMpflo0du4o14dUI+fyNJ+mxg50daQAeofGDr5cNHYEOXqkqkwXm7klhDPStqpYw4nSLdPjhGv+bTR25MG/JrIjCFrlBze0WRfhuSykqCoZRRAhTbJweHwK8h+1WfjVxVmavhUEAVq2Lv/xlRh0iwgIh455AOG5QEupCYMGXFRAct5SZZPVpUXKYbqogCwqIEjuuH8cEcS5qIAcMk5ro5AsrDTQlPmRCw68gAEI/8eiApJsSZGPbzqwU+tSAemO3f5naoA4a9taWzZpNrRCKBfFj39PxY8ynpOOExfDt7uQqwazc2UEjqnbtmOgi12OKDy3mv+Sm5RLOiSDK3v8i+GYbnlMIxk0rlFS2GzLaRUsOHnloqd5AchsxBJN5xeUFMRJ/ILSyny0oMrkVFpQaWkoLagsOJxfUBadhxZUq8kZW
tBfIaGEAQS1QXYTqsviWZHL2sp0VsGqdo1crP78IXx6A5mjJEt2nx21LY7x+O2RCy1BjZNrK73PT0in/p6kkY507WOYa/845UeS1HsCiUZ7zI532tP+dNDYDyFPph6xOETv5hfDmQE7e5Kn0rJUE8kNNRPX3kOmT3wnA1WR3R2pD8lkethvcfM9bDyEUFzKr7Wvj9qGEoVrmwyiS6jYk0ZaTaj1Dpnp8xyyWG+0fM+/+WyzEVU8hBlMfFmIBP6ekh6WKldf02Vc1ynUvvlWo90FaUPxZwu1lD1q2JGcjiz/xRBZp1ojvz64wTM8zx4AN9i+oVPySqmmHEV70li8PUjhRFosrvDBxVVRTIOy6CjFRV6LLwS+V0Uvd2Skq/CCF+rUsVIvEMOUncMiNFo/4BX7qEVoVIph9ORiLhDDAjGUMhcfRWh0OsQAZ2bnJq2vzKiztry1yfO7LBDDIirKRUU/AsRgeAZt9GaDGKS9WSEGstmGGOROkfaWcvs5YdM4Pqu3rMblBpQXDVJ8ojLjtbhrpadocHMzk2AEbmE+CEGYmwofcCtDoQNeaDhswIvNAxnIGswKF5SrzAW4oLxGwgX/oR3Y8XSALjRBAr96B2lknLnZJv38+ubmloENcCTJZfrnc62QbX4HseTPclLA1iCAewo3UFPmms3Z7R1pYdN5H/hAsscZn//6QXuEqvIOl+2YwAcIimA/xEy4VzRwciQ4cHyECDMDUJDkBD7EpNQMeAJghCaUbSKWXA87vZfP/PpO744fmNaA0/uSMUTNGCJS7tHhrMhBdhG46AAJlpQhq1WPdGKVR0FOkKN3IsVJekkZouYxubwtaWARg7YnHWfwhSzwopdHabV6sMVCFshXn+hJnrKTNbKwDkoYgpO8vraD5SS/pAcp04N8hJO87hvEEL084w8gC0h7s57kyWb7JL+QBfDelkQJx7tOSe3V5GVyIQsw8POTdraA2sp06fRfrF7y9P+X04Ec7ziv/0L7zz3LC1++8OTTGb7w4+fIyi0O9o93cKoDGTjyUz536NOZvcpL9UbDYR5ppkRqJzj1JcLw30i8pK20Lgc+8jJ9BnbAgZMPBJ/ABF6gfYcD2eY6O1Z5qeQHq8+RFOqoUhSQRKpEI26zBzL1XuUJrLXPOMzxmSYyCoIqQChGVXftsxro8dn7gTBD0bivD2Yw3cCu5X76mg7Ll2ISFpJAsY9coAVKorrEISzZSBsgUQcHfrRgUwdAMcUTopib5BFRMYBjeEB62ufV/nHHDj89cwcDicApu57LlJuOVpu8Z1IqOdRT0jjIY6zjgYQLZPCTzOM5+TShBZyFJiILhr029QVZWJCFj4csIOTdsyj1/EzIQmlvRmRB2GwjC+Qwla6BhSPQ0trj7uTp4QbczKQVUTj1Z1sJhbmFI4AjfynFJgPHx4UUYJUxOH59ASUor5Eowf9l+Ruc9JEgWYQRyAzN+X5zomDIa+1v/KxPLnrB5N9r96f8ThQ53h32p9s77bcFdANWd/+Mys66XBZf4anaMAwfTb1E+iOcREmQomDXCw3/8k76MilhoeHXDwRyh1Ckpn9zNYpIWBT/YjyXsFGpyxuuxsWL877KfLicsM+q5nVgBT/nE7Y//YRtrPUl0H+1nLA/5gnbtDzypFye8Hv77g3QP7m9WU/YZLN9wl5892jnxXe/+O7vc8h5drozPlagv1GsTJdO5cU18lT+W8JGtP/OYgZi/BaaRMcD1ybS9qn2SIf1wo8Ov3iyJ294kqUpQ4A9Uep5OL64wrrW/pLtYIZLA9Qc4O8lEZ8sfsuSJDt+tedCxn+O/oFA/1yDKx3E/W3OS375zddajFCAN9o2fAJEAMZ/DO0BIvoTeQDQwA4EAw4j3B/4RoUXS9kxviMZAR5AwP4XygAkIVDSCTRUn+H8yA5Uty3bRiQ0evOeSAb8Ax4qgA++0P6GsP8N6H3Jk9AAwNc5O3JFQyoQMWqEaBPGHzYZvwm0BbIjbkCSA8c94g5OqAoeNtTQmFSvgYEG5Wt8dViF4ZuuMSTQYGEALAyA85DOokQI4ahCQmPBJzAF77L8LhwfA6nAANP9HIq5Sf6OBZ9Y8AkuRb32JuIT9trR1/qiErDgE3cfEZ/wTcMnn9Rc+ERhb058gtts4xMLA6A7qXWltrUwAM4d3DmPYCj3jRf6makE1FaZs1hD7RqJNbx///4eOn77HSkAxBvo82qkFPArQaP/fE0fa/iTsFS7ucl2SI1y8yuuIlB+RV/TJ9fVyf432rvDiRWiAlX5SnaACrzhWIBqCLkYd8dfffYHOqPTof75p8+uCYkIj0Kp4POiQmS0v8BBURX6FyKBRw01pMfk5sQ9k+zwK/rqc+03vwGgIn/hF6CRCgP4Mb/jhXirmdr16ri9v75/4hIKZbVreEbrcwlrtD6X+Efr89r1RauqFaqqY6nV6bic6yjGdyz+UNN5JICGh09oogdQPqww2wn0pwr/ALqTkZrDXkA7IkqEbUmH8b18s+8H4Sq17vjKcBU78E3bcBYOyH0mE4Vfk2e3g9CwcEAWDggCugAQA34l79i1WFry67+cIpDqgT3nNINAoDYmGijteUjpY8FYgIar0fZLlMWY/AxLlIUasqvgWjIIdBAH9aNke4DXelKUhbnWwSj1lyiLhQPyETEW23KtGfUbjMLenBgLt9leRxcOyMIBqW/VBbZU0h4mr5KLfsMZ/YbaynQWl6ldoyR72FJ6AxmUUXA8QOeI2PGRQauhzM8Q7hLNEpEcUG8ENwMgCvggOVdySECJAAtEBHRwLYj3kHWACGSSxUBz3uNicCbChDbo7znw8bj//D0oHuginLuRhyB5lNqPd+E9ki98UQAiXxO/A0wO/L9SkuBcEOKIoO5p9gMIGdjhh0AXKGcFVfYBm1swWt7i0HgPbkuEPBSPPLvFZ+BuhFxz8sC4ECVAnjAH94UkKY/7N1wcgiQl6PecRC7u9o88QAUZahGqEt+Fu1tR7YghXwQBFniuqmrQhYjCPItJepKe9j3PVkHPFB5y7Veop3XtfC6jYsAhkU+xR40PWZKwHce96K8EYf7jP/5DwiZf8q+ur68Jm5KIGe6gAGbFt/wS/JEQ1nfGG818o1l/lxboHxiW/2jhb74UUBXdjF5RyHEppVQNE0KOkD0xXz7s0DacMUQNJEJ87tF+eOFZQo2EbwTNRvwcAnMDfwc9CCARvfkdIW8bLT2J7hNusuNT0U3e8xeZoD/RrZI9vxFKA0SinlMqjpgEGB2hhc4tbsIfn5AbJEzY4Vr7ZnNCxzmKLB7IOQK+EQ8zanQkqTiy48Z5DhBYRM/CQ7Jr7f/i0bZw1KE7saR4N/9BvCMyQ8ok6JqPh4yPBaQKkc8inpgjYbkUKWWQJSX6EDGmOFdIoGGihahHQPFENCLQropzhXbIKQcft/KZkDLl4qefUa+nt3V/2N/yF0Z/eQemMQPt1PeQKSF8DWOQGh1DQGBu8R7x5jG6JbdMqVbwFaKx0DRCfUVmQkGDPGm3+31SjJKIoRKoveitAqGD6Q8ZXhUeiQ/iQRAd
n5lE9PKrg+gcH8wnCortG6ZVBZecQR/GJEtpkESmha0seVMe8T7FCXQSriZMPK92WNtIyaHIqjrJ6JKaVRU25fnnlrwpw6jnCyFqyZvyiedNcSaCdZBTM9fmErC1EKI+JiEKmU/9GdOmGIW9OcE6brMN1i3E4oVYXJdGubwr7QgXngznKbuQofSqhovp5yAt1umcMox3em0tOwvv1a6R8N5/bbdbgoJqOB1Qo3cSSOOQzdckxaodTrudOMaTHCsdMIC65AWW8P9lW0rpmhzvfvP9lWN6318BRTji5xz1BUAItOCombrBpdANHXwrzdNMXzMs7X6Li/NDjIvvJPtwoOvIDyw3WLmOwXQ/st8yN3XfGgZz3gaeFb81HCtyXTO0/CC6vt/dfn/F0Sz6q2aH5cFuwCh4ihlqE4KJ9qcjB6pqwq/f44zGUZE3qHhGWWp48lfCAQlXPAB/ehL4JcBLHnsGDGqHxLBJeAy10w6Ik7Z9QhPu0uz2JCLqAEEd9lEYIWyuhjlRfhuRKNe89rSHnBBBnoh2iL6NeOcOOSRfG3DiB07gW94A4GRJTrMkp+kIre2ggy3xYwu3adWxYVm4TQu36Zg/Q246ewjjJ4JkG9ykwXvgT4bbhANe5/axV5ZZ/Z1prB1vrS8Ksgtc8hHhEt3T7cDAsJspfqy0NyNcImy24ZKF27RwmxZuU3GM/2fp24iVyV07F1Rn1Wsk+PEOB3w+K5TsERwUDvvkBN5FkRiWuBRbVnJtfvFdtg1vWZUtpgdWIQ+lK8NyfccznFXIYi9xTIIq4gKqsB33bRRFrmfpoWHpMUEVJVWjAiyANgiQQYjlUqIckGwYu4/3CP3iiAVXzkGGHBBOONMCkjVgdmQU+wDQ4T4jiRxgHLfEFFGYMAiDAzsj2h+hUCPldwTJBSbSE5g4pL+T7VLk3P2v/SORYDgXRcuz7T14U5zEpPGgtiRLiItTlmZQBQKLhNeZGECAgx4FFwu/IZMPB4kII3oDoEQo8ciqVxURlWU/gBaToCzp63AshP5+JTMBAaYRhJQdguI4WlVBKFRTtnvIDvsdsXSoHp+BoXLK5XUAuCSP7QskKuLVJNyLPyDxzuQziOvDgpEEdts224GahG4CTZ+YXIYi1RAHhUia+JFuABxJA9UpzMs698Zw1I3Xq8NwdM/2/UX3Z4lJYxscqxaaSwZeIueGAlIu5uUlJo20e8q9I8mvXUyiveA2S+afRkziz1mX2JqG2xj+2vbWprvEpPXYrBc0QdqtOzpUbFduwJI4NRh264YuHYvMS946LDV1xwgTM/GU3XptNwqPc46ggqcidgKhEKfNUZC6o9PtLRyFFP5A1Hw4C/k+mjanQmWThyp0Rypr9yGpbApiOPabCD6A9CWpbvL9L6IV4g2ONdx/iz3wNgSZXdCuxQaU6LuSJEoOo4uT7QXnScEzXRmBHQSeY82n+1PZmw+3kTbbuM2i+7Po/iBOghLP4XwFJcEf+QjstQtZdH84peNdfZXpJqCo10gM5ndMxHfJ834ZlkOHZlBMeJSPgmFg24ypD+gCAhgysOeBPJwQdMPjhJ7oO6TojVmeAyOgcml2oHN4tkWEzjuaDoERRE+ATPLsFnmFisAXHhrzRo2awYwsolzobjI+6gg5IUobHIoAIgFPlGf6EhZBLJEII9pDm5jDHgIdwc3B+wByIlMbF3AN8W+QJBgT9+4tbwJpWFQZase0gDzuD4BmMhRHHN0teuiQEBq18V8biuB5geta5BVYQmi6tMM71uhF7+byGbKjyRZOyMIJWTghS1bhpt7RzxlbMCdiC97adqTnDadr7LpuwuPVegd30Zurws22T7CB+ZoYskglIfdDtDeLTtnm+BbH3DIW+jse9//5+8q/luzjHKqGpHF4jV3LylxtsugQHp5WRaH8mpgCv0TBz+WmC1upWvj9MJS13Mv94rsu7rIeEHfZ1nRDM01wlwc5AhdsoZnz5MIirGALnmXZzmycEBvEcGFvTmyB22xjC0sIzRJCs4TQ8NwMn8g62smBJASjtpadRTBq18jl7S97rG1cv4OSCoFBUR7+RariEoSg1EWExso1jpCILSYgEVXDEFVjG1qpRQnSCenwag/hISOQQ+C2UlsFCMceoEd+E0JNB3hv/U8U7nbscEMBOa0/8rs8+1EtQxcSjyLebzbhfc5uErZhSF90U0zC+F7+eJMj+oXd3IHMcAPmQ87vIVu0db/SLrRzyNzpGLeuqemUXPruhhb5+gVU3zuoBZ17Vvld57PK747ZEehR4w+3S08XJuLhGt9nAHC6W1fT+HeddxTtmyXNu1W/f2BPaO4NeCTtPxv4i9ufik+ovuRVP/c9SbAYfve396dok8V424K4Qqm9yj9kt/199Rrkd0gJlqZQMVL+/O8pPIQA5UBBav0hu4gK5D0frUV4jnIN/+54g1xeH264OEbtW/ndESyg83aPN/R9q7/UynbZFQqw7erwcSGjGCkvWOu+Odw4AOSqwXKTA6cTl3V2MGmB7MLv0rJXfEA+7pssv+FjttFviu9IH5szu2q9htuVZbd7DI+w/q0GwpOwuwUfrNkdi++q562qV9rNDzfYvG/ZDdvRxFTYeMw+ZLWP6PJitsKciBKY4BD5J7K8cVKdOr/VChRTzYcQqk+NBro01RQTe9c0Q+YhcnSD0ZClGSW73wI+rozzIU8p624QiLeHFHDtz6VhS3bxltJDBl5ax8vEd7d76IF1/cF3eENd34jpgo+BshFqDSSyytWEqmhlkUB5saTUjj0EipfrCF+iKFhR5rUrDzxf70B4w9IRIr9epWNGdEcaxxxLx3LD0WxZlUqwn9BzSNkfrkGZu0HvEaPpV5/R54dwi4Xi9rNK4v6COBWvnRSj6sTka63A5fIhnvaHcJMzZBKkuE0h8MWzAd6zPdEST/cIaP0DOhwYneRfffOvUuR6f16SS0iYXfZFvIE3N0PyQCEvh0BTDCpyGdyFB/5TStJa8ClX75OU3yhRAoNbOLwFxbLyRJC3GdvyKONCatiIcFU7vOI7NCIxOLnCPr57QxkMvxZqY+QGwZsE93K/h5DXrXBaZMf+Ol7qHuu1OSHgPrZAzx3ghFhSGC4pDJcUhou8/uZqSAq7JQS1J/VCzYxYB1nGEDkWpZEEq/vVOqXtFPhhH0Ve35jmboAGqB2sTWuhMi5Uxo8mr2+7vhUY7nzuhtLejO4GYbPtbliojAuVcaEyEpNTMsX+Snmc3lzlOAcD0lVWuHOOgPoqc84RUL9GOgK+Ji0oyfiWZ+1KzKmM+QF/EWRCooG/0dj17bWIoeSkvwoWlfw/InJv9ogVOoD/B1QMMUN0ISnFQ58elMA7trlHaCZBGhVkc5/t7veARgT9u4ED8Xrg8C6CPdkPSHcFBzxJVpFIPQ71FSwQxgfQ10k4nBACYIO4E5EoSc9LKJojyvRAN35ToBIABgQqgCfcb7g9eO6B/PGZLx+CFNQb+NUhBY7jmTppPspOCPzpw+8fwNjHRwDR9jyzGKkSgzLvmQ6SDhVC33/Ibv+bPR4FGXmMzLd
iYJTIt6zTF02Nb8tw3MBEShdyN+BEQ6EJbxP2AK2UEDXeb2jUDQ0lEOWw3bwPn0iOnxoGMdcfGO5QNJFj+w6lkqndt7gdxSXfyM+rJ1dqtH8EqFnP5Ka0kPISJjXXw28oynXyK7sse6HUvTwoXmY6imi5WuuMPmC2bGCAP68QMA6XAt2F1HJ4rvuBFeJlnlf0z02WkBXMQtDYg8fw8OLZuVYpWQpVonf+0wQmW93o1INlzdZQActa0eFZgmuF5/G8K7WJGTzPg/K1cUmlI8Y1P2G+JPBSu1l9fqn5KjEFfYmFZ7OBtgC5LskBQrOQgLm5oiU5LB4hKIAVlxI8Yn0mDQN4JR6yhBanykF2QggBUnOwHIh3AVjRWnikpDUxPiXHBHy0IOHDy78Bgx+r2Rs+VV2tKSqg19R35qHIygtjRjClusojJTdKoy6o3XgjVPh5RU5oMQA5FSDac+/heKuKledV/Vca5cj2sx1vnJeGlTvsOsZb4aVhRZzNxEC5PO1ceBVim1MSXXYn8nHSk060K4rDDvJiI26FsfHPW1p45lRnqhz4lDtSKR392IWB55X4ib/b8Ha8QSpMFdvso/FGbrPjilt4XoF9JNYmkAOm1ItMkgHF4oGl0ypJBkqLRzjqppnjFkp7NIfhRY83WRhYPctWJDbKCSI4402WFvCOac2+DX98Meb/wqirTMBejRMxvoJ1I1RHsQUg/sfoMVKzUVnk6ZmmmuRG+JPD0f7S3utCM8rySt+eapMGTNNuQSob/dyFAciJlBOsmL4L5sjFbeOFJpDzd9cNqEvgADe+AxQGVs+/Rqq5O5qPcB8K5J7SEFR+9YxMcuwnaFY/E7ONLG/ZYcoAFcVhJzwgFdvD+Bo+FwawSdmGR74lT6mCCbbodN4b356lBVgT7210O4ri9e50j3Pl+Krx0nVz2wwMThzqJ8yZlYm6YYqpAP2uT7jyhW6vWHn+ApTUmL0Bp+gNehd2vxmlRtzd0mvrn/Gqc6vazHZ1AM8UXXd8SxcGnldi25qw+83+iTQUxtus2aAxKrmrFNZyVUnYv9PNtWWujfOac7rxzrDWpofMU+Suuj/ldzI6hsw4b3X3rWm904215UuPFiZL2YXx03qF8/CxSJnd1ZqkNAjDeX5XlfptVWZ9oQy8R7vmoO55t4fmevVCOdTwDmjhPTYMaECqL9jHV2vb8JW1nxOar9b6m6vHkCTzsdJWHxX7hSKwiIh8YsxWh0l8VM0I8kRG1xEvUbmKapKXn4ijV3WrbXY47IFmEO4gopj2yDIq71arkjh0UX1r3yv157+AghUC6r0R+2I0wDYEV5QAacGfFWfHnwj/4lzc4izZp9e9NlTUsiynLQWXd+KiSwZEFRIloj82eWve21hSLHlTxFucgCKq+KqQ4ZD/1Gf1VCBflO6/HCgl64HWzSVhyO7wrNHCCKrY/+R91lp5+r6MzZ4rTy+LEhBagafrpg0KhNi2Av0PEDDGSeJ4s+RnWZThyhWvETc3COvuSF2IBQXvQVjBXuZyZxeIeYeVKXi3Ym4S4q3yngRSPgPmvdCpjl3aGh2d4ZNR9McOo3On0EfRv3HNEr2dSOlueuHysH6NhWQFxJQd89W/S/R2v71o1Xu8V5haCq5sw4VgHZaEvoJCC5d/Xi4/sSP+xZtPe/URNp82xzsJgJhj82lLXxJsDdl8EgC1cgJqY7H19APD931l64lsMMXW80uolInAPA3ilbsjHaRRlCbCOnHiuI+yMH/YSKu+7pswMoo0wct+MZ4yUavK5XapXThoC1mUg3d1s4Fa/fDtY8PClK1jaWrStrG0Mh9NojI5lSRRWhpKkSgL8smk55gTe7qy6Dz0iFpNhpMjutj2YkuGR9rsSVygxKwKF26BUJXQcPFBOQOtz/LtvXe6v7YsYJi0fHeyHHWPJBF0a203M2SXOGh5icWzQtG0c3Ng/wuqH7C8EbqyHDiXE9flca0etmvzXZKlaeN0V0Mou4tdUyGqP0GPgwvzUkSWkzzQ+cV1oSwDvvfQpEhFo/ySQ6WyT7w1gsqacJr1cJadgTbK9aWw2V4zFj76wkdf+Oi0Lo3kozdm4T4ztZyHvnwECXvLftE300x1p9eYLZiOdE4QkAK5bOlv4HHrdC78+x7lyAkJgh/AaF0PLNuzTPLCcYcchAAT4jruYig4FJ/AWQ9kmvhJK2IW5KvSfUW/4ivP1kM7DYIwicLEATs8jZFnW3d0x7Ot2HJsK2IJdPLxViIGnxkB3SxK9VSP6AqX6QlzHd/wLD9MEkOHyq/thL5vez69SVq0QGm5Wn/3fEUZQtZXFrKLB1HkQWw+si1b9+LUZzbDcZ7pnmmFsWcyO9GpcHhC7AE/wHCtDxQWO7Tfkjvxbp+m17s9LpPs8Xf8cKP9z7X2R/k1reMIuciF5/LMGaloswEkUHUxlY+46v1gP70p2mJA47fbIop+jA5PMfvtLTUOAV1VW/yOvtB+t2dqE/yJSCUabSvAp+WbPO2XOGRqJJsC6q04HK74duitgcxcu4/dZr0b4Ke/90S1ii2v2Ba/Mg8rnwL9gCIXyylws/lWvKplJlRiTEqHannIqpxyADVmPcnQhOsCf3FNwxuN4/yrzjUFM3raKUFpXGKU0kJw7ny7wFIdOewbkMUCS11w1i2wFHdcNbrMGK9lAWmc8zMOgZEERlTzM56DkTj7VAhJX7yEy1OCMyd2hobrx0wP4jhyLceJ09izbIP5hpX6XhKGju6mQRjolGauF4DWk0rdRpQ4gFWwnnG3A3vI2GOppXmZeiBYmpeMSteCanWs0QIprHNKi8+G25T06eLxZU2HRCCoD16GHvTeHGN55QeTZ0FNKvf8D5u13JfyF0Knmur3wSeLxTHysFkcI5eAvWUFmmsFogmxDAU3fSQC8rCJroWCY0DXo8Crcyii0vFdK/y7uADf0cZ88WJezNdeNmdJnRsd8F2aWryYNY9Ua5P2mr2Yw0K8m0NVje3+5tsv/0Zh18jnhpztT1sN8KH2/dU3PBdIQVegcO6/HcL7e3b4/grSrqSuImRpkSIz/gDoTkSDI5hl/5hzEzJdvcgSR8qp9djwETHdzafoHcytFBwcxa2Wnhy+rZqbK25btTo4YFstPjZSW7Ui96iFy39MiLZqcFxstmpjYlC2amxKNHbjhQ0Mw1ZLY5KbEH/dtjU68LptalzEddvO+FBr1da0GGvV1vjg6ladZGA2dkpDo6pVU9PCqRvVqmKxUS8ZVd07jrrT1ogA6ubzcTfV2Mjpdr+STqHS4JAjebtqdA6fFCutmpwpSLpZTxFePS46ummLh1VPCotWLY6Jh1YtzBMIrdocFwHd9TJHhT6rhibGPDfau4yXnhTsrBr9GFHO6h2adOIh4c2qpYlxzY1+UgVF01TeFdAsgpVta61f4APygGa6hvMBGwHN7lvkgQaFRNAKdR4XjR2ABBHxkxrQrNTwpUjmrotfCGHuKpJfil1WCqDutaDl4oBM4OQ1Wi+B4BnPS0gJCWkR4nQJw7LtUTHN8khD93z1cc0UyUxiFHUEtsB01y
V5RACw5a8Cfx1AC2nir8Ud0ISc3OIGLtjuI6npsvQEcnqtOv1IrKj3IHJ6iZKP9gI2LEwhp5emJsE6pZX5yOmVyank9NLSUHJ6WXA4Ob0sOg85vVaTM+T0P2O3v8t+5FIifFJrC+bWsVhQtALnIhJb3pJ+gMUWEltcsAzbx54hyWWTTkdjS1PLsG2hsT+zYTsMlW0O208KlZXSoUPJSHyB7amxqTz/YFhWLT0ZllXNzQXLqlYHw7Jq8bGwrGplBlhWNTgOllVtTIRlVWNTYNnGCxsIy6qlp8GybVujYdm2qXGwbNvOeFhWtTUNllVtjYdlW3UaD8uqpqbBso1qTYJlO22NgGWbzzcJlm33qymwbLtqk2FZ1eRMsGyznlNg2aat6bCsanEMLKtamAeWVW2Og2W7XuYoWFY1NBGWbbT3PLCsavRjwLLqHabAsqqlibBso59chGX1t7qPYN13ur3W9bXOVVa6g//sd0awdvRunckClhWR3IKCewGWVWr4EizbdfELsGxXkYuwrFKAINJKS7InLGs7AfKrVBrRhYhjYOPjptgk/7CSm+yGZoXcP4G1kxUnEY2NILM+mpOkGydFJ3khRXaSviwe5WrNH+Kc8iQhtDy7OqLqEEjGNuDCPl/dCUj2DBXsIimZYsQI5uww82LgPZXl61ZH4fA+k4JMnErYVYeKb1yLuefPNfx5urjLqF2DvD3OdilrUGnDtExPsdxmccO83KxMsVvnscNisR0eZ3I4mZu6qogFKKIr+W+Udb6Uvig+uCHzFFR02n3YCfkaGUYQPVF9i/xKix7BJdoytY4kdk9xCXAzk3BFbmE+V4AwN9UNwK0MdQHwQsPhf15sHuhf1uAM7P/XC/nxCiErUjJuTYSkPViEZ2P5Qfg28udQ3rI14icTCHvL3+g6SjqHiJ6U9o/wz/XUO67UZYxXqDFnBoivNzwTm5ae0biSil5MVYVHGgbGZBWrFZ8g0dVMAtfh9ilzwNEOlXSzsR3oDM7rWMLJfC3zW1vmeInK5cnsVoWYz7VQG/qIUbm0scSbbGhOLTG5S0zupIx6Jb+mTtkkXRwsqKXf+YWkDY1YiaFLcVmF4ctxWXSeJblWk+HL8jwxuXX5t5b2by0CVyRB4B8o8bbi8yWSVkzNeXmyWyJph2j0NJlctU3LEpx3YSvVqQYx5chWzkiTjm3VJD+bKv2ybiyRtJcOFepIIIQYWOR5Dc/aBDOIcll2w9GUy4aFZbCeOSUumzxGh+JByZLLvkVnRYyBT4qz1Ts7cvMpelO2lIKDKVtq6cmULdXcXJQt1epgypZafCxlS7UyA2VLNTiOsqXamEjZUo1NoWw1XthAypZaehplq21rNGWrbWocZattZzxlS7U1jbKl2hpP2WrVaTxlSzU1jbLVqNYkylanrRGUrebzTaJstfvVFMpWu2qTKVuqyZkoW816TqFsNW1Np2ypFsdQtlQL81C2VJvjKFtdL3MUZUs1NJGy1WjveShbqtGPQdlS7zCFsqVamkjZavSTi5QtwbUqo2SXSFpxGBlB2bJd+JDblC24kpuELXz0El1LxozMwtfC7XqxtXDdxAzBSyTtIeeuT3Ivvwjr1C4cBOuU9MLRsE7DwhRYpzQ1CYMtrcxHn6lMTqXQlJaG+u7KgsN9d2XReXx3tZqc8d0tkbTLsM2SnzBpySF4WUlXAJ7LsC1TPzca5BMatsNQ2fKV0g+fGiq7RNKG0f4QIvM9tLHqKC2NW46vXh61HWS21WBYVukhq7GwrGplBlhWNTgOllVtTIRlVWNTYFnVEgGheN3RZh+NeN3TYFm1JmRrNCzbNjUOlm3bGQ/LqramwbKqrfGwbKtO42FZ1dQ0WLZRrUmwbKetEbBs8/kmwbLtfjUFlm1XbTIsq5qcCZZt1nMKLNu0NR2WVS2OgWVVC/PAsqrNcbBs18scBcuqhibCso32ngeWVY1+DFhWvcMUWFa1NBGWbfSTi7DsrJG01jvdWZvB2uTo7hJJu0Oi59cWSYtUl+cjafHlEklbBn4tkbQ/u0haefAfG0oreP9FxPHVvy40sB7AADSZgnwV8c8ijK5YKmi6PshIOLrwCh9cDFcjiLrugRgVRVeoiVISZlHBK8tDomLX1i2LkIane4paXnIVAyFTYwHruYqRqlj3LZJxEFK7rVzFCEAVCgZ08JW5iunFU4Z6mapYjz1kCbaC1HLcJHRcP4odw/RSN2BxrDupZdiBlZqM8IAiVXFkOHZg+7pp2YET60aQIO+wbvq6boeWqbtx6lqomYEyZSxsmap4wA1nSc/7u1O2SbT96agdkZpXJDv5llFYrhZvAMBQbpPZ0hX3frjeqXfLnZX1CoN9MaId13SdZURL1w+fOC+OaNcLjGkjesDoLEd0mNqJHydW4odIJuanfpQkVqrHmA4iPQqQPdl1jMDoHtEDbvj6RnTvhxs4op215b7OEW0hgp8y18s1+g9QEP39A7AmfIRAdZFankep2YZpBhQyfZvtKLNktg13YYzLhm04pN+rLD1qwyEq88U/N26fjlOMi/hQe9ASaQYWTlLn0/TJNhLTRJc2dNkK9Rae0iIPv6H91rQ3cjm2qHrvwzgo4lHHJ9Qs2nIGUWhZlWkEFGlkRv5JYXEy/UQaGsw+keVGkE9kyZmc2GU9zlBP/npezWWg71reiS/jrzWgqDHJ9I4nqpcbHE6kFJ4cTaRYmyuYSDE63Gld6xrjfdZ1I3O4rOv2Rnqs6yamOqzrtib5q+uGBrur64UnequbpsY7q5uWRvqqm2YmuKrrpiZ6quumJjiqGzWa4KeuW5roplYqNc1L3WFqjJNafbhpPupmf5rkom5WbLqHum5xLgd1s29QLccl4GssUzO4p+t1G+WdrhuYyTldNznSN91+jeNc03U7Uz3TSlPP5Jiu2/wofun6DSa5peuGpnqllQ5y0SktYoU45glpZlM/L+9skgQ0Eu+Z/tmse8KMCxHol5zStQq+qO7cvvYlced2icvazrXrccYYEScE1HVMnJDwYb3+bHuUhDHaZLGQx+2rqql2utfnOrN82yR8dnGdVcqbl1xnrmEZ8I9NcJ0NAM1LoD1mbqSnYew7hhUlLAosL42NyHUCIzEcM40Z/rF9Hy+y7TobcMPXB7T3frhhQLuB+f816uTCdWZ6rhksI7q368yBsDAiTCeM6AGjsxzRLIhTT0+ZnbpxZAaRG3jwftuBn3j4EzkxC5yQsW7X2YAbvr4R3fvhBo5oQ276Xt0a7VsWBvUyonuPaNO03Wlr9IDRWY7oFGQYw7PiKPAsPzENi+l+xEKsy5EZGrofp7prJGnauUYPuOEsI/pbht0EvK7a3f5R+2q/S7NbbbMPk1zLdpnGpeW1cJdoEKIXBJjr73ff776uvjswbbdHEUjRHzRK463td1q2vd8fjm+0CLSZxzu2g6FDftR2jCUsmZUx07u9hkwS9tpEfqDX6V/3Tc90FHn8/K7Tv277hk46D9K9TvkMRnpyZdFRbmRei3+yX13lwJm6roNmMGHZHzDgy0kisGymW16sx2Fgp9gD2
LEeOb7u2LrrJlEQRpFnuRG9kvZGfsANl0kC4Gvv9ho4Sehr+3XS6nxTt91lklDiXS7S6gwPp+pJk8SAAV9OEqalp7YTpW6ogxabhj7O+QyH/1S3EhOkW8MKdd3jW8L2JDHghsskgUmid3sNmyQIh36dk4Tp+nZQgwTPMvVwbjYtJM8rthIf/rGHT3QHKt84Zli9/KhNhajQP3lX0WLr6V7gXWTrlc95lq9Xbwli7BUtPaVVxjP26rW5zNmrXzmItVc1yWjpqKaJKdpRla1J3L3KzHzsvZrNqfy9ytRQBl9VcjiHryo7D4uvXpePzeOr7vWqmXytx+jN5VNLDmbzNYpP5vM17M3F6GuYHczpa5Qfq0TTMDMDr69hcRyzr2FkIrevYW0Ku6/52gbK0TSKT2P4dRgbzfHrsDWO5ddhaDzPr2FsGtOvYWw8169dq/Fsv4ataXy/ZsUmMf66jY3g/LUecRLrr6N/TeH9dVRuMvOvYXMm7l+rplPUaVrGpvP/GibHMAAbJubhADaMjmMBdr7SUTzAhqWJTMBmo8/DBWxY/RhswMYtpvABG6YmMgKb3aUfJxBorb/WnUmcQE4btGCG0wYvCNWoVXyJFdh59Qu8wM4yF5mBagnUfwQ30LMBaQzXEP+35wbWOt9r4x3o8NEZwSKr0ddbAB6eDVGLwluA8VLoQ9wIJyM+eUlWYwDyX3EDyZHoOmbimuAe+EYchHHAICPFQit2o9RK7TSOfRLvaHsLdN9JmBvHdpgGZmjGuNr0Et1wjSj0Q8N0zcT3DC9E4Vm8Bb//4Z5oBX/M8iMAuv/DdoyrxnKOgRbvH/DrLZuVKdD7CUlSJr8LEXI+4C3M0ihfgmTxNbaMCKc+ZvvdO2qLKMzZR1Aa6f1ovb0d6tL4+iY51/H0hVzVf5IzoNcDp88E3kTcf8KqeBO6E0MSxkxizzRDBoaV4wUhWFYBA23Sg5yRETpB0E2AHnDD1zaeez/awPGsv1IelG7bnmUtAQ0DxrMBvZVJm5ag/9gsx7MVG6kOyrPj2VAuCi3LjuwYQm6W5bEEi5RrmxB1O6MFlpi673uek9gGMahj5hheFAWJzgzdDRlMeZaZWu5cm5b/zPIw2jDtdJ+ER6zKdyz+oEVPWgIS5WlzBGVSciRxw9k0wXo/ZLVvGfAiZpnn/vmbud5POGy6M/W1wwGCV7Z90UHVMO36dHeWrOHrvok0YAVXo8jENpKqUSs+ipPAa/OvJWpAsCxwPe+SrJKSro56xyOOK6TL1NGKXFhJtPGEFhlP0qi9kcscjdqFgygaZWOMZmg0LEwhaJSmJvEzSivz0TMqk1PZGaWloeSMsuBwbkZZdB5qRq0mH5uZUd7qVRMzmk/Rm5ehFBxMy1BLT2ZlqObmImWoVgdzMtTiYykZqpUZGBmqwXGEDNXGRD6GamwKHaPxwgayMdTS08gYbVujuRhtU+OoGG0745kYqq1pRAzV1ngeRqtO42kYqqlpLIxGtSaRMDptjeBgNJ9vEgWj3a+mMDDaVZtMwFBNzsS/aNZzCv2iaWs6+0K1OIZ8oVqYh3uh2hxHveh6maOYF6qhicSLRnvPw7tQjX4M2oV6hymsC9XSRNJFo5/04Fy8mLPdfGv67wzoMAVrPTirw2S8Q1IK21zrLyUHUmr4EuWi6+IXGBddRS4SLpQC4/gWI3O2/3vzLdSO99qwPK5DsoRwl0liLiY9CODrc03HmeaJHOCFKD0XuGfAUtuLDNtC9hMzNW2g0gz/xMTC8G0/iXTD7qZbDLjhLAg9MQv+cooOInHJRrAuPoKzovdz9cbl1dn/9Y1lw/bdhTvVyw1JY9lxXb/0QkK7pUGd6pGRaMC4LMdylEJuwUh0zwKrQPcTlzGou3mGHhsmJE18m8UmM72okzo14IazjWWRgQh8qY83lHs/1rChbEFQjZNnX91Qdg0PuzHUfJFIfFEikYayaXg+CYny5GLwrzWGMj55iQU5YFhWQzmJY8c2mW0YvmVaYBekDuKwfYs5XpKYJKCQmrpJRMY2CzJJXbBGMAnFNmQVmBUGOkM5O4kjBnpCwqzAhcgizQOzDOW/nO5JKEm7f7rmxEfz2r/2ZmU99n6iij3gRlCiM2LPD+3AjRIdf1lkpaFhQREgcYPUNmITtKu5GoH2JlnFemzlWbtOD/vdUTvlcBSipXhDEXc0YsdwG/4wa3P1fvaquVIHvco3LfjfTStMQ8+K9BD/Y35kgZgS+35qGmDXztVcf8h+0LKjpMtGp1tinWz3Cdvk10027axN0/s5q6bx0oiWzQR/kA3MjL0gjpmH9IChGXqB7iW+G5mxT0I0swwn6knffPvl3zTQh3MQaLXjHt0GumTgEu8gR0a9Zr/bPEGnLD2w/A7ff4A4WZh/DGWy3g9ftVecQi/O8Zwg9GzmoH2QUg0JFFMov+BYYTqWD86lyygH1mzttQX0SgMr2cf58UA/wdnY4mVf79gPx1m7U+9nrZrHiUK0goUGANETpHQDs5Lterqh2w4k9hxwQg3Xtmebnf+T3TPeYzTavsZ7SN3p1/q1NWs79H6oqh0GLIuzdBPQuyCHnR0xcHJGKn4xy/Pw8KTJPJj4kEj8IjfmEdlgs/w6zmZtpd6PPGwzShrwr5Lv5TgOHZWWzWgfLVBsRr0gcMxpUqBR/41luRnVHZDRUy9xncjxDCt1vFDH9pKFLgJyYoRV6VEa+zyqpr0ZHXDDWYb5f7PHYnsl1895h3Df9hsyhD3uGHiNAt26gyTIwBqWIdx7CLuIRpsUcDJgOJZD2AtYBB001wUylGJvz0KTuWaIjZnj6KGNDNepGQAK7hzCA244yxDu2LHQnjfFkUEenIqdMW3yPs5S3fuZh41zBF2/Sh09Guem59cy3n4FleQje3WavEjrDkQnQFxrLf9sCQTR7hhD4HTYIG7y7ni8z9erVXifkRv27hRdY3lbcR6q6tbhxdQ0GwIUeuA7bVjErzcScANZCx9swxyn3xvBSMPNxO/4ImF5fMgE8WZ99cf949sNeHQbRJNut6ddFvOYSm0TPuFASN2fnxTtX9MiR4rAsCnvQ0dGXqlaGhgs0Py1JTfhkSJDERn7Vrfe6nBPeGvLgbTAq8Q0ITeMxC/VGvSfbMOWvvnAO+nZvnkfHsJtrvZN2R2ndyVb5pt6dfA4upIZvHZ4/J81yZUS5PxsYnp6KS4MR3YPn1c56d3BPYX5CDLAFuTDY+iHI2gO4HUY+UZgO7YBnWcvdBM9BpzLKOdAhZRD+Rwh/yESEvhMDxI/TUIjBcYbBSyxg8S28d/YoUNm+3Bip/jOcnVPh3ETcKcbRriLlVi6A8zdsyFvnlgxzeCz7GwI2nsvBt57jf+LeQpbmL32F4ARmNivD+x/T4CGeX6CLzf5/g3C8gCPvkWMDvA+/h2gUkz8W7EQ0MYIIOD9fpezXD3ppOEmZ+MWs5VsqlXvBqrBf4Eb656rM9AGEMYc6XiBga07uo4AQ2SGSeBLBmpKR4ZZ2vR3p+19HSrtALfmaAnQIfo9VtUSA7rzLC3xJ3a4ZRrRDcuu8kvT1uB4
2HLsT2xz+LTPu1fvzkjbEk62iLFrOB5OU3tV74YZuLm2X/EGxvQU2YbN5lsx2pcd9pldDBK+YMLEqgG+Y87IMbU7bSOKaKTIUBoFN3LCpBjHMVv6FWfurkzaDlBApGuY2Ge6LiSnjtvNjWqydkJQaCK0/+KGhJ0kS9MRJa+pHKpxHx7juzHleUF6jjw/sYaBfgeclWS38+aoNzWFaMA3Xr2HzT7+gBeyltPuEZg7fT9ovuGnl1psqszOQq9BpnwZFZcqErWMj0uV1bj8/uVFg+JRqQwi3Tab/SO8fmhh8TMcEpdfFrVTvqqVnhKHys1MikHlFuaLPxXmpsaecitD4055oeExp7zYPPGmsgbDY02jffKE8YZOdOm0j6zB9nkVQYEIFAuqmGI7QYPiEh4I0o0rKJfciK3ljdCmcvxIj6I41pFZLAliW4dvNIFiVwgmKnxDoeeErhVZXLALG+Rb5N66Wgta+LaIhCg+kHvWy+PlDHpTTvXFzhc3PLCHjD3y+lKc0Ax2yUzL8BS7Rd3qUSrT6ltO8rwhZIWLILxxVS1D+HpvobHA8gMhpAnCiBEWVyyp6zpqQZ8XIIboTgPu0FxgijvQGolbG27g+j7Os6OWGVn6i/ELTa06lxeb2oWDFpxykzJ60WlYmLLwlKYmLT6llfkWoMrk1EWotDR0ISoLDl+MyqLzLEi1mpxZlP6MuNhd9iOHJjCQcjC8bsJkm2GbzjeDgKPpKWhk0yDrxKkkBp/SBv4CRt9SLylqtwzgx567xvJ9lnvN59X+SC4Emhx/QkPy4Xh51hcb0NLUMoBrB8FG2/w8BvD9IXvgBz55vOu3PFW+iEkuLpIQKe885kyPTt1bhqTs1PQDP8fzojBBoiKXh8W5feZkOZJ2peaSJGlbHixL0jYxVpqkbWkGeZK20XESJW07E2VK2ganSJW0rZFACLpttNlHI/vtNMmSdo3I3mjZkm5z46RLum2Nly9p25smYdK2N17GpG2Ldsq34Y8CcqqLkYyb3abJmXRUb5KkyVl7I2RN2rYk3PC8wtFT7JWOEtuYMMSaRqcd5rvgAQEAy2/GveWZZE66mnSK1EmXvelyJ22rYyRP2lbmkT1p2x0nfdK2I17yKPmTtrGJEihtgyX4l9drqOiYjOvbH0MKpV39KXIobWsTJVHaBpOXZFHMt7r7zjQgZrI2OWVMaAkr8K/51rDf6dBE8dYO1zzh9LTLCPGFVDStWr4kjXKuwAvyKOeKXZRIaRXC41ZpaQReLSNIETBaLbpYME47+NT1N1eP5KDDOlz7qFjogXN+83S845DKXZjfiKFZeOLpk2rQS/c8ffiYfciqUxo+uEcAWV5+Io5BVQW22eGwx9GfuI+ixnsEDsmb1WolzkBU5dr3yiPwX6SadIvh+BMwIMoG0YnuloQgQZ8sfxXobtSf6rOgu5LusaC7nSTeBjKTL+huxWg+IDIue8BMXR6OJaN4QXdB/1Z7Ds3IvbggHHeS3v3LO5NG11wG8DKAB2LwUwbwgu4u6G7vmJfVgu5ensvPeQEWdLdzUj/XXAQWL+gujoHDQtI4EW9Bd8e4CQsa2ILuqtQygc+Om/TIOUCEstXzr8FGviPMHDA0GFwj+ciy8OqZ4ISfrq+vn4k1RmYFQjuulgu6O3iaWdBdJYB4taC7alxxwf9d0N1/C3T3ZpPtwJIBxy9nm5T+vRNw7sDdSxl9A8SYQJYOS72CblCcI+Yd5QdHvsBWyYse/WAdHGtOjFTo5uNrW7RbxeBuWZ9qvO1Ylg2TgeA+ul2UipMlGC3IE+OtDqefk4tChDMUPhb+GyXNLIMPig9uyDxFN512H3bgpBa7j+QmQgxGlVuPgjnw3RK/1BhyAu3koS7TWajczCQGqgg4wrEJGuMgw1KGxJ+eeRgQ7SsVN8Hl7WXtuSY7F/7d4peqGRZ+xNa0iM/oAj7TQGM7TJKM6Ob0G36FgiiTv0K1N74LdxSVlFKsEC6gkd1fD+X1hhNbrhcs4cSDBHvKcGLyai/hxFzqqEVrKAKZr/8J4cT0HjDnLsHEfOMwyHlYW06XYOLSZyvW9qme/n+3xXi+YOIi4LgWTCwjeavQ4doHSqCw+HwJAWaVqJcMWW4fxaoj6uUt6jlvS8fxtDqHjTM5/Ay2hACXqy9m/4VjgvPY5a7XZHTNdoYrN0GTz3GlpSUEeAkBBk5RSooWHWPoPq/sUAtJrJNP0JwTpu79lgEsYvgXkthCEltIYlJPsYgNX0KAB/r3lhBghONc3tSeO6ItIcBA/Mc1XX0Xj/2WdCWMsyULLySxhSQ2ImPAagkBzo/73dg5cAkBrmGBrz8E2JRRwgtJ7BMniSF50ZkQYHxTRDEvIcDn2fpLBOEzJ+ncZJysTUEKC7qbwLPz89CHo6cg5tsi8Cj5Jwu62wqbnlEifHHPcKpOA+peQoDHnegXgcehMYZLCPC4nraEAHe67M4hnksI8LhutoQAgyg+rukWdFdmgFjQ3QXdvcylF1zEej+p0Nlxg29Bdxd0F2s+4UN5kfAMv/+2Yk2tW9EJUhFyEXgUiT9HCTzOg+4uIcAvzJcdHOtWkO5ckbQ8iVPL+viAWhGm2+ad/1xDgGWei7ExwCJ0oWgu7hj5F8Y0FkEYry8xt+U4hoH1QIpRfgM14SU3Kq2BHRne1cTcum8iFlboD1OIrEzle1N+IuQZSGFrRRI6+apMqyYTcw/Iq1Ym5kbuPt1mgev6lhU6iZNEuq8noZtSnvCAeVbg2xEzg4qEc7X+DpoRdyFcZ73zTs+URLp/btIZEnPPkEG5d/t80nm5Z2iI3j1zYCrpgo7w6uZJ27cdlwbVa54nfdsLHCild+V/w6MNVXSj3c99+LTZi6SSyuzoGbaJJVnMhdj9NmZHfNKeHTHpUhJkOTl6AYt8GxOdZ5qpZ0UsNJlrhomFFUsPbSe1UjNwLHon5eQYIK2pa4S2mcaumVABQ0+YEZlxajt+asVWAmsm5T2VMFRtchxww1nyyh8PTxrln2YHbCW1+6cjy488w3x2YJQiFJWcmh6+0Ntb9X62YePZQKICnZIQvL7x7AW+5yzjGWOuVFm7NJ6Rt1WnHAvZj1DlGTWeB4zNcjxHhu8GoWelSWToOmqg+76NwRyETqoHtu/GBqYBl7LUt8fzgBu+uvHc+9kGjmcdai+vczy7thG89nPMP3F9di3HxjCeMJ4HjM1yPJuJ5+K8wqLUs33TDQwnjpmhR7afJKEXBMy2bRZZVud4HnDDWcYzHV62QJSQ9lVL9nF+PNBPgP+0P/Kj3u3/gaLNITzuD9c79sPxes71uvezDhvfSCxk8cRCr2+9dnx0mGW97r1eG7aNNOtifAOmaOy/8clL++8BY7Ua3wGL41RPWYI9uquHTHec0LEt3/NNz9RjFkeh4Rpp5/i2fFu3dAPp4Z0gDEKdRSFzE9NPo9SKEs/ns4QR0oZ/lvH9p/0D0/abRKONd64d99oKv93w32YdzL0frEIVTB3PbMYAc3TTSsIk9SM0he14ZmKZiWMarhNGlhHP1RZ/Od3f7w90Crmm55/18Xs
[... base64-encoded betamax cassette response body omitted: a 200 OK recording of https://api.github.com/user/48100/events/orgs/praw-dev?per_page=100&page=3, recorded_at 2016-05-03T23:46:51 ...]
diff --git a/tests/cassettes/Organization_events.json b/tests/cassettes/Organization_public_events.json
similarity index 100%
rename from tests/cassettes/Organization_events.json
rename to tests/cassettes/Organization_public_events.json
diff --git a/tests/integration/test_orgs.py b/tests/integration/test_orgs.py
index eabb556a..b062d695 100644
--- a/tests/integration/test_orgs.py
+++ b/tests/integration/test_orgs.py
@@ -109,15 +109,34 @@ class TestOrganization(IntegrationHelper):
assert o.is_public_member('defunkt') is False
+ def test_all_events(self):
+ """Test retrieving organization's complete event stream."""
+ self.token_login()
+ cassette_name = self.cassette_name('all_events')
+ with self.recorder.use_cassette(cassette_name):
+ o = self.get_organization('praw-dev')
+
+ for event in o.all_events(username='bboe'):
+ assert isinstance(event, github3.events.Event)
+
def test_events(self):
- """Test the ability to retrieve an organization's event stream."""
- cassette_name = self.cassette_name('events')
+ """Test retrieving an organization's public event stream."""
+ cassette_name = self.cassette_name('public_events')
with self.recorder.use_cassette(cassette_name):
o = self.get_organization()
for event in o.events():
assert isinstance(event, github3.events.Event)
+ def test_public_events(self):
+ """Test retrieving an organization's public event stream."""
+ cassette_name = self.cassette_name('public_events')
+ with self.recorder.use_cassette(cassette_name):
+ o = self.get_organization()
+
+ for event in o.public_events():
+ assert isinstance(event, github3.events.Event)
+
def test_members(self):
"""Test the ability to retrieve an organization's members."""
self.basic_login()
diff --git a/tests/unit/test_orgs.py b/tests/unit/test_orgs.py
index dca92087..4e920ea6 100644
--- a/tests/unit/test_orgs.py
+++ b/tests/unit/test_orgs.py
@@ -1,3 +1,4 @@
+import mock
import pytest
from github3 import GitHubError
@@ -215,7 +216,18 @@ class TestOrganizationIterator(helper.UnitIteratorHelper):
'url': url_for()
}
- def test_events(self):
+ def test_all_events(self):
+ i = self.instance.all_events(username='dummy')
+ self.get_next(i)
+
+ self.session.get.assert_called_once_with(
+ 'https://api.github.com/users/dummy/events/orgs/github',
+ params={'per_page': 100},
+ headers={}
+ )
+
+ @mock.patch('warnings.warn')
+ def test_events(self, warn_mock):
"""Show that one can iterate over an organization's events."""
i = self.instance.events()
self.get_next(i)
@@ -226,6 +238,10 @@ class TestOrganizationIterator(helper.UnitIteratorHelper):
headers={}
)
+ warn_mock.assert_called_once_with(
+ 'This method is deprecated. Please use ``public_events`` instead.',
+ DeprecationWarning)
+
def test_members(self):
"""Show that one can iterate over all members."""
i = self.instance.members()
@@ -281,6 +297,17 @@ class TestOrganizationIterator(helper.UnitIteratorHelper):
headers={}
)
+ def test_public_events(self):
+ """Show that one can iterate over an organization's public events."""
+ i = self.instance.public_events()
+ self.get_next(i)
+
+ self.session.get.assert_called_once_with(
+ url_for('events'),
+ params={'per_page': 100},
+ headers={}
+ )
+
def test_public_members(self):
"""Show that one can iterate over all public members."""
i = self.instance.public_members()
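The unit test above pins the exact deprecation message and warning class. As a rough, self-contained sketch of the wrapper pattern being exercised (illustrative only; `OrgLike` is a made-up stand-in, not the actual `github3.orgs.Organization` code):

```python
import warnings


class OrgLike(object):
    """Stand-in object demonstrating the deprecation pattern under test."""

    def public_events(self):
        # The real method pages through the organization's events endpoint;
        # an empty iterator is enough for this sketch.
        return iter([])

    def events(self):
        # Emit the same message and warning class the unit test asserts on,
        # then delegate to the replacement method.
        warnings.warn(
            'This method is deprecated. Please use ``public_events`` instead.',
            DeprecationWarning)
        return self.public_events()
```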
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": -1,
"issue_text_score": 0,
"test_score": -1
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | betamax==0.9.0
betamax-matchers==0.4.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
-e git+https://github.com/sigmavirus24/github3.py.git@1a7455c6c5098603b33c15576624e63ce6751bf7#egg=github3.py
idna==3.10
iniconfig==2.1.0
mock==1.0.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
requests==2.32.3
requests-toolbelt==1.0.0
swebench_matterhorn @ file:///swebench_matterhorn
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
uritemplate==4.1.1
uritemplate.py==3.0.2
urllib3==2.3.0
virtualenv==20.29.3
| name: github3.py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- betamax==0.9.0
- betamax-matchers==0.4.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- idna==3.10
- iniconfig==2.1.0
- mock==1.0.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- requests==2.32.3
- requests-toolbelt==1.0.0
- swebench-matterhorn==0.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- uritemplate==4.1.1
- uritemplate-py==3.0.2
- urllib3==2.3.0
- virtualenv==20.29.3
- wheel==0.21.0
prefix: /opt/conda/envs/github3.py
| [
"tests/integration/test_orgs.py::TestOrganization::test_all_events",
"tests/integration/test_orgs.py::TestOrganization::test_public_events",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_all_events",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_events",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_public_events"
]
| [
"tests/integration/test_orgs.py::TestOrganization::test_is_member"
]
| [
"tests/integration/test_orgs.py::TestOrganization::test_add_member",
"tests/integration/test_orgs.py::TestOrganization::test_add_repository",
"tests/integration/test_orgs.py::TestOrganization::test_can_filter_members_by_role",
"tests/integration/test_orgs.py::TestOrganization::test_conceal_member",
"tests/integration/test_orgs.py::TestOrganization::test_create_repository",
"tests/integration/test_orgs.py::TestOrganization::test_create_team",
"tests/integration/test_orgs.py::TestOrganization::test_edit",
"tests/integration/test_orgs.py::TestOrganization::test_events",
"tests/integration/test_orgs.py::TestOrganization::test_is_public_member",
"tests/integration/test_orgs.py::TestOrganization::test_members",
"tests/integration/test_orgs.py::TestOrganization::test_public_members",
"tests/integration/test_orgs.py::TestOrganization::test_publicize_member",
"tests/integration/test_orgs.py::TestOrganization::test_remove_member",
"tests/integration/test_orgs.py::TestOrganization::test_remove_repository",
"tests/integration/test_orgs.py::TestOrganization::test_repositories",
"tests/integration/test_orgs.py::TestOrganization::test_team",
"tests/integration/test_orgs.py::TestOrganization::test_teams",
"tests/unit/test_orgs.py::TestOrganization::test_add_member",
"tests/unit/test_orgs.py::TestOrganization::test_add_repository",
"tests/unit/test_orgs.py::TestOrganization::test_conceal_member",
"tests/unit/test_orgs.py::TestOrganization::test_create_repository",
"tests/unit/test_orgs.py::TestOrganization::test_create_team",
"tests/unit/test_orgs.py::TestOrganization::test_edit",
"tests/unit/test_orgs.py::TestOrganization::test_equality",
"tests/unit/test_orgs.py::TestOrganization::test_is_member",
"tests/unit/test_orgs.py::TestOrganization::test_is_public_member",
"tests/unit/test_orgs.py::TestOrganization::test_publicize_member",
"tests/unit/test_orgs.py::TestOrganization::test_remove_member",
"tests/unit/test_orgs.py::TestOrganization::test_remove_repository",
"tests/unit/test_orgs.py::TestOrganization::test_remove_repository_requires_positive_team_id",
"tests/unit/test_orgs.py::TestOrganization::test_repr",
"tests/unit/test_orgs.py::TestOrganization::test_team",
"tests/unit/test_orgs.py::TestOrganization::test_team_requires_positive_team_id",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_add_member",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_add_repository",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_conceal_member",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_create_repository",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_create_team",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_edit",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_publicize_member",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_remove_member",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_remove_repository",
"tests/unit/test_orgs.py::TestOrganizationRequiresAuth::test_team",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_members",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_members_excludes_fake_filters",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_members_excludes_fake_roles",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_members_filters",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_members_roles",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_public_members",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_repositories",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_respositories_accepts_type",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_teams",
"tests/unit/test_orgs.py::TestOrganizationIterator::test_teams_requires_auth"
]
| []
| BSD 3-Clause "New" or "Revised" License | 519 | [
"github3/orgs.py"
]
| [
"github3/orgs.py"
]
|
|
dask__dask-1137 | b066b73cfa68e31848a0ff0d6348f6a0df73d4a7 | 2016-05-02 22:00:14 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | eriknw: Looking now.
eriknw: Great. I think the tradeoff is worth it for these functions. Not passing an optional `dependencies` to non-critical functions also seems reasonable as a way to avoid complications.
A downside is that now I don't know the next "obvious" thing to do to optimize these further! What do profiles look like with these changes?
mrocklin: I recommend pulling out snakeviz and running the following:
```python
In [1]: import dask.array as da
In [2]: n = 12
In [3]: x = da.random.random((2**n, 2**n), chunks=(2**n, 1)) # 4096 x 4096, 4096 x 1
In [4]: for i in range(1, n): # 4096 x 4096, 2048 x 2
    x = x.rechunk((2**(n - i), 2**(i + 1))) # 4096 x 4096, 1024 x 4
...:
In [5]: y = x.sum()
In [6]: %load_ext snakeviz
In [7]: %snakeviz y._optimize(y.dask, y._keys())
```
`inline_functions` takes up about half of the optimization time now. Really though, I think we're at the point of diminishing returns. This computation takes around a minute-plus to run on a distributed machine.
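For context, a minimal sketch of the dependency-reuse idea being discussed (not code from the patch; `dependencies_once` and `inc` are illustrative names). Computing the mapping once lets later passes such as `fuse` and `inline_functions` skip repeated `get_dependencies` calls:

```python
from dask.core import get_dependencies


def inc(x):
    return x + 1


def dependencies_once(dsk):
    # One pass over the graph; downstream passes can reuse this dict instead
    # of re-deriving dependencies from the task tuples on every call.
    return dict((key, get_dependencies(dsk, key, as_list=True)) for key in dsk)


dsk = {'a': 1, 'b': (inc, 'a'), 'c': (inc, 'b')}
deps = dependencies_once(dsk)   # {'a': [], 'b': ['a'], 'c': ['b']}
```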
mrocklin: If you are looking to do some optimization work, we're close to the point where we might want to Cythonize a couple of functions within the distributed scheduler. If you're interested, let me know and I'll produce a benchmark script for you to play with.
eriknw: Okay. I'm up for Cythonizing some functions. I'm taking a vacation in a few days, so it'll probably be a few weeks until I can do the work. Thanks for letting me know this would be beneficial to do. | diff --git a/dask/array/optimization.py b/dask/array/optimization.py
index 8d7224fd4..6a201ead5 100644
--- a/dask/array/optimization.py
+++ b/dask/array/optimization.py
@@ -21,10 +21,11 @@ def optimize(dsk, keys, **kwargs):
keys = list(flatten(keys))
fast_functions = kwargs.get('fast_functions',
set([getarray, np.transpose]))
- dsk2 = cull(dsk, keys)
- dsk4 = fuse(dsk2, keys)
+ dsk2, dependencies = cull(dsk, keys)
+ dsk4, dependencies = fuse(dsk2, keys, dependencies)
dsk5 = optimize_slices(dsk4)
- dsk6 = inline_functions(dsk5, keys, fast_functions=fast_functions)
+ dsk6 = inline_functions(dsk5, keys, fast_functions=fast_functions,
+ dependencies=dependencies)
return dsk6
diff --git a/dask/async.py b/dask/async.py
index 4f9d92b1c..2d8fbb87b 100644
--- a/dask/async.py
+++ b/dask/async.py
@@ -434,7 +434,7 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
for f in start_cbs:
f(dsk)
- dsk = cull(dsk, list(results))
+ dsk, dependencies = cull(dsk, list(results))
keyorder = order(dsk)
diff --git a/dask/bag/core.py b/dask/bag/core.py
index 3348be39a..268a17931 100644
--- a/dask/bag/core.py
+++ b/dask/bag/core.py
@@ -64,7 +64,7 @@ def lazify(dsk):
return valmap(lazify_task, dsk)
-def inline_singleton_lists(dsk):
+def inline_singleton_lists(dsk, dependencies=None):
""" Inline lists that are only used once
>>> d = {'b': (list, 'a'),
@@ -74,8 +74,8 @@ def inline_singleton_lists(dsk):
Pairs nicely with lazify afterwards
"""
-
- dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
+ if dependencies is None:
+ dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
dependents = reverse_dict(dependencies)
keys = [k for k, v in dsk.items()
@@ -85,9 +85,9 @@ def inline_singleton_lists(dsk):
def optimize(dsk, keys, **kwargs):
""" Optimize a dask from a dask.bag """
- dsk2 = cull(dsk, keys)
- dsk3 = fuse(dsk2, keys)
- dsk4 = inline_singleton_lists(dsk3)
+ dsk2, dependencies = cull(dsk, keys)
+ dsk3, dependencies = fuse(dsk2, keys, dependencies)
+ dsk4 = inline_singleton_lists(dsk3, dependencies)
dsk5 = lazify(dsk4)
return dsk5
diff --git a/dask/core.py b/dask/core.py
index 275748384..ead939fd0 100644
--- a/dask/core.py
+++ b/dask/core.py
@@ -319,7 +319,7 @@ def subs(task, key, val):
return task[:1] + tuple(newargs)
-def _toposort(dsk, keys=None, returncycle=False):
+def _toposort(dsk, keys=None, returncycle=False, dependencies=None):
# Stack-based depth-first search traversal. This is based on Tarjan's
# method for topological sorting (see wikipedia for pseudocode)
if keys is None:
@@ -340,6 +340,9 @@ def _toposort(dsk, keys=None, returncycle=False):
# that has already been added to `completed`.
seen = set()
+ if dependencies is None:
+ dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
+
for key in keys:
if key in completed:
continue
@@ -355,7 +358,7 @@ def _toposort(dsk, keys=None, returncycle=False):
# Add direct descendants of cur to nodes stack
next_nodes = []
- for nxt in get_dependencies(dsk, cur):
+ for nxt in dependencies[cur]:
if nxt not in completed:
if nxt in seen:
# Cycle detected!
@@ -385,9 +388,9 @@ def _toposort(dsk, keys=None, returncycle=False):
return ordered
-def toposort(dsk):
+def toposort(dsk, dependencies=None):
""" Return a list of keys of dask sorted in topological order."""
- return _toposort(dsk)
+ return _toposort(dsk, dependencies=dependencies)
def getcycle(d, keys):
diff --git a/dask/dataframe/optimize.py b/dask/dataframe/optimize.py
index 0ae46d54c..d23a7ff2e 100644
--- a/dask/dataframe/optimize.py
+++ b/dask/dataframe/optimize.py
@@ -15,9 +15,9 @@ def fuse_castra_index(dsk):
def optimize(dsk, keys, **kwargs):
if isinstance(keys, list):
- dsk2 = cull(dsk, list(core.flatten(keys)))
+ dsk2, dependencies = cull(dsk, list(core.flatten(keys)))
else:
- dsk2 = cull(dsk, [keys])
+ dsk2, dependencies = cull(dsk, [keys])
try:
from castra import Castra
dsk3 = fuse_getitem(dsk2, Castra.load_partition, 3)
@@ -25,5 +25,5 @@ def optimize(dsk, keys, **kwargs):
except ImportError:
dsk4 = dsk2
dsk5 = fuse_getitem(dsk4, dataframe_from_ctable, 3)
- dsk6 = cull(dsk5, keys)
+ dsk6, _ = cull(dsk5, keys)
return dsk6
diff --git a/dask/dataframe/shuffle.py b/dask/dataframe/shuffle.py
index 0b4c49ab0..fb797a503 100644
--- a/dask/dataframe/shuffle.py
+++ b/dask/dataframe/shuffle.py
@@ -144,7 +144,7 @@ def set_partition(df, index, divisions, compute=False, drop=True, **kwargs):
dsk.update(index.dask)
if compute:
- dsk = cull(dsk, list(dsk4.keys()))
+ dsk, _ = cull(dsk, list(dsk4.keys()))
return DataFrame(dsk, name, metadata, divisions)
diff --git a/dask/multiprocessing.py b/dask/multiprocessing.py
index dce045998..a2f6b996f 100644
--- a/dask/multiprocessing.py
+++ b/dask/multiprocessing.py
@@ -68,8 +68,9 @@ def get(dsk, keys, optimizations=[], num_workers=None,
func_loads=func_loads)
# Optimize Dask
- dsk2 = fuse(dsk, keys)
- dsk3 = pipe(dsk2, partial(cull, keys=keys), *optimizations)
+ dsk2, dependencies = cull(dsk, keys)
+ dsk3, dependencies = fuse(dsk2, keys, dependencies)
+ dsk4 = pipe(dsk3, *optimizations)
try:
# Run
diff --git a/dask/optimize.py b/dask/optimize.py
index e557ccd73..c34b6e4c7 100644
--- a/dask/optimize.py
+++ b/dask/optimize.py
@@ -22,49 +22,80 @@ def cull(dsk, keys):
Examples
--------
>>> d = {'x': 1, 'y': (inc, 'x'), 'out': (add, 'x', 10)}
- >>> cull(d, 'out') # doctest: +SKIP
+ >>> dsk, dependencies = cull(d, 'out') # doctest: +SKIP
+ >>> dsk # doctest: +SKIP
{'x': 1, 'out': (add, 'x', 10)}
+ >>> dependencies # doctest: +SKIP
+ {'x': set(), 'out': set(['x'])}
+
+ Returns
+ -------
+ dsk: culled dask graph
+ dependencies: Dict mapping {key: [deps]}. Useful side effect to accelerate
+ other optimizations, notably fuse.
"""
if not isinstance(keys, (list, set)):
keys = [keys]
- nxt = set(flatten(keys))
- seen = nxt
- while nxt:
- cur = nxt
- nxt = set()
- for item in cur:
- for dep in get_dependencies(dsk, item):
- if dep not in seen:
- nxt.add(dep)
- seen.update(nxt)
- return dict((k, v) for k, v in dsk.items() if k in seen)
-
-
-def fuse(dsk, keys=None):
- """ Return new dask with linear sequence of tasks fused together.
+ out = dict()
+ seen = set()
+ dependencies = dict()
+ stack = list(set(flatten(keys)))
+ while stack:
+ key = stack.pop()
+ out[key] = dsk[key]
+ deps = get_dependencies(dsk, key, as_list=True) # fuse needs lists
+ dependencies[key] = deps
+ unseen = [d for d in deps if d not in seen]
+ stack.extend(unseen)
+ seen.update(unseen)
+ return out, dependencies
+
+
+def fuse(dsk, keys=None, dependencies=None):
+ """ Return new dask graph with linear sequence of tasks fused together.
If specified, the keys in ``keys`` keyword argument are *not* fused.
+ Supply ``dependencies`` from output of ``cull`` if available to avoid
+ recomputing dependencies.
- This may be used as an optimization step.
+ Parameters
+ ----------
+ dsk: dict
+ keys: list
+ dependencies: dict, optional
+ {key: [list-of-keys]}. Must be a list to provide count of each key
+ This optional input often comes from ``cull``
Examples
--------
>>> d = {'a': 1, 'b': (inc, 'a'), 'c': (inc, 'b')}
- >>> fuse(d) # doctest: +SKIP
+ >>> dsk, dependencies = fuse(d)
+ >>> dsk # doctest: +SKIP
{'c': (inc, (inc, 1))}
- >>> fuse(d, keys=['b']) # doctest: +SKIP
+ >>> dsk, dependencies = fuse(d, keys=['b'])
+ >>> dsk # doctest: +SKIP
{'b': (inc, 1), 'c': (inc, 'b')}
+
+ Returns
+ -------
+ dsk: output graph with keys fused
+ dependencies: dict mapping dependencies after fusion. Useful side effect
+ to accelerate other downstream optimizations.
"""
if keys is not None and not isinstance(keys, set):
if not isinstance(keys, list):
keys = [keys]
keys = set(flatten(keys))
+ if dependencies is None:
+ dependencies = dict((key, get_dependencies(dsk, key, as_list=True))
+ for key in dsk)
+
# locate all members of linear chains
child2parent = {}
unfusible = set()
for parent in dsk:
- deps = get_dependencies(dsk, parent, as_list=True)
+ deps = dependencies[parent]
has_many_children = len(deps) > 1
for child in deps:
if keys is not None and child in keys:
@@ -94,6 +125,8 @@ def fuse(dsk, keys=None):
chain.append(child)
chains.append(chain)
+ dependencies = dict((k, set(v)) for k, v in dependencies.items())
+
# create a new dask with fused chains
rv = {}
fused = set()
@@ -102,6 +135,8 @@ def fuse(dsk, keys=None):
val = dsk[child]
while chain:
parent = chain.pop()
+ dependencies[parent].update(dependencies.pop(child))
+ dependencies[parent].remove(child)
val = subs(dsk[parent], child, val)
fused.add(child)
child = parent
@@ -111,10 +146,10 @@ def fuse(dsk, keys=None):
for key, val in dsk.items():
if key not in fused:
rv[key] = val
- return rv
+ return rv, dependencies
-def inline(dsk, keys=None, inline_constants=True):
+def inline(dsk, keys=None, inline_constants=True, dependencies=None):
""" Return new dask with the given keys inlined with their values.
Inlines all constants if ``inline_constants`` keyword is True.
@@ -140,13 +175,17 @@ def inline(dsk, keys=None, inline_constants=True):
if inline_constants:
keys.update(k for k, v in dsk.items() if not istask(v))
+ if dependencies is None:
+ dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
+
# Keys may depend on other keys, so determine replace order with toposort.
# The values stored in `keysubs` do not include other keys.
- replaceorder = toposort(dict((k, dsk[k]) for k in keys if k in dsk))
+ replaceorder = toposort(dict((k, dsk[k]) for k in keys if k in dsk),
+ dependencies=dependencies)
keysubs = {}
for key in replaceorder:
val = dsk[key]
- for dep in keys & get_dependencies(dsk, key):
+ for dep in keys & dependencies[key]:
if dep in keysubs:
replace = keysubs[dep]
else:
@@ -159,13 +198,14 @@ def inline(dsk, keys=None, inline_constants=True):
for key, val in dsk.items():
if key in keys:
continue
- for item in keys & get_dependencies(dsk, key):
+ for item in keys & dependencies[key]:
val = subs(val, item, keysubs[item])
rv[key] = val
return rv
-def inline_functions(dsk, output, fast_functions=None, inline_constants=False):
+def inline_functions(dsk, output, fast_functions=None, inline_constants=False,
+ dependencies=None):
""" Inline cheap functions into larger operations
Examples
@@ -194,7 +234,8 @@ def inline_functions(dsk, output, fast_functions=None, inline_constants=False):
fast_functions = set(fast_functions)
- dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
+ if dependencies is None:
+ dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
dependents = reverse_dict(dependencies)
keys = [k for k, v in dsk.items()
@@ -203,7 +244,8 @@ def inline_functions(dsk, output, fast_functions=None, inline_constants=False):
and dependents[k]
and k not in output]
if keys:
- return inline(dsk, keys, inline_constants=inline_constants)
+ return inline(dsk, keys, inline_constants=inline_constants,
+ dependencies=dependencies)
else:
return dsk
@@ -515,7 +557,7 @@ def fuse_selections(dsk, head1, head2, merge):
... 'y': (getitem, 'x', 'a')}
>>> merge = lambda t1, t2: (load, t2[1], t2[2], t1[2])
>>> dsk2 = fuse_selections(dsk, getitem, load, merge)
- >>> cull(dsk2, 'y')
+ >>> cull(dsk2, 'y')[0]
{'y': (<function load at ...>, 'store', 'part', 'a')}
"""
dsk2 = dict()
@@ -548,7 +590,7 @@ def fuse_getitem(dsk, func, place):
>>> dsk = {'x': (load, 'store', 'part', ['a', 'b']),
... 'y': (getitem, 'x', 'a')}
>>> dsk2 = fuse_getitem(dsk, load, 3) # columns in arg place 3
- >>> cull(dsk2, 'y')
+ >>> cull(dsk2, 'y')[0]
{'y': (<function load at ...>, 'store', 'part', 'a')}
"""
return fuse_selections(dsk, getitem, func,
diff --git a/dask/order.py b/dask/order.py
index 60fd03787..06f602c5d 100644
--- a/dask/order.py
+++ b/dask/order.py
@@ -58,10 +58,10 @@ from __future__ import absolute_import, division, print_function
from operator import add
-from .core import get_deps
+from .core import get_dependencies, reverse_dict, get_deps
-def order(dsk):
+def order(dsk, dependencies=None):
""" Order nodes in dask graph
The ordering will be a toposort but will also have other convenient
@@ -74,7 +74,10 @@ def order(dsk):
>>> order(dsk)
{'a': 2, 'c': 1, 'b': 3, 'd': 0}
"""
- dependencies, dependents = get_deps(dsk)
+ if dependencies is None:
+ dependencies = dict((k, get_dependencies(dsk, k)) for k in dsk)
+ dependents = reverse_dict(dependencies)
+
ndeps = ndependents(dependencies, dependents)
maxes = child_max(dependencies, dependents, ndeps)
return dfs(dependencies, dependents, key=maxes.get)
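To summarize the API change in the diff above: `cull` now returns both the trimmed graph and the dependency mapping it had to compute anyway, and `fuse` both accepts and returns such a mapping. A short usage sketch (illustrative, not part of the patch; `inc` is a made-up helper):

```python
from dask.optimize import cull, fuse, inline_functions


def inc(x):
    return x + 1


dsk = {'a': 1, 'b': (inc, 'a'), 'c': (inc, 'b'), 'unused': (inc, 'a')}

# cull trims keys unreachable from the outputs and hands back the
# dependency mapping as a useful side effect.
dsk1, deps = cull(dsk, ['c'])

# fuse reuses that mapping instead of re-walking the graph, and returns an
# updated mapping for whatever pass comes next.
dsk2, deps = fuse(dsk1, keys=['c'], dependencies=deps)

# inline_functions takes the same mapping as an optional argument; in this
# tiny graph there is nothing left to inline, so the graph is unchanged.
dsk3 = inline_functions(dsk2, ['c'], fast_functions={inc}, dependencies=deps)
```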
| Tune dask.array.optimize
In some pathological (yet also real) cases, graph optimization can cost about as much as the computation itself.
I've been benchmarking against the following computation:
```python
import dask.array as da
n = 12
x = da.random.random((2**n, 2**n), chunks=(2**n, 1)) # 4096 x 4096, 4096 x 1
for i in range(1, n): # 4096 x 4096, 2048 x 2
    x = x.rechunk((2**(n - i), 2**(i + 1))) # 4096 x 4096, 1024 x 4
y = x.sum()
>>> %time y.compute()
CPU times: user 39 s, sys: 2.45 s, total: 41.5 s
Wall time: 39.1 s
>>> %prun y._optimize(y.dask, y._keys())
```
```
29816630 function calls (28569410 primitive calls) in 22.570 seconds
Ordered by: internal time
ncalls tottime percall cumtime percall filename:lineno(function)
604153 3.733 0.000 8.800 0.000 core.py:189(get_dependencies)
6240188 2.504 0.000 2.843 0.000 core.py:27(istask)
1312754 2.414 0.000 3.091 0.000 core.py:154(_deps)
3 2.146 0.715 2.146 0.715 core.py:284(<listcomp>)
299004 1.456 0.000 3.326 0.000 rewrite.py:375(_match)
909302/77823 1.331 0.000 2.017 0.000 optimize.py:211(functions_of)
2009082 0.625 0.000 0.625 0.000 {built-in method isinstance}
3 0.618 0.206 2.821 0.940 core.py:275(reverse_dict)
299004/77823 0.553 0.000 5.650 0.000 rewrite.py:363(_bottom_up)
299004 0.480 0.000 4.206 0.000 rewrite.py:283(iter_matches)
348156 0.426 0.000 0.426 0.000 rewrite.py:50(__init__)
874487 0.406 0.000 1.014 0.000 rewrite.py:8(head)
2303972 0.403 0.000 0.403 0.000 {method 'pop' of 'list' objects}
219136/51200 0.400 0.000 0.990 0.000 core.py:291(subs)
2250724 0.391 0.000 0.391 0.000 {method 'extend' of 'list' objects}
2 0.376 0.188 4.363 2.182 optimize.py:117(inline)
49152 0.341 0.000 0.673 0.000 core.py:307(<listcomp>)
4671438 0.339 0.000 0.339 0.000 {built-in method callable}
299004 0.270 0.000 4.477 0.000 rewrite.py:304(_rewrite)
794616 0.257 0.000 1.211 0.000 rewrite.py:81(current)
1 0.204 0.204 1.261 1.261 optimize.py:43(fuse)
745465 0.186 0.000 0.186 0.000 {method 'get' of 'dict' objects}
1484786 0.168 0.000 0.168 0.000 {method 'append' of 'list' objects}
2 0.133 0.066 0.752 0.376 core.py:321(_toposort)
1 0.127 0.127 1.249 1.249 optimize.py:16(cull)
348156 0.126 0.000 0.126 0.000 {method 'pop' of 'collections.deque' objects}
79871 0.125 0.000 0.185 0.000 optimization.py:32(is_full_slice)
745464 0.122 0.000 0.122 0.000 rewrite.py:119(edges)
1 0.122 0.122 2.202 2.202 optimize.py:200(<listcomp>)
595958 0.113 0.000 0.113 0.000 {method 'add' of 'set' objects}
1 0.106 0.106 5.805 5.805 {method 'update' of 'dict' objects}
225278/221182 0.106 0.000 3.578 0.000 rewrite.py:365(<genexpr>)
1 0.098 0.098 6.363 6.363 optimize.py:237(dealias)
176126 0.093 0.000 0.175 0.000 rewrite.py:19(args)
81919 0.086 0.000 0.086 0.000 {built-in method hasattr}
49152 0.070 0.000 0.135 0.000 rewrite.py:70(next)
79871 0.069 0.000 0.069 0.000 {built-in method hash}
47103/24575 0.067 0.000 1.377 0.000 rewrite.py:367(<listcomp>)
79872 0.066 0.000 1.486 0.000 optimize.py:287(<genexpr>)
79872 0.065 0.000 1.344 0.000 optimize.py:278(<genexpr>)
79871 0.063 0.000 0.132 0.000 core.py:9(ishashable)
77824 0.061 0.000 1.311 0.000 optimize.py:197(<genexpr>)
49152 0.058 0.000 0.084 0.000 rewrite.py:62(copy)
1 0.058 0.058 6.676 6.676 optimization.py:49(remove_full_slices)
1 0.054 0.054 22.570 22.570 <string>:1(<module>)
77823 0.049 0.000 5.699 0.000 rewrite.py:315(rewrite)
79872 0.044 0.000 0.044 0.000 optimize.py:40(<genexpr>)
28675 0.042 0.000 0.042 0.000 {method 'values' of 'dict' objects}
1 0.039 0.039 7.491 7.491 optimize.py:168(inline_functions)
98304 0.038 0.000 0.038 0.000 optimization.py:46(<genexpr>)
1 0.037 0.037 0.169 0.169 optimize.py:281(<genexpr>)
1 0.036 0.036 0.221 0.221 optimization.py:69(<genexpr>)
79872 0.033 0.000 0.033 0.000 optimization.py:71(<genexpr>)
1 0.033 0.033 22.515 22.515 optimization.py:14(optimize)
49152 0.032 0.000 0.780 0.000 core.py:314(<listcomp>)
79871 0.031 0.000 0.117 0.000 optimize.py:231(unwrap_partial)
49154 0.026 0.000 0.026 0.000 optimize.py:145(<genexpr>)
49160 0.018 0.000 0.028 0.000 core.py:246(flatten)
2 0.011 0.006 0.011 0.006 optimize.py:282(<genexpr>)
49152 0.011 0.000 0.011 0.000 {method 'extend' of 'collections.deque' objects}
1 0.009 0.009 0.009 0.009 {method 'copy' of 'dict' objects}
49152 0.009 0.000 0.037 0.000 {built-in method all}
77823 0.008 0.000 0.008 0.000 {method 'issubset' of 'set' objects}
79872 0.008 0.000 0.008 0.000 {built-in method len}
```
I also recommend looking at this with snakeviz
```python
%load_ext snakeviz
%snakeviz y._optimize(y.dask, y._keys())
```
```
| dask/dask | diff --git a/dask/array/tests/test_optimization.py b/dask/array/tests/test_optimization.py
index 37d02d38d..0ff08fb2f 100644
--- a/dask/array/tests/test_optimization.py
+++ b/dask/array/tests/test_optimization.py
@@ -69,14 +69,14 @@ def test_optimize_slicing():
'e': (getarray, 'd', (slice(None, None, None),))}
expected = {'e': (getarray, (range, 10), (slice(0, 5, None),))}
- result = optimize_slices(fuse(dsk, []))
+ result = optimize_slices(fuse(dsk, [])[0])
assert result == expected
# protect output keys
expected = {'c': (range, 10),
'd': (getarray, 'c', (slice(0, 5, None),)),
'e': 'd'}
- result = optimize_slices(fuse(dsk, ['c', 'd', 'e']))
+ result = optimize_slices(fuse(dsk, ['c', 'd', 'e'])[0])
assert result == expected
diff --git a/dask/tests/test_optimize.py b/dask/tests/test_optimize.py
index fb713f7f3..749a03c83 100644
--- a/dask/tests/test_optimize.py
+++ b/dask/tests/test_optimize.py
@@ -18,27 +18,27 @@ def double(x):
def test_cull():
# 'out' depends on 'x' and 'y', but not 'z'
d = {'x': 1, 'y': (inc, 'x'), 'z': (inc, 'x'), 'out': (add, 'y', 10)}
- culled = cull(d, 'out')
+ culled, dependencies = cull(d, 'out')
assert culled == {'x': 1, 'y': (inc, 'x'), 'out': (add, 'y', 10)}
+ assert dependencies == {'x': [], 'y': ['x'], 'out': ['y']}
+
assert cull(d, 'out') == cull(d, ['out'])
- assert cull(d, ['out', 'z']) == d
+ assert cull(d, ['out', 'z'])[0] == d
assert cull(d, [['out'], ['z']]) == cull(d, ['out', 'z'])
assert raises(KeyError, lambda: cull(d, 'badkey'))
def test_fuse():
- assert fuse({
- 'w': (inc, 'x'),
- 'x': (inc, 'y'),
- 'y': (inc, 'z'),
- 'z': (add, 'a', 'b'),
- 'a': 1,
- 'b': 2,
- }) == {
- 'w': (inc, (inc, (inc, (add, 'a', 'b')))),
- 'a': 1,
- 'b': 2,
- }
+ dsk, dependencies = fuse({'w': (inc, 'x'),
+ 'x': (inc, 'y'),
+ 'y': (inc, 'z'),
+ 'z': (add, 'a', 'b'),
+ 'a': 1,
+ 'b': 2})
+ assert dsk == {'w': (inc, (inc, (inc, (add, 'a', 'b')))),
+ 'a': 1,
+ 'b': 2}
+ assert dependencies == {'a': set(), 'b': set(), 'w': set(['a', 'b'])}
assert fuse({
'NEW': (inc, 'y'),
'w': (inc, 'x'),
@@ -47,13 +47,16 @@ def test_fuse():
'z': (add, 'a', 'b'),
'a': 1,
'b': 2,
- }) == {
+ }) == ({
'NEW': (inc, 'y'),
'w': (inc, (inc, 'y')),
'y': (inc, (add, 'a', 'b')),
'a': 1,
'b': 2,
- }
+ },
+ {'a': set(), 'b': set(), 'y': set(['a', 'b']),
+ 'w': set(['y']), 'NEW': set(['y'])})
+
assert fuse({
'v': (inc, 'y'),
'u': (inc, 'w'),
@@ -65,13 +68,15 @@ def test_fuse():
'b': (inc, 'd'),
'c': 1,
'd': 2,
- }) == {
+ }) == ({
'u': (inc, (inc, (inc, 'y'))),
'v': (inc, 'y'),
'y': (inc, (add, 'a', 'b')),
'a': (inc, 1),
'b': (inc, 2),
- }
+ },
+ {'a': set(), 'b': set(), 'y': set(['a', 'b']),
+ 'v': set(['y']), 'u': set(['y'])})
assert fuse({
'a': (inc, 'x'),
'b': (inc, 'x'),
@@ -79,39 +84,43 @@ def test_fuse():
'd': (inc, 'c'),
'x': (inc, 'y'),
'y': 0,
- }) == {
+ }) == ({
'a': (inc, 'x'),
'b': (inc, 'x'),
'd': (inc, (inc, 'x')),
'x': (inc, 0),
- }
+ },
+ {'x': set(), 'd': set(['x']), 'a': set(['x']), 'b': set(['x'])})
assert fuse({
'a': 1,
'b': (inc, 'a'),
'c': (add, 'b', 'b')
- }) == {
+ }) == ({
'b': (inc, 1),
'c': (add, 'b', 'b')
- }
+ }, {'b': set(), 'c': set(['b'])})
def test_fuse_keys():
assert (fuse({'a': 1, 'b': (inc, 'a'), 'c': (inc, 'b')}, keys=['b'])
- == {'b': (inc, 1), 'c': (inc, 'b')})
- assert fuse({
+ == ({'b': (inc, 1), 'c': (inc, 'b')},
+ {'b': set(), 'c': set(['b'])}))
+ dsk, dependencies = fuse({
'w': (inc, 'x'),
'x': (inc, 'y'),
'y': (inc, 'z'),
'z': (add, 'a', 'b'),
'a': 1,
'b': 2,
- }, keys=['x', 'z']) == {
- 'w': (inc, 'x'),
- 'x': (inc, (inc, 'z')),
- 'z': (add, 'a', 'b'),
- 'a': 1,
- 'b': 2,
- }
+ }, keys=['x', 'z'])
+
+ assert dsk == {'w': (inc, 'x'),
+ 'x': (inc, (inc, 'z')),
+ 'z': (add, 'a', 'b'),
+ 'a': 1,
+ 'b': 2 }
+ assert dependencies == {'a': set(), 'b': set(), 'z': set(['a', 'b']),
+ 'x': set(['z']), 'w': set(['x'])}
def test_inline():
@@ -341,7 +350,7 @@ def test_fuse_getitem():
dsk = {'x': (load, 'store', 'part', ['a', 'b']),
'y': (getitem, 'x', 'a')}
dsk2 = fuse_getitem(dsk, load, 3)
- dsk2 = cull(dsk2, 'y')
+ dsk2, dependencies = cull(dsk2, 'y')
assert dsk2 == {'y': (load, 'store', 'part', 'a')}
@@ -352,5 +361,5 @@ def test_fuse_selections():
'y': (getitem, 'x', 'a')}
merge = lambda t1, t2: (load, t2[1], t2[2], t1[2])
dsk2 = fuse_selections(dsk, getitem, load, merge)
- dsk2 = cull(dsk2, 'y')
+ dsk2, dependencies = cull(dsk2, 'y')
assert dsk2 == {'y': (load, 'store', 'part', 'a')}
diff --git a/dask/tests/test_order.py b/dask/tests/test_order.py
index d64dee715..3b5f8c3ce 100644
--- a/dask/tests/test_order.py
+++ b/dask/tests/test_order.py
@@ -1,5 +1,6 @@
from itertools import chain
-from dask.order import dfs, child_max, ndependents, order, inc, get_deps
+from dask.order import dfs, child_max, ndependents, order, inc
+from dask.core import get_deps
def issorted(L, reverse=False):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 9
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
boto3==1.23.10
botocore==1.26.10
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@b066b73cfa68e31848a0ff0d6348f6a0df73d4a7#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.9.5
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs==0.4.2
s3transfer==0.5.2
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- boto3==1.23.10
- botocore==1.26.10
- distributed==1.9.5
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- s3fs==0.4.2
- s3transfer==0.5.2
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_optimization.py::test_optimize_slicing",
"dask/tests/test_optimize.py::test_cull",
"dask/tests/test_optimize.py::test_fuse",
"dask/tests/test_optimize.py::test_fuse_keys",
"dask/tests/test_optimize.py::test_fuse_getitem",
"dask/tests/test_optimize.py::test_fuse_selections"
]
| []
| [
"dask/array/tests/test_optimization.py::test_fuse_getitem",
"dask/array/tests/test_optimization.py::test_optimize_with_getitem_fusion",
"dask/array/tests/test_optimization.py::test_fuse_slice",
"dask/array/tests/test_optimization.py::test_fuse_slice_with_lists",
"dask/array/tests/test_optimization.py::test_hard_fuse_slice_cases",
"dask/array/tests/test_optimization.py::test_dont_fuse_different_slices",
"dask/tests/test_optimize.py::test_inline",
"dask/tests/test_optimize.py::test_inline_functions",
"dask/tests/test_optimize.py::test_inline_ignores_curries_and_partials",
"dask/tests/test_optimize.py::test_inline_doesnt_shrink_fast_functions_at_top",
"dask/tests/test_optimize.py::test_inline_traverses_lists",
"dask/tests/test_optimize.py::test_inline_protects_output_keys",
"dask/tests/test_optimize.py::test_functions_of",
"dask/tests/test_optimize.py::test_dealias",
"dask/tests/test_optimize.py::test_dealias_keys",
"dask/tests/test_optimize.py::test_equivalent",
"dask/tests/test_optimize.py::test_equivalence_uncomparable",
"dask/tests/test_optimize.py::test_sync_keys",
"dask/tests/test_optimize.py::test_sync_uncomparable",
"dask/tests/test_optimize.py::test_merge_sync",
"dask/tests/test_order.py::test_ordering_keeps_groups_together",
"dask/tests/test_order.py::test_prefer_broker_nodes",
"dask/tests/test_order.py::test_base_of_reduce_preferred",
"dask/tests/test_order.py::test_deep_bases_win_over_dependents",
"dask/tests/test_order.py::test_prefer_deep",
"dask/tests/test_order.py::test_stacklimit",
"dask/tests/test_order.py::test_ndependents"
]
| []
| BSD 3-Clause "New" or "Revised" License | 520 | [
"dask/array/optimization.py",
"dask/bag/core.py",
"dask/dataframe/optimize.py",
"dask/multiprocessing.py",
"dask/order.py",
"dask/dataframe/shuffle.py",
"dask/optimize.py",
"dask/core.py",
"dask/async.py"
]
| [
"dask/array/optimization.py",
"dask/bag/core.py",
"dask/dataframe/optimize.py",
"dask/multiprocessing.py",
"dask/order.py",
"dask/dataframe/shuffle.py",
"dask/optimize.py",
"dask/core.py",
"dask/async.py"
]
|
chimpler__pyhocon-81 | 24117399090b3ca1ea5d041f9de46101569ace13 | 2016-05-04 14:36:50 | 4683937b1d195ce2f53ca78987571e41bfe273e7 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 373df98..5f75182 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
# Changelog
+# Version 0.3.xx (TBA)
+
+* Implemented self-referential subsitutions
+
# Version 0.3.25
* ConfigValue.transform: do not wrap lists. PR [#76]
diff --git a/README.md b/README.md
index a811af0..6153f4f 100644
--- a/README.md
+++ b/README.md
@@ -300,8 +300,8 @@ Arrays without commas | :white_check_mark:
Path expressions | :white_check_mark:
Paths as keys | :white_check_mark:
Substitutions | :white_check_mark:
-Self-referential substitutions | :x:
-The `+=` separator | :x:
+Self-referential substitutions | :white_check_mark:
+The `+=` separator | :white_check_mark:
Includes | :white_check_mark:
Include semantics: merging | :white_check_mark:
Include semantics: substitution | :white_check_mark:
@@ -335,6 +335,7 @@ Java properties mapping | :x:
- Virgil Palanciuc ([@virgil-palanciuc](https://github.com/virgil-palanciuc))
- Douglas Simon ([@dougxc](https://github.com/dougxc))
- Gilles Duboscq ([@gilles-duboscq](https://github.com/gilles-duboscq))
+ - Stefan Anzinger ([@sanzinger](https://github.com/sanzinger))
### Thanks
diff --git a/pyhocon/config_parser.py b/pyhocon/config_parser.py
index 5f3d4bd..a029912 100644
--- a/pyhocon/config_parser.py
+++ b/pyhocon/config_parser.py
@@ -3,7 +3,7 @@ import os
import socket
import contextlib
from pyparsing import Forward, Keyword, QuotedString, Word, Literal, Suppress, Regex, Optional, SkipTo, ZeroOrMore, \
- Group, lineno, col, TokenConverter, replaceWith, alphanums
+ Group, lineno, col, TokenConverter, replaceWith, alphanums, ParseSyntaxException
from pyparsing import ParserElement
from pyhocon.config_tree import ConfigTree, ConfigSubstitution, ConfigList, ConfigValues, ConfigUnquotedString, \
ConfigInclude, NoneValue
@@ -230,6 +230,7 @@ class ConfigParser(object):
(Keyword('url') | Keyword('file')) - Literal('(').suppress() - quoted_string - Literal(')').suppress()))) \
.setParseAction(include_config)
+ root_dict_expr = Forward()
dict_expr = Forward()
list_expr = Forward()
multi_value_expr = ZeroOrMore((Literal(
@@ -237,7 +238,9 @@ class ConfigParser(object):
# for a dictionary : or = is optional
# last zeroOrMore is because we can have t = {a:4} {b: 6} {c: 7} which is dictionary concatenation
inside_dict_expr = ConfigTreeParser(ZeroOrMore(comment_eol | include_expr | assign_expr | eol_comma))
+ inside_root_dict_expr = ConfigTreeParser(ZeroOrMore(comment_eol | include_expr | assign_expr | eol_comma), root=True)
dict_expr << Suppress('{') - inside_dict_expr - Suppress('}')
+ root_dict_expr << Suppress('{') - inside_root_dict_expr - Suppress('}')
list_entry = ConcatenatedValueParser(multi_value_expr)
list_expr << Suppress('[') - ListParser(list_entry - ZeroOrMore(eol_comma - list_entry)) - Suppress(']')
@@ -245,12 +248,12 @@ class ConfigParser(object):
assign_expr << Group(
key -
ZeroOrMore(comment_no_comma_eol) -
- (dict_expr | Suppress(Literal('=') | Literal(':')) - ZeroOrMore(
+ (dict_expr | (Literal('=') | Literal(':') | Literal('+=')) - ZeroOrMore(
comment_no_comma_eol) - ConcatenatedValueParser(multi_value_expr))
)
# the file can be { ... } where {} can be omitted or []
- config_expr = ZeroOrMore(comment_eol | eol) + (list_expr | dict_expr | inside_dict_expr) + ZeroOrMore(
+ config_expr = ZeroOrMore(comment_eol | eol) + (list_expr | root_dict_expr | inside_root_dict_expr) + ZeroOrMore(
comment_eol | eol_comma)
config = config_expr.parseString(content, parseAll=True)[0]
if resolve:
@@ -290,41 +293,106 @@ class ConfigParser(object):
col=col(substitution.loc, substitution.instring)))
return True, value
+ @staticmethod
+ def _fixup_self_references(config):
+ if isinstance(config, ConfigTree) and config.root:
+ for key in config: # Traverse history of element
+ history = config.history[key]
+ previous_item = history[0]
+ for current_item in history[1:]:
+ for substitution in ConfigParser._find_substitutions(current_item):
+ prop_path = ConfigTree.parse_key(substitution.variable)
+ if len(prop_path) > 1 and config.get(substitution.variable, None) is not None:
+ continue # If value is present in latest version, don't do anything
+ if prop_path[0] == key:
+ if isinstance(previous_item, ConfigValues): # We hit a dead end, we cannot evaluate
+ raise ConfigSubstitutionException("Property {variable} cannot be substituted. Check for cycles.".format(
+ variable=substitution.variable))
+ value = previous_item if len(prop_path) == 1 else previous_item.get(".".join(prop_path[1:]))
+ (_, _, current_item) = ConfigParser._do_substitute(substitution, value)
+ previous_item = current_item
+
+ if len(history) == 1: # special case, when self optional referencing without existing
+ for substitution in ConfigParser._find_substitutions(previous_item):
+ prop_path = ConfigTree.parse_key(substitution.variable)
+ if len(prop_path) > 1 and config.get(substitution.variable, None) is not None:
+ continue # If value is present in latest version, don't do anything
+ if prop_path[0] == key and substitution.optional:
+ ConfigParser._do_substitute(substitution, None)
+
+ # traverse config to find all the substitutions
+ @staticmethod
+ def _find_substitutions(item):
+ """Convert HOCON input into a JSON output
+
+ :return: JSON string representation
+ :type return: basestring
+ """
+ if isinstance(item, ConfigValues):
+ return item.get_substitutions()
+
+ substitutions = []
+ if isinstance(item, ConfigTree):
+ for key, child in item.items():
+ substitutions += ConfigParser._find_substitutions(child)
+ elif isinstance(item, list):
+ for child in item:
+ substitutions += ConfigParser._find_substitutions(child)
+ return substitutions
+
+ @staticmethod
+ def _do_substitute(substitution, resolved_value, is_optional_resolved=True):
+ unresolved = False
+ new_substitutions = []
+ if isinstance(resolved_value, ConfigValues):
+ resolved_value = resolved_value.transform()
+ if isinstance(resolved_value, ConfigValues):
+ unresolved = True
+ result = None
+ else:
+ # replace token by substitution
+ config_values = substitution.parent
+ # if it is a string, then add the extra ws that was present in the original string after the substitution
+ formatted_resolved_value = resolved_value \
+ if resolved_value is None \
+ or isinstance(resolved_value, (dict, list)) \
+ or substitution.index == len(config_values.tokens) - 1 \
+ else (str(resolved_value) + substitution.ws)
+ config_values.put(substitution.index, formatted_resolved_value)
+ transformation = config_values.transform()
+ result = None
+ if transformation is None and not is_optional_resolved:
+ result = config_values.overriden_value
+ else:
+ result = transformation
+
+ if result is None:
+ del config_values.parent[config_values.key]
+ else:
+ config_values.parent[config_values.key] = result
+ s = ConfigParser._find_substitutions(result)
+ if s:
+ new_substitutions = s
+ unresolved = True
+
+ return (unresolved, new_substitutions, result)
+
+ @staticmethod
+ def _final_fixup(item):
+ if isinstance(item, ConfigValues):
+ return item.transform()
+ elif isinstance(item, list):
+ return list([ConfigParser._final_fixup(child) for child in item])
+ elif isinstance(item, ConfigTree):
+ items = list(item.items())
+ for key, child in items:
+ item[key] = ConfigParser._final_fixup(child)
+ return item
+
@staticmethod
def resolve_substitutions(config):
- # traverse config to find all the substitutions
- def find_substitutions(item):
- """Convert HOCON input into a JSON output
-
- :return: JSON string representation
- :type return: basestring
- """
- if isinstance(item, ConfigValues):
- return item.get_substitutions()
-
- substitutions = []
- if isinstance(item, ConfigTree):
- for key, child in item.items():
- substitutions += find_substitutions(child)
- elif isinstance(item, list):
- for child in item:
- substitutions += find_substitutions(child)
-
- return substitutions
-
- def final_fixup(item):
- if isinstance(item, ConfigValues):
- return item.transform()
- elif isinstance(item, list):
- return list([final_fixup(child) for child in item])
- elif isinstance(item, ConfigTree):
- items = list(item.items())
- for key, child in items:
- item[key] = final_fixup(child)
-
- return item
-
- substitutions = find_substitutions(config)
+ ConfigParser._fixup_self_references(config)
+ substitutions = ConfigParser._find_substitutions(config)
if len(substitutions) > 0:
unresolved = True
@@ -340,42 +408,11 @@ class ConfigParser(object):
if not is_optional_resolved and substitution.optional:
resolved_value = None
- if isinstance(resolved_value, ConfigValues):
- resolved_value = resolved_value.transform()
- if isinstance(resolved_value, ConfigValues):
- unresolved = True
- else:
- # replace token by substitution
- config_values = substitution.parent
- # if it is a string, then add the extra ws that was present in the original string after the substitution
- formatted_resolved_value = resolved_value \
- if resolved_value is None \
- or isinstance(resolved_value, (dict, list)) \
- or substitution.index == len(config_values.tokens) - 1 \
- else (str(resolved_value) + substitution.ws)
- config_values.put(substitution.index, formatted_resolved_value)
- transformation = config_values.transform()
- if transformation is None and not is_optional_resolved:
- # if it does not override anything remove the key
- # otherwise put back old value that it was overriding
- if config_values.overriden_value is None:
- if config_values.key in config_values.parent:
- del config_values.parent[config_values.key]
- else:
- config_values.parent[config_values.key] = config_values.overriden_value
- s = find_substitutions(config_values.overriden_value)
- if s:
- substitutions.extend(s)
- unresolved = True
- else:
- config_values.parent[config_values.key] = transformation
- s = find_substitutions(transformation)
- if s:
- substitutions.extend(s)
- unresolved = True
- substitutions.remove(substitution)
-
- final_fixup(config)
+ unresolved, new_subsitutions, _ = ConfigParser._do_substitute(substitution, resolved_value, is_optional_resolved)
+ substitutions.extend(new_subsitutions)
+ substitutions.remove(substitution)
+
+ ConfigParser._final_fixup(config)
if unresolved:
raise ConfigSubstitutionException("Cannot resolve {variables}. Check for cycles.".format(
variables=', '.join('${{{variable}}}: (line: {line}, col: {col})'.format(
@@ -425,8 +462,9 @@ class ConfigTreeParser(TokenConverter):
Parse a config tree from tokens
"""
- def __init__(self, expr=None):
+ def __init__(self, expr=None, root=False):
super(ConfigTreeParser, self).__init__(expr)
+ self.root = root
self.saveAsList = True
def postParse(self, instring, loc, token_list):
@@ -437,35 +475,47 @@ class ConfigTreeParser(TokenConverter):
:param token_list:
:return:
"""
- config_tree = ConfigTree()
+ config_tree = ConfigTree(root=self.root)
for element in token_list:
expanded_tokens = element.tokens if isinstance(element, ConfigInclude) else [element]
for tokens in expanded_tokens:
# key, value1 (optional), ...
key = tokens[0].strip()
- values = tokens[1:]
-
+ operator = '='
+ if len(tokens) == 3 and tokens[1].strip() in [':', '=', '+=']:
+ operator = tokens[1].strip()
+ values = tokens[2:]
+ elif len(tokens) == 2:
+ values = tokens[1:]
+ else:
+ raise ParseSyntaxException("Unknown tokens {} received".format(tokens))
# empty string
if len(values) == 0:
config_tree.put(key, '')
else:
value = values[0]
- if isinstance(value, list):
+ if isinstance(value, list) and operator == "+=":
+ value = ConfigValues([ConfigSubstitution(key, True, '', False, loc), value], False, loc)
+ config_tree.put(key, value, False)
+ elif isinstance(value, str) and operator == "+=":
+ value = ConfigValues([ConfigSubstitution(key, True, '', True, loc), ' ' + value], True, loc)
+ config_tree.put(key, value, False)
+ elif isinstance(value, list):
config_tree.put(key, value, False)
else:
- if isinstance(value, ConfigTree):
+ existing_value = config_tree.get(key, None)
+ if isinstance(value, ConfigTree) and not isinstance(existing_value, list):
+ # Only Tree has to be merged with tree
config_tree.put(key, value, True)
elif isinstance(value, ConfigValues):
conf_value = value
value.parent = config_tree
value.key = key
- existing_value = config_tree.get(key, None)
if isinstance(existing_value, list) or isinstance(existing_value, ConfigTree):
config_tree.put(key, conf_value, True)
else:
config_tree.put(key, conf_value, False)
else:
- conf_value = value
- config_tree.put(key, conf_value, False)
+ config_tree.put(key, value, False)
return config_tree
diff --git a/pyhocon/config_tree.py b/pyhocon/config_tree.py
index 92a7afc..225e15f 100644
--- a/pyhocon/config_tree.py
+++ b/pyhocon/config_tree.py
@@ -26,8 +26,10 @@ class ConfigTree(OrderedDict):
KEY_SEP = '.'
def __init__(self, *args, **kwds):
+ self.root = kwds.pop('root') if 'root' in kwds else False
+ if self.root:
+ self.history = {}
super(ConfigTree, self).__init__(*args, **kwds)
-
for key, value in self.items():
if isinstance(value, ConfigValues):
value.parent = self
@@ -55,6 +57,8 @@ class ConfigTree(OrderedDict):
value.key = key
value.overriden_value = a.get(key, None)
a[key] = value
+ if a.root:
+ a.history[key] = (a.history.get(key) or []) + b.history.get(key)
return a
@@ -65,7 +69,13 @@ class ConfigTree(OrderedDict):
# if they are both configs then merge
# if not then override
if key_elt in self and isinstance(self[key_elt], ConfigTree) and isinstance(value, ConfigTree):
- ConfigTree.merge_configs(self[key_elt], value)
+ if self.root:
+ new_value = ConfigTree.merge_configs(ConfigTree(), self[key_elt], copy_trees=True)
+ new_value = ConfigTree.merge_configs(new_value, value, copy_trees=True)
+ self._push_history(key_elt, new_value)
+ self[key_elt] = new_value
+ else:
+ ConfigTree.merge_configs(self[key_elt], value)
elif append:
# If we have t=1
# and we try to put t.a=5 then t is replaced by {a: 5}
@@ -76,10 +86,16 @@ class ConfigTree(OrderedDict):
elif isinstance(l, ConfigTree) and isinstance(value, ConfigValues):
value.tokens.append(l)
value.recompute()
+ self._push_history(key_elt, value)
+ self[key_elt] = value
+ elif isinstance(l, list) and isinstance(value, ConfigValues):
+ self._push_history(key_elt, value)
self[key_elt] = value
elif isinstance(l, list):
l += value
+ self._push_history(key_elt, l)
elif l is None:
+ self._push_history(key_elt, value)
self[key_elt] = value
else:
@@ -96,15 +112,24 @@ class ConfigTree(OrderedDict):
value.parent = self
value.key = key_elt
value.overriden_value = self.get(key_elt, None)
- super(ConfigTree, self).__setitem__(key_elt, value)
+ self._push_history(key_elt, value)
+ self[key_elt] = value
else:
next_config_tree = super(ConfigTree, self).get(key_elt)
if not isinstance(next_config_tree, ConfigTree):
# create a new dictionary or overwrite a previous value
next_config_tree = ConfigTree()
+ self._push_history(key_elt, value)
self[key_elt] = next_config_tree
next_config_tree._put(key_path[1:], value, append)
+ def _push_history(self, key, value):
+ if self.root:
+ hist = self.history.get(key)
+ if hist is None:
+ hist = self.history[key] = []
+ hist.append(value)
+
def _get(self, key_path, key_index=0, default=UndefinedKey):
key_elt = key_path[key_index]
elt = super(ConfigTree, self).get(key_elt, UndefinedKey)
@@ -130,7 +155,8 @@ class ConfigTree(OrderedDict):
else:
return default
- def _parse_key(self, str):
+ @staticmethod
+ def parse_key(str):
"""
Split a key into path elements:
- a.b.c => a, b, c
@@ -150,7 +176,7 @@ class ConfigTree(OrderedDict):
:type key: basestring
:param value: value to put
"""
- self._put(self._parse_key(key), value, append)
+ self._put(ConfigTree.parse_key(key), value, append)
def get(self, key, default=UndefinedKey):
"""Get a value from the tree
@@ -161,7 +187,7 @@ class ConfigTree(OrderedDict):
:type default: object
:return: value in the tree located at key
"""
- return self._get(self._parse_key(key), 0, default)
+ return self._get(ConfigTree.parse_key(key), 0, default)
def get_string(self, key, default=UndefinedKey):
"""Return string representation of value found at key
| support for "+=" field separator?
I get an error when parsing a hocon file:
```
[ec2-user@ip-172-16-2-117 ~]$ pyhocon -i /opt/mapr/drill/drill-1.1.0/conf/drill-override-example.conf
Traceback (most recent call last):
File "/usr/local/bin/pyhocon", line 9, in <module>
load_entry_point('pyhocon==0.3.4', 'console_scripts', 'pyhocon')()
File "/usr/local/lib/python2.7/site-packages/pyhocon/tool.py", line 188, in main
HOCONConverter.convert(args.input, args.output, args.format)
File "/usr/local/lib/python2.7/site-packages/pyhocon/tool.py", line 145, in convert
config = ConfigFactory.parse_file(input_file)
File "/usr/local/lib/python2.7/site-packages/pyhocon/__init__.py", line 40, in parse_file
return ConfigFactory.parse_string(content, os.path.dirname(filename))
File "/usr/local/lib/python2.7/site-packages/pyhocon/__init__.py", line 75, in parse_string
return ConfigParser().parse(content, basedir)
File "/usr/local/lib/python2.7/site-packages/pyhocon/__init__.py", line 216, in parse
config = config_expr.parseString(content, parseAll=True)[0]
File "/usr/local/lib/python2.7/site-packages/pyparsing.py", line 1125, in parseString
raise exc
pyparsing.ParseSyntaxException: Expected "{" (at char 1087), (line:20, col:33)
```
Line 20 in the above looks like this:
```
drill.logical.function.packages += "org.apache.drill.exec.expr.fn.impl"
```
And column 33 implicates the "+=" field separator.
Any plans to add support for this?
https://github.com/typesafehub/config/blob/master/HOCON.md#the--field-separator
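For reference, a minimal sketch of the behaviour being requested, mirroring the `test_self_append_array` case from the accompanying test patch (it assumes a pyhocon build that already implements `+=`; the 0.3.4 release shown in the traceback above does not):
```
# Minimal sketch of HOCON's `+=` separator, which appends to the field's
# previous value via a self-referential substitution. This mirrors the
# `test_self_append_array` case in the accompanying test patch and assumes
# a pyhocon version that implements `+=`.
from pyhocon import ConfigFactory

config = ConfigFactory.parse_string("""
    x = [1, 2]
    x += [3, 4]
""")
assert config.get("x") == [1, 2, 3, 4]
```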
| chimpler/pyhocon | diff --git a/tests/test_config_parser.py b/tests/test_config_parser.py
index a639f89..2c93f13 100644
--- a/tests/test_config_parser.py
+++ b/tests/test_config_parser.py
@@ -706,6 +706,234 @@ class TestConfigParser(object):
"""
)
+ def test_self_ref_substitution_array(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = [1,2]
+ x = ${x} [3,4]
+ x = [-1, 0] ${x} [5, 6]
+ x = [-3, -2] ${x}
+ """
+ )
+ assert config.get("x") == [-3, -2, -1, 0, 1, 2, 3, 4, 5, 6]
+
+ def test_self_append_array(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = [1,2]
+ x += [3,4]
+ """
+ )
+ assert config.get("x") == [1, 2, 3, 4]
+
+ def test_self_append_string(self):
+ '''
+ Should be equivalent to
+ x = abc
+ x = ${?x} def
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ x = abc
+ x += def
+ """
+ )
+ assert config.get("x") == "abc def"
+
+ def test_self_append_non_existent_string(self):
+ '''
+ Should be equivalent to x = ${?x} def
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ x += def
+ """
+ )
+ assert config.get("x") == " def"
+
+ def test_self_append_nonexistent_array(self):
+ config = ConfigFactory.parse_string(
+ """
+ x += [1,2]
+ """
+ )
+ assert config.get("x") == [1, 2]
+
+ def test_self_append_object(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = {a: 1}
+ x += {b: 2}
+ """
+ )
+ assert config.get("x") == {'a': 1, 'b': 2}
+
+ def test_self_append_nonexistent_object(self):
+ config = ConfigFactory.parse_string(
+ """
+ x += {a: 1}
+ """
+ )
+ assert config.get("x") == {'a': 1}
+
+ def test_self_ref_substitution_array_to_dict(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = [1,2]
+ x = {x: [3,4]}
+ x = {y: [5,6]}
+ x = {z: ${x}}
+ """
+ )
+ assert config.get("x.x") == [3, 4]
+ assert config.get("x.y") == [5, 6]
+ assert config.get("x.z") == {'x': [3, 4], 'y': [5, 6]}
+
+ def test_self_ref_substitiotion_dict_in_array(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = {x: [3,4]}
+ x = [${x}, 2, 3]
+ """
+ )
+ (one, two, three) = config.get("x")
+ assert one == {'x': [3, 4]}
+ assert two == 2
+ assert three == 3
+
+ def test_self_ref_substitution_dict_path(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = {y: {z: 1}}
+ x = ${x.y}
+ """
+ )
+ assert config.get("x.y") == {'z': 1}
+ assert config.get("x.z") == 1
+ assert set(config.get("x").keys()) == set(['y', 'z'])
+
+ def test_self_ref_substitution_dict_path_hide(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = {y: {y: 1}}
+ x = ${x.y}
+ """
+ )
+ assert config.get("x.y") == {'y': 1}
+ assert set(config.get("x").keys()) == set(['y'])
+
+ def test_self_ref_substitution_dict_recurse(self):
+ with pytest.raises(ConfigSubstitutionException):
+ ConfigFactory.parse_string(
+ """
+ x = ${x}
+ """
+ )
+
+ def test_self_ref_substitution_dict_recurse2(self):
+ with pytest.raises(ConfigSubstitutionException):
+ ConfigFactory.parse_string(
+ """
+ x = ${x}
+ x = ${x}
+ """
+ )
+
+ def test_self_ref_substitution_dict_merge(self):
+ '''
+ Example from HOCON spec
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ foo : { a : { c : 1 } }
+ foo : ${foo.a}
+ foo : { a : 2 }
+ """
+ )
+ assert config.get('foo') == {'a': 2, 'c': 1}
+ assert set(config.keys()) == set(['foo'])
+
+ def test_self_ref_substitution_dict_otherfield(self):
+ '''
+ Example from HOCON spec
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ bar : {
+ foo : 42,
+ baz : ${bar.foo}
+ }
+ """
+ )
+ assert config.get("bar") == {'foo': 42, 'baz': 42}
+ assert set(config.keys()) == set(['bar'])
+
+ def test_self_ref_substitution_dict_otherfield_merged_in(self):
+ '''
+ Example from HOCON spec
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ bar : {
+ foo : 42,
+ baz : ${bar.foo}
+ }
+ bar : { foo : 43 }
+ """
+ )
+ assert config.get("bar") == {'foo': 43, 'baz': 43}
+ assert set(config.keys()) == set(['bar'])
+
+ def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self):
+ '''
+ Example from HOCON spec
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ // bar.a should end up as 4
+ bar : { a : ${foo.d}, b : 1 }
+ bar.b = 3
+ // foo.c should end up as 3
+ foo : { c : ${bar.b}, d : 2 }
+ foo.d = 4
+ """
+ )
+ assert config.get("bar") == {'a': 4, 'b': 3}
+ assert config.get("foo") == {'c': 3, 'd': 4}
+ assert set(config.keys()) == set(['bar', 'foo'])
+
+ def test_self_ref_substitution_string_opt_concat(self):
+ '''
+ Example from HOCON spec
+ '''
+ config = ConfigFactory.parse_string(
+ """
+ a = ${?a}foo
+ """
+ )
+ assert config.get("a") == 'foo'
+ assert set(config.keys()) == set(['a'])
+
+ def test_self_ref_substitution_dict_recurse_part(self):
+ with pytest.raises(ConfigSubstitutionException):
+ ConfigFactory.parse_string(
+ """
+ x = ${x} {y: 1}
+ x = ${x.y}
+ """
+ )
+
+ def test_self_ref_substitution_object(self):
+ config = ConfigFactory.parse_string(
+ """
+ x = {a: 1, b: 2}
+ x = ${x} {c: 3}
+ x = {z: 0} ${x}
+ x = {y: -1} ${x} {d: 4}
+ """
+ )
+ assert config.get("x") == {'a': 1, 'b': 2, 'c': 3, 'z': 0, 'y': -1, 'd': 4}
+
def test_concat_multi_line_string(self):
config = ConfigFactory.parse_string(
"""
@@ -1278,6 +1506,66 @@ class TestConfigParser(object):
'large-jvm-opts': ['-XX:+UseParNewGC', '-Xm16g']
}
+ def test_fallback_self_ref_substitutions_append(self):
+ config1 = ConfigFactory.parse_string(
+ """
+ list = [ 1, 2, 3 ]
+ """
+ )
+ config2 = ConfigFactory.parse_string(
+ """
+ list = ${list} [ 4, 5, 6 ]
+ """,
+ resolve=False
+ )
+ config2 = config2.with_fallback(config1)
+ assert config2.get("list") == [1, 2, 3, 4, 5, 6]
+
+ def test_fallback_self_ref_substitutions_append_plus_equals(self):
+ config1 = ConfigFactory.parse_string(
+ """
+ list = [ 1, 2, 3 ]
+ """
+ )
+ config2 = ConfigFactory.parse_string(
+ """
+ list += [ 4, 5, 6 ]
+ """,
+ resolve=False
+ )
+ config2 = config2.with_fallback(config1)
+ assert config2.get("list") == [1, 2, 3, 4, 5, 6]
+
+ def test_fallback_self_ref_substitutions_merge(self):
+ config1 = ConfigFactory.parse_string(
+ """
+ dict = { x: 1 }
+ """
+ )
+ config2 = ConfigFactory.parse_string(
+ """
+ dict = ${dict} { y: 2 }
+ """,
+ resolve=False
+ )
+ config2 = config2.with_fallback(config1)
+ assert config2.get("dict") == {'x': 1, 'y': 2}
+
+ def test_fallback_self_ref_substitutions_concat_string(self):
+ config1 = ConfigFactory.parse_string(
+ """
+ string = abc
+ """
+ )
+ config2 = ConfigFactory.parse_string(
+ """
+ string = ${string}def
+ """,
+ resolve=False
+ )
+ config2 = config2.with_fallback(config1)
+ assert config2.get("string") == 'abcdef'
+
def test_object_field_substitution(self):
config = ConfigFactory.parse_string(
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 4
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.4",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
-e git+https://github.com/chimpler/pyhocon.git@24117399090b3ca1ea5d041f9de46101569ace13#egg=pyhocon
pyparsing==2.1.1
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: pyhocon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==2.1.1
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/pyhocon
| [
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_array",
"tests/test_config_parser.py::TestConfigParser::test_self_append_array",
"tests/test_config_parser.py::TestConfigParser::test_self_append_string",
"tests/test_config_parser.py::TestConfigParser::test_self_append_non_existent_string",
"tests/test_config_parser.py::TestConfigParser::test_self_append_nonexistent_array",
"tests/test_config_parser.py::TestConfigParser::test_self_append_object",
"tests/test_config_parser.py::TestConfigParser::test_self_append_nonexistent_object",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_array_to_dict",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitiotion_dict_in_array",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_path",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_path_hide",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_merge",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_string_opt_concat",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_recurse_part",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_object",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_append",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_append_plus_equals",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_merge",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_concat_string"
]
| []
| [
"tests/test_config_parser.py::TestConfigParser::test_parse_simple_value",
"tests/test_config_parser.py::TestConfigParser::test_parse_with_enclosing_brace",
"tests/test_config_parser.py::TestConfigParser::test_parse_with_enclosing_square_bracket",
"tests/test_config_parser.py::TestConfigParser::test_quoted_key_with_dots",
"tests/test_config_parser.py::TestConfigParser::test_dotted_notation_merge",
"tests/test_config_parser.py::TestConfigParser::test_comma_to_separate_expr",
"tests/test_config_parser.py::TestConfigParser::test_dict_merge",
"tests/test_config_parser.py::TestConfigParser::test_parse_with_comments",
"tests/test_config_parser.py::TestConfigParser::test_missing_config",
"tests/test_config_parser.py::TestConfigParser::test_parse_null",
"tests/test_config_parser.py::TestConfigParser::test_parse_empty",
"tests/test_config_parser.py::TestConfigParser::test_parse_override",
"tests/test_config_parser.py::TestConfigParser::test_concat_dict",
"tests/test_config_parser.py::TestConfigParser::test_concat_string",
"tests/test_config_parser.py::TestConfigParser::test_concat_list",
"tests/test_config_parser.py::TestConfigParser::test_bad_concat",
"tests/test_config_parser.py::TestConfigParser::test_string_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_string_substitutions_with_no_space",
"tests/test_config_parser.py::TestConfigParser::test_int_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_cascade_string_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_multiple_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_dict_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_unquoted_string_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_quoted_string_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_triple_quoted_string_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_int_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_float_noeol",
"tests/test_config_parser.py::TestConfigParser::test_list_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_list_element_substitution",
"tests/test_config_parser.py::TestConfigParser::test_substitution_list_with_append",
"tests/test_config_parser.py::TestConfigParser::test_substitution_list_with_append_substitution",
"tests/test_config_parser.py::TestConfigParser::test_non_existent_substitution",
"tests/test_config_parser.py::TestConfigParser::test_non_compatible_substitution",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_recurse",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_recurse2",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_otherfield",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_otherfield_merged_in",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_otherfield_merged_in_mutual",
"tests/test_config_parser.py::TestConfigParser::test_concat_multi_line_string",
"tests/test_config_parser.py::TestConfigParser::test_concat_multi_line_list",
"tests/test_config_parser.py::TestConfigParser::test_concat_multi_line_dict",
"tests/test_config_parser.py::TestConfigParser::test_parse_URL_from_samples",
"tests/test_config_parser.py::TestConfigParser::test_parse_URL_from_invalid",
"tests/test_config_parser.py::TestConfigParser::test_include_dict_from_samples",
"tests/test_config_parser.py::TestConfigParser::test_list_of_dicts",
"tests/test_config_parser.py::TestConfigParser::test_list_of_lists",
"tests/test_config_parser.py::TestConfigParser::test_list_of_dicts_with_merge",
"tests/test_config_parser.py::TestConfigParser::test_list_of_lists_with_merge",
"tests/test_config_parser.py::TestConfigParser::test_invalid_assignment",
"tests/test_config_parser.py::TestConfigParser::test_invalid_dict",
"tests/test_config_parser.py::TestConfigParser::test_include_list",
"tests/test_config_parser.py::TestConfigParser::test_include_dict",
"tests/test_config_parser.py::TestConfigParser::test_include_substitution",
"tests/test_config_parser.py::TestConfigParser::test_substitution_override",
"tests/test_config_parser.py::TestConfigParser::test_substitution_flat_override",
"tests/test_config_parser.py::TestConfigParser::test_substitution_nested_override",
"tests/test_config_parser.py::TestConfigParser::test_optional_substitution",
"tests/test_config_parser.py::TestConfigParser::test_cascade_optional_substitution",
"tests/test_config_parser.py::TestConfigParser::test_substitution_cycle",
"tests/test_config_parser.py::TestConfigParser::test_assign_number_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_strings_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_list_numbers_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_list_strings_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_dict_strings_with_equal_sign_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_dict_strings_no_equal_sign_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_substitutions_overwrite",
"tests/test_config_parser.py::TestConfigParser::test_fallback_substitutions_overwrite",
"tests/test_config_parser.py::TestConfigParser::test_fallback_substitutions_overwrite_file",
"tests/test_config_parser.py::TestConfigParser::test_object_field_substitution",
"tests/test_config_parser.py::TestConfigParser::test_one_line_quote_escape",
"tests/test_config_parser.py::TestConfigParser::test_multi_line_escape",
"tests/test_config_parser.py::TestConfigParser::test_from_dict_with_dict",
"tests/test_config_parser.py::TestConfigParser::test_from_dict_with_ordered_dict",
"tests/test_config_parser.py::TestConfigParser::test_from_dict_with_nested_dict",
"tests/test_config_parser.py::TestConfigParser::test_object_concat",
"tests/test_config_parser.py::TestConfigParser::test_issue_75"
]
| []
| Apache License 2.0 | 521 | [
"pyhocon/config_tree.py",
"README.md",
"pyhocon/config_parser.py",
"CHANGELOG.md"
]
| [
"pyhocon/config_tree.py",
"README.md",
"pyhocon/config_parser.py",
"CHANGELOG.md"
]
|
|
imageio__imageio-150 | 8f0efc08ae98db69c02dc2820e4e76985cf0e60d | 2016-05-06 23:57:09 | 0a390e31561cf06c495c622626319e9ffdacc007 | diff --git a/imageio/core/__init__.py b/imageio/core/__init__.py
index 0347ec7..b5cedf0 100644
--- a/imageio/core/__init__.py
+++ b/imageio/core/__init__.py
@@ -6,7 +6,7 @@
(everything but the plugins).
"""
-from .util import Image, Dict, asarray, image_as_uint8, urlopen # noqa
+from .util import Image, Dict, asarray, image_as_uint, urlopen # noqa
from .util import BaseProgressIndicator, StdoutProgressIndicator # noqa
from .util import string_types, text_type, binary_type, IS_PYPY # noqa
from .util import get_platform, appdata_dir, resource_dirs, has_module # noqa
diff --git a/imageio/core/util.py b/imageio/core/util.py
index 12463c2..70d0b99 100644
--- a/imageio/core/util.py
+++ b/imageio/core/util.py
@@ -13,6 +13,7 @@ import os
import sys
import time
import struct
+from warnings import warn
import numpy as np
@@ -46,39 +47,71 @@ def urlopen(*args, **kwargs):
return urlopen(*args, **kwargs)
-def image_as_uint8(im):
- """ Convert the given image to uint8
-
- If the dtype is already uint8, it is returned as-is. If the image
- is float, and all values are between 0 and 1, the values are
- multiplied by 255. In all other situations, the values are scaled
- such that the minimum value becomes 0 and the maximum value becomes
- 255.
+def image_as_uint(im, bitdepth=None):
+ """ Convert the given image to uint (default: uint8)
+
+ If the dtype already matches the desired format, it is returned
+ as-is. If the image is float, and all values are between 0 and 1,
+ the values are multiplied by np.power(2.0, bitdepth). In all other
+ situations, the values are scaled such that the minimum value
+ becomes 0 and the maximum value becomes np.power(2.0, bitdepth)-1
+ (255 for 8-bit and 65535 for 16-bit).
"""
+ if not bitdepth:
+ bitdepth = 8
if not isinstance(im, np.ndarray):
- raise ValueError('image must be a numpy array')
+ raise ValueError('Image must be a numpy array')
+ if bitdepth == 8:
+ out_type = np.uint8
+ elif bitdepth == 16:
+ out_type = np.uint16
+ else:
+ raise ValueError('Bitdepth must be either 8 or 16')
dtype_str = str(im.dtype)
- # Already uint8?
- if dtype_str == 'uint8':
+ if ((im.dtype == np.uint8 and bitdepth == 8) or
+ (im.dtype == np.uint16 and bitdepth == 16)):
+ # Already the correct format? Return as-is
return im
- # Handle float
- mi, ma = np.nanmin(im), np.nanmax(im)
- if dtype_str.startswith('float'):
- if mi >= 0 and ma <= 1:
- mi, ma = 0, 1
- # Now make float copy before we scale
- im = im.astype('float32')
- # Scale the values between 0 and 255
- if np.isfinite(mi) and np.isfinite(ma):
- if mi:
- im -= mi
- if ma != 255:
- im *= 255.0 / (ma - mi)
- assert np.nanmax(im) < 256
- return im.astype(np.uint8)
+ if (dtype_str.startswith('float') and
+ np.nanmin(im) >= 0 and np.nanmax(im) <= 1):
+ warn('Lossy conversion from {0} to {1}, range [0, 1]'.format(
+ dtype_str, out_type.__name__))
+ im = im.astype(np.float64) * (np.power(2.0, bitdepth)-1)
+ elif im.dtype == np.uint16 and bitdepth == 8:
+ warn('Lossy conversion from uint16 to uint8, '
+ 'loosing 8 bits of resolution')
+ im = np.right_shift(im, 8)
+ elif im.dtype == np.uint32:
+ warn('Lossy conversion from uint32 to {0}, '
+ 'loosing {1} bits of resolution'.format(out_type.__name__,
+ 32-bitdepth))
+ im = np.right_shift(im, 32-bitdepth)
+ elif im.dtype == np.uint64:
+ warn('Lossy conversion from uint64 to {0}, '
+ 'loosing {1} bits of resolution'.format(out_type.__name__,
+ 64-bitdepth))
+ im = np.right_shift(im, 64-bitdepth)
+ else:
+ mi = np.nanmin(im)
+ ma = np.nanmax(im)
+ if not np.isfinite(mi):
+ raise ValueError('Minimum image value is not finite')
+ if not np.isfinite(ma):
+ raise ValueError('Maximum image value is not finite')
+ if ma == mi:
+ raise ValueError('Max value == min value, ambiguous given dtype')
+ warn('Conversion from {0} to {1}, '
+ 'range [{2}, {3}]'.format(dtype_str, out_type.__name__, mi, ma))
+ # Now make float copy before we scale
+ im = im.astype('float64')
+ # Scale the values between 0 and 1 then multiply by the max value
+ im = (im - mi) / (ma - mi) * (np.power(2.0, bitdepth)-1)
+ assert np.nanmin(im) >= 0
+ assert np.nanmax(im) < np.power(2.0, bitdepth)
+ return im.astype(out_type)
-# currently not used ... the only use it to easly provide the global meta info
+# currently not used ... the only use it to easily provide the global meta info
class ImageList(list):
def __init__(self, meta=None):
list.__init__(self)
diff --git a/imageio/plugins/_freeimage.py b/imageio/plugins/_freeimage.py
index b6541c7..90c9310 100644
--- a/imageio/plugins/_freeimage.py
+++ b/imageio/plugins/_freeimage.py
@@ -559,16 +559,15 @@ class Freeimage(object):
# Test if ok
if ftype == -1:
- raise ValueError('Cannot determine format of file "%s"' %
+ raise ValueError('Cannot determine format of file "%s"' %
filename)
elif mode == 'w' and not lib.FreeImage_FIFSupportsWriting(ftype):
- raise ValueError('Cannot write the format of file "%s"' %
+ raise ValueError('Cannot write the format of file "%s"' %
filename)
elif mode == 'r' and not lib.FreeImage_FIFSupportsReading(ftype):
- raise ValueError('Cannot read the format of file "%s"' %
+ raise ValueError('Cannot read the format of file "%s"' %
filename)
- else:
- return ftype
+ return ftype
def create_bitmap(self, filename, ftype, flags=0):
""" create_bitmap(filename, ftype, flags=0)
@@ -796,8 +795,7 @@ class FIBitmap(FIBaseBitmap):
if not bitmap: # pragma: no cover
raise RuntimeError('Could not allocate bitmap for storage: %s'
% self._fi._get_error_message())
- else:
- self._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
+ self._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
def load_from_filename(self, filename=None):
if filename is None:
@@ -814,8 +812,7 @@ class FIBitmap(FIBaseBitmap):
raise ValueError('Could not load bitmap "%s": %s' %
(self._filename,
self._fi._get_error_message()))
- else:
- self._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
+ self._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
# def load_from_bytes(self, bytes):
# with self._fi as lib:
@@ -1144,12 +1141,12 @@ class FIBitmap(FIBaseBitmap):
raise ValueError('Could not quantize bitmap "%s": %s' %
(self._filename,
self._fi._get_error_message()))
- else:
- new = FIBitmap(self._fi, self._filename, self._ftype,
- self._flags)
- new._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
- new._fi_type = self._fi_type
- return new
+
+ new = FIBitmap(self._fi, self._filename, self._ftype,
+ self._flags)
+ new._set_bitmap(bitmap, (lib.FreeImage_Unload, bitmap))
+ new._fi_type = self._fi_type
+ return new
# def convert_to_32bit(self):
# """ Convert to 32bit image.
@@ -1201,9 +1198,8 @@ class FIMultipageBitmap(FIBaseBitmap):
err = self._fi._get_error_message()
raise ValueError('Could not open file "%s" as multi-image: %s'
% (self._filename, err))
- else:
- self._set_bitmap(multibitmap,
- (lib.FreeImage_CloseMultiBitmap, multibitmap))
+ self._set_bitmap(multibitmap,
+ (lib.FreeImage_CloseMultiBitmap, multibitmap))
# def load_from_bytes(self, bytes):
# with self._fi as lib:
@@ -1248,9 +1244,8 @@ class FIMultipageBitmap(FIBaseBitmap):
msg = ('Could not open file "%s" for writing multi-image: %s'
% (self._filename, self._fi._get_error_message()))
raise ValueError(msg)
- else:
- self._set_bitmap(multibitmap,
- (lib.FreeImage_CloseMultiBitmap, multibitmap))
+ self._set_bitmap(multibitmap,
+ (lib.FreeImage_CloseMultiBitmap, multibitmap))
def __len__(self):
with self._fi as lib:
diff --git a/imageio/plugins/ffmpeg.py b/imageio/plugins/ffmpeg.py
index 8356c5c..0e08670 100644
--- a/imageio/plugins/ffmpeg.py
+++ b/imageio/plugins/ffmpeg.py
@@ -25,7 +25,7 @@ import numpy as np
from .. import formats
from ..core import (Format, get_remote_file, string_types, read_n_bytes,
- image_as_uint8, get_platform, InternetNotAllowedError)
+ image_as_uint, get_platform, InternetNotAllowedError)
FNAME_PER_PLATFORM = {
'osx32': 'ffmpeg.osx',
@@ -575,7 +575,7 @@ class FfmpegFormat(Format):
depth = 1 if im.ndim == 2 else im.shape[2]
# Ensure that image is in uint8
- im = image_as_uint8(im)
+ im = image_as_uint(im, bitdepth=8)
# Set size and initialize if not initialized yet
if self._size is None:
diff --git a/imageio/plugins/freeimage.py b/imageio/plugins/freeimage.py
index 818ba39..af0b242 100644
--- a/imageio/plugins/freeimage.py
+++ b/imageio/plugins/freeimage.py
@@ -11,9 +11,8 @@ from __future__ import absolute_import, print_function, division
import numpy as np
-
from .. import formats
-from ..core import Format, image_as_uint8
+from ..core import Format, image_as_uint
from ._freeimage import fi, IO_FLAGS, FNAME_PER_PLATFORM # noqa
@@ -165,7 +164,7 @@ class FreeimageBmpFormat(FreeimageFormat):
return FreeimageFormat.Writer._open(self, flags)
def _append_data(self, im, meta):
- im = image_as_uint8(im)
+ im = image_as_uint(im, bitdepth=8)
return FreeimageFormat.Writer._append_data(self, im, meta)
@@ -222,7 +221,10 @@ class FreeimagePngFormat(FreeimageFormat):
return FreeimageFormat.Writer._open(self, flags)
def _append_data(self, im, meta):
- im = image_as_uint8(im)
+ if str(im.dtype) == 'uint16':
+ im = image_as_uint(im, bitdepth=16)
+ else:
+ im = image_as_uint(im, bitdepth=8)
FreeimageFormat.Writer._append_data(self, im, meta)
# Quantize?
q = int(self.request.kwargs.get('quantize', False))
@@ -335,7 +337,7 @@ class FreeimageJpegFormat(FreeimageFormat):
def _append_data(self, im, meta):
if im.ndim == 3 and im.shape[-1] == 4:
raise IOError('JPEG does not support alpha channel.')
- im = image_as_uint8(im)
+ im = image_as_uint(im, bitdepth=8)
return FreeimageFormat.Writer._append_data(self, im, meta)
diff --git a/imageio/plugins/freeimagemulti.py b/imageio/plugins/freeimagemulti.py
index 1d0fd1f..1e7b6e7 100644
--- a/imageio/plugins/freeimagemulti.py
+++ b/imageio/plugins/freeimagemulti.py
@@ -10,7 +10,7 @@ from __future__ import absolute_import, print_function, division
import numpy as np
from .. import formats
-from ..core import Format, image_as_uint8
+from ..core import Format, image_as_uint
from ._freeimage import fi, IO_FLAGS
from .freeimage import FreeimageFormat
@@ -73,7 +73,7 @@ class FreeimageMulti(FreeimageFormat):
# Prepare data
if im.ndim == 3 and im.shape[-1] == 1:
im = im[:, :, 0]
- im = image_as_uint8(im)
+ im = image_as_uint(im, bitdepth=8)
# Create sub bitmap
sub1 = fi.create_bitmap(self._bm._filename, self.format.fif)
# Let subclass add data to bitmap, optionally return new
diff --git a/imageio/plugins/swf.py b/imageio/plugins/swf.py
index df7cd78..2006524 100644
--- a/imageio/plugins/swf.py
+++ b/imageio/plugins/swf.py
@@ -14,7 +14,7 @@ from io import BytesIO
import numpy as np
from .. import formats
-from ..core import Format, read_n_bytes, image_as_uint8
+from ..core import Format, read_n_bytes, image_as_uint
_swf = None # lazily loaded in lib()
@@ -301,7 +301,7 @@ class SWFFormat(Format):
# Correct shape and type
if im.ndim == 3 and im.shape[-1] == 1:
im = im[:, :, 0]
- im = image_as_uint8(im)
+ im = image_as_uint(im, bitdepth=8)
# Get frame size
wh = im.shape[1], im.shape[0]
# Write header on first frame
| Support writing 16-bit color channels for PNG
I'm trying to transition away from MATLAB, and imageio has been incredibly helpful. However, I need to work with 64-bit PNGs (16-bit color channels and a 16-bit alpha channel), and although reading 16-bit channels appears to be supported, writing is not.
I created some 64-bit PNGs in MATLAB using the following code:
img_out = zeros(256, 256, 3, 'uint16');
img_alpha = zeros(256, 256, 'uint16');
color_grad = uint16(reshape(0:2^16-1, 256, []));
img_out(:, :, 1) = color_grad;
img_out(:, :, 2) = rot90(color_grad, 1);
img_out(:, :, 3) = rot90(color_grad, 2);
img_out_alpha = rot90(color_grad, 3);
fprintf('Write unique values: R=%u, G=%u, B=%u, A=%u\n', ...
length(unique(img_out(:, :, 1))), ...
length(unique(img_out(:, :, 2))), ...
length(unique(img_out(:, :, 3))), ...
length(unique(img_out_alpha)))
imwrite(img_out, '64bit_MATLAB.png', 'Alpha', img_out_alpha, 'BitDepth', 16)
[img_in, ~, img_in_alpha] = imread('64bit_MATLAB.png');
fprintf('MATLAB PNG unique values: R=%u, G=%u, B=%u, A=%u\n', ...
length(unique(img_in(:, :, 1))), ...
length(unique(img_in(:, :, 2))), ...
length(unique(img_in(:, :, 3))), ...
length(unique(img_in_alpha)))
This gives me the following output:
> Write unique values: R=65536, G=65536, B=65536, A=65536
> MATLAB PNG unique values: R=65536, G=65536, B=65536, A=65536
If I try the same with imageio in Python:
import imageio
import numpy as np
img_out = np.zeros((256, 256, 4), dtype=np.uint16)
color_grad = np.reshape(np.arange(2**16), (256,-1))
img_out[:, :, 0] = color_grad
img_out[:, :, 1] = np.rot90(color_grad, 1)
img_out[:, :, 2] = np.rot90(color_grad, 2)
img_out[:, :, 3] = np.rot90(color_grad, 3)
print('Write unique values: R={}, G={}, B={}, A={}'.format(
len(set(img_out[:, :, 0].flatten().tolist())),
len(set(img_out[:, :, 1].flatten().tolist())),
len(set(img_out[:, :, 2].flatten().tolist())),
len(set(img_out[:, :, 3].flatten().tolist()))))
imageio.imwrite('64bit_imageio.png', img_out)
img_in = imageio.imread('64bit_imageio.png')
print('imageio PNG unique values: R={}, G={}, B={}, A={}'.format(
len(set(img_in[:, :, 0].flatten().tolist())),
len(set(img_in[:, :, 1].flatten().tolist())),
len(set(img_in[:, :, 2].flatten().tolist())),
len(set(img_in[:, :, 3].flatten().tolist()))))
I get the following:
> Write unique values: R=65536, G=65536, B=65536, A=65536
> imageio PNG unique values: R=256, G=256, B=256, A=256
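Those 256 unique values are exactly what you get when a full 16-bit range is squeezed into 8 bits; roughly like this (the real conversion rescales rather than bit-shifts, but the information loss is the same):
```
import numpy as np

grad16 = np.arange(2**16, dtype=np.uint16)
as_uint8 = (grad16 >> 8).astype(np.uint8)   # keep only the high byte
print(len(np.unique(as_uint8)))             # 256, matching the report above
```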
To confirm, imageio is able to read the 64-bit PNG:
img_in = imageio.imread('64bit_MATLAB.png')
print('MATLAB PNG unique values: R={}, G={}, B={}, A={}'.format(
len(set(img_in[:, :, 0].flatten().tolist())),
len(set(img_in[:, :, 1].flatten().tolist())),
len(set(img_in[:, :, 2].flatten().tolist())),
len(set(img_in[:, :, 3].flatten().tolist()))))
> MATLAB PNG unique values: R=65536, G=65536, B=65536, A=65536
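With the patch at the top of this record (uint16 input routed through `image_as_uint(im, bitdepth=16)` in the PNG writer), the expectation is that the same round trip preserves all 65536 levels. A minimal sketch of that check (the file name is arbitrary, and the assertion describes the intended post-fix behaviour, not the released library):
```
import imageio
import numpy as np

img_out = np.reshape(np.arange(2**16, dtype=np.uint16), (256, 256))
imageio.imwrite('gradient16.png', img_out)

img_in = imageio.imread('gradient16.png')
# Expected once 16-bit PNG writing is supported:
assert len(np.unique(img_in)) == 2**16
```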
Any idea how difficult this would be to implement? | imageio/imageio | diff --git a/tests/test_core.py b/tests/test_core.py
index 1f35e77..8425930 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -738,34 +738,65 @@ def test_util_progres_bar(sleep=0):
return
-def test_util_image_as_uint8():
-
- raises(ValueError, core.image_as_uint8, 4)
- raises(ValueError, core.image_as_uint8, "not an image")
-
- res = core.image_as_uint8(np.array([0, 1], 'uint8'))
- assert res[0] == 0 and res[1] == 1
- res = core.image_as_uint8(np.array([4, 255], 'uint8'))
- assert res[0] == 4 and res[1] == 255
-
- res = core.image_as_uint8(np.array([0, 1], 'int8'))
- assert res[0] == 0 and res[1] == 255
- res = core.image_as_uint8(np.array([-4, 100], 'int8'))
- assert res[0] == 0 and res[1] == 255
-
- res = core.image_as_uint8(np.array([0, 1], 'int16'))
- assert res[0] == 0 and res[1] == 255
- res = core.image_as_uint8(np.array([-4, 100], 'int16'))
- assert res[0] == 0 and res[1] == 255
- res = core.image_as_uint8(np.array([-1000, 8000], 'int16'))
- assert res[0] == 0 and res[1] == 255
-
- res = core.image_as_uint8(np.array([0, 1], 'float32'))
- assert res[0] == 0 and res[1] == 255
- res = core.image_as_uint8(np.array([0.099, 0.785], 'float32'))
- assert res[0] == 25 and res[1] == 200
- res = core.image_as_uint8(np.array([4, 200], 'float32'))
- assert res[0] == 0 and res[1] == 255
+def test_util_image_as_uint():
+ ''' Tests the various type conversions when writing to uint'''
+ raises(ValueError, core.image_as_uint, 4)
+ raises(ValueError, core.image_as_uint, "not an image")
+ raises(ValueError, core.image_as_uint, np.array([0, 1]), bitdepth=13)
+ raises(ValueError, core.image_as_uint, np.array([2.0, 2.0], 'float32'))
+ raises(ValueError, core.image_as_uint, np.array([0.0, np.inf], 'float32'))
+ raises(ValueError, core.image_as_uint, np.array([-np.inf, 0.0], 'float32'))
+
+ test_arrays = ( # (input, output bitdepth, expected output)
+ # No bitdepth specified, assumed to be 8-bit
+ (np.array([0, 2 ** 8 - 1], 'uint8'), None, np.uint8([0, 255])),
+ (np.array([0, 2 ** 16 - 1], 'uint16'), None, np.uint8([0, 255])),
+ (np.array([0, 2 ** 32 - 1], 'uint32'), None, np.uint8([0, 255])),
+ (np.array([0, 2 ** 64 - 1], 'uint64'), None, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int8'), None, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int16'), None, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int32'), None, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int64'), None, np.uint8([0, 255])),
+ (np.array([0, 1], 'float16'), None, np.uint8([0, 255])),
+ (np.array([0, 1], 'float32'), None, np.uint8([0, 255])),
+ (np.array([0, 1], 'float64'), None, np.uint8([0, 255])),
+ (np.array([-1.0, 1.0], 'float16'), None, np.uint8([0, 255])),
+ (np.array([-1.0, 1.0], 'float32'), None, np.uint8([0, 255])),
+ (np.array([-1.0, 1.0], 'float64'), None, np.uint8([0, 255])),
+ # 8-bit output
+ (np.array([0, 2 ** 8 - 1], 'uint8'), 8, np.uint8([0, 255])),
+ (np.array([0, 2 ** 16 - 1], 'uint16'), 8, np.uint8([0, 255])),
+ (np.array([0, 2 ** 32 - 1], 'uint32'), 8, np.uint8([0, 255])),
+ (np.array([0, 2 ** 64 - 1], 'uint64'), 8, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int8'), 8, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int16'), 8, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int32'), 8, np.uint8([0, 255])),
+ (np.array([-2, 2], 'int64'), 8, np.uint8([0, 255])),
+ (np.array([0, 1], 'float16'), 8, np.uint8([0, 255])),
+ (np.array([0, 1], 'float32'), 8, np.uint8([0, 255])),
+ (np.array([0, 1], 'float64'), 8, np.uint8([0, 255])),
+ (np.array([-1.0, 1.0], 'float16'), 8, np.uint8([0, 255])),
+ (np.array([-1.0, 1.0], 'float32'), 8, np.uint8([0, 255])),
+ (np.array([-1.0, 1.0], 'float64'), 8, np.uint8([0, 255])),
+ # 16-bit output
+ (np.array([0, 2 ** 8 - 1], 'uint8'), 16, np.uint16([0, 65535])),
+ (np.array([0, 2 ** 16 - 1], 'uint16'), 16, np.uint16([0, 65535])),
+ (np.array([0, 2 ** 32 - 1], 'uint32'), 16, np.uint16([0, 65535])),
+ (np.array([0, 2 ** 64 - 1], 'uint64'), 16, np.uint16([0, 65535])),
+ (np.array([-2, 2], 'int8'), 16, np.uint16([0, 65535])),
+ (np.array([-2, 2], 'int16'), 16, np.uint16([0, 65535])),
+ (np.array([-2, 2], 'int32'), 16, np.uint16([0, 65535])),
+ (np.array([-2, 2], 'int64'), 16, np.uint16([0, 65535])),
+ (np.array([0, 1], 'float16'), 16, np.uint16([0, 65535])),
+ (np.array([0, 1], 'float32'), 16, np.uint16([0, 65535])),
+ (np.array([0, 1], 'float64'), 16, np.uint16([0, 65535])),
+ (np.array([-1.0, 1.0], 'float16'), 16, np.uint16([0, 65535])),
+ (np.array([-1.0, 1.0], 'float32'), 16, np.uint16([0, 65535])),
+ (np.array([-1.0, 1.0], 'float64'), 16, np.uint16([0, 65535])),)
+
+ for tup in test_arrays:
+ res = core.image_as_uint(tup[0], bitdepth=tup[1])
+ assert res[0] == tup[2][0] and res[1] == tup[2][1]
def test_util_has_has_module():
diff --git a/tests/test_freeimage.py b/tests/test_freeimage.py
index 6a75fb5..1b5ab83 100644
--- a/tests/test_freeimage.py
+++ b/tests/test_freeimage.py
@@ -37,7 +37,7 @@ im4[20:, :, 3] = 120
fnamebase = os.path.join(test_dir, 'test')
-def get_ref_im(colors, crop, float):
+def get_ref_im(colors, crop, isfloat):
""" Get reference image with
* colors: 0, 1, 3, 4
* cropping: 0-> none, 1-> crop, 2-> crop with non-contiguous data
@@ -45,9 +45,9 @@ def get_ref_im(colors, crop, float):
"""
assert colors in (0, 1, 3, 4)
assert crop in (0, 1, 2)
- assert float in (False, True)
+ assert isfloat in (False, True)
rim = [im0, im1, None, im3, im4][colors]
- if float:
+ if isfloat:
rim = rim.astype(np.float32) / 255.0
if crop == 1:
rim = rim[:-1, :-1].copy()
@@ -144,27 +144,27 @@ def test_freeimage_lib():
def test_png():
- for float in (False, True):
+ for isfloat in (False, True):
for crop in (0, 1, 2):
for colors in (0, 1, 3, 4):
- fname = fnamebase + '%i.%i.%i.png' % (float, crop, colors)
- rim = get_ref_im(colors, crop, float)
+ fname = fnamebase+'%i.%i.%i.png' % (isfloat, crop, colors)
+ rim = get_ref_im(colors, crop, isfloat)
imageio.imsave(fname, rim)
im = imageio.imread(fname)
- mul = 255 if float else 1
+ mul = 255 if isfloat else 1
assert_close(rim * mul, im, 0.1) # lossless
# Run exact same test, but now in pypy backup mode
try:
imageio.plugins._freeimage.TEST_NUMPY_NO_STRIDES = True
- for float in (False, True):
+ for isfloat in (False, True):
for crop in (0, 1, 2):
for colors in (0, 1, 3, 4):
- fname = fnamebase + '%i.%i.%i.png' % (float, crop, colors)
- rim = get_ref_im(colors, crop, float)
+ fname = fnamebase+'%i.%i.%i.png' % (isfloat, crop, colors)
+ rim = get_ref_im(colors, crop, isfloat)
imageio.imsave(fname, rim)
im = imageio.imread(fname)
- mul = 255 if float else 1
+ mul = 255 if isfloat else 1
assert_close(rim * mul, im, 0.1) # lossless
finally:
imageio.plugins._freeimage.TEST_NUMPY_NO_STRIDES = False
@@ -240,14 +240,14 @@ def test_png_dtypes():
def test_jpg():
- for float in (False, True):
+ for isfloat in (False, True):
for crop in (0, 1, 2):
for colors in (0, 1, 3):
- fname = fnamebase + '%i.%i.%i.jpg' % (float, crop, colors)
- rim = get_ref_im(colors, crop, float)
+ fname = fnamebase + '%i.%i.%i.jpg' % (isfloat, crop, colors)
+ rim = get_ref_im(colors, crop, isfloat)
imageio.imsave(fname, rim)
im = imageio.imread(fname)
- mul = 255 if float else 1
+ mul = 255 if isfloat else 1
assert_close(rim * mul, im, 1.1) # lossy
# No alpha in JPEG
@@ -303,14 +303,14 @@ def test_jpg_more():
def test_bmp():
- for float in (False, True):
+ for isfloat in (False, True):
for crop in (0, 1, 2):
for colors in (0, 1, 3, 4):
- fname = fnamebase + '%i.%i.%i.bmp' % (float, crop, colors)
- rim = get_ref_im(colors, crop, float)
+ fname = fnamebase + '%i.%i.%i.bmp' % (isfloat, crop, colors)
+ rim = get_ref_im(colors, crop, isfloat)
imageio.imsave(fname, rim)
im = imageio.imread(fname)
- mul = 255 if float else 1
+ mul = 255 if isfloat else 1
assert_close(rim * mul, im, 0.1) # lossless
# Compression
@@ -328,16 +328,16 @@ def test_bmp():
def test_gif():
# The not-animated gif
- for float in (False, True):
+ for isfloat in (False, True):
for crop in (0, 1, 2):
for colors in (0, 3, 4):
if colors > 1 and sys.platform.startswith('darwin'):
continue # quantize fails, see also png
- fname = fnamebase + '%i.%i.%i.gif' % (float, crop, colors)
- rim = get_ref_im(colors, crop, float)
+ fname = fnamebase + '%i.%i.%i.gif' % (isfloat, crop, colors)
+ rim = get_ref_im(colors, crop, isfloat)
imageio.imsave(fname, rim)
im = imageio.imread(fname)
- mul = 255 if float else 1
+ mul = 255 if isfloat else 1
if colors in (0, 1):
im = im[:, :, 0]
else:
@@ -364,10 +364,10 @@ def test_animated_gif():
ims.append(im)
# Store - animated GIF always poops out RGB
- for float in (False, True):
+ for isfloat in (False, True):
for colors in (3, 4):
ims1 = ims[:]
- if float:
+ if isfloat:
ims1 = [x.astype(np.float32) / 256 for x in ims1]
ims1 = [x[:, :, :colors] for x in ims1]
fname = fnamebase + '.animated.%i.gif' % colors
@@ -420,15 +420,15 @@ def test_ico():
if os.getenv('TRAVIS', '') == 'true' and sys.version_info >= (3, 4):
skip('Freeimage ico is unstable for this Travis build')
- for float in (False, True):
+ for isfloat in (False, True):
for crop in (0, ):
for colors in (1, 3, 4):
- fname = fnamebase + '%i.%i.%i.ico' % (float, crop, colors)
- rim = get_ref_im(colors, crop, float)
+ fname = fnamebase + '%i.%i.%i.ico' % (isfloat, crop, colors)
+ rim = get_ref_im(colors, crop, isfloat)
rim = rim[:32, :32] # ico needs nice size
imageio.imsave(fname, rim)
im = imageio.imread(fname)
- mul = 255 if float else 1
+ mul = 255 if isfloat else 1
assert_close(rim * mul, im, 0.1) # lossless
# Meta data
diff --git a/tests/test_swf.py b/tests/test_swf.py
index 9cabec2..22b2622 100644
--- a/tests/test_swf.py
+++ b/tests/test_swf.py
@@ -168,9 +168,12 @@ def test_types():
fname1 = get_remote_file('images/stent.swf', test_dir)
fname2 = fname1[:-4] + '.out3.swf'
- for dtype in [np.uint8, np.float32]:
- for shape in [(100, 100), (100, 100, 1), (100, 100, 3)]:
- im1 = np.empty(shape, dtype) # empty is nice for testing nan
+ for dtype in [np.uint8, np.uint16, np.uint32, np.uint64,
+ np.int8, np.int16, np.int32, np.int64,
+ np.float16, np.float32, np.float64]:
+ for shape in [(100, 1), (100, 3)]:
+ # Repeats an identity matrix, just for testing
+ im1 = np.dstack((np.identity(shape[0], dtype=dtype), )*shape[1])
imageio.mimsave(fname2, [im1], 'swf')
im2 = imageio.mimread(fname2, 'swf')[0]
assert im2.shape == (100, 100, 4)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_media",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 7
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy",
"pip_packages": [
"pytest",
"pytest-cov",
"coveralls"
],
"pre_install": [
"apt-get update",
"apt-get install -y libfreeimage3"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup==1.2.2
idna==3.10
-e git+https://github.com/imageio/imageio.git@8f0efc08ae98db69c02dc2820e4e76985cf0e60d#egg=imageio
iniconfig==2.1.0
numpy @ file:///croot/numpy_and_numpy_base_1736283260865/work/dist/numpy-2.0.2-cp39-cp39-linux_x86_64.whl#sha256=3387e3e62932fa288bc18e8f445ce19e998b418a65ed2064dd40a054f976a6c7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
| name: imageio
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- blas=1.0=openblas
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=11.2.0=h00389a5_1
- libgfortran5=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.21=h043d6bf_0
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- numpy=2.0.2=py39heeff2f4_0
- numpy-base=2.0.2=py39h8a23956_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=72.1.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/imageio
| [
"tests/test_core.py::test_util_image_as_uint"
]
| [
"tests/test_core.py::test_findlib",
"tests/test_core.py::test_util_image",
"tests/test_freeimage.py::test_png",
"tests/test_freeimage.py::test_animated_gif"
]
| [
"tests/test_core.py::test_format",
"tests/test_core.py::test_reader_and_writer",
"tests/test_core.py::test_default_can_read_and_can_write",
"tests/test_core.py::test_format_selection",
"tests/test_core.py::test_format_manager",
"tests/test_core.py::test_fetching",
"tests/test_core.py::test_request",
"tests/test_core.py::test_request_read_sources",
"tests/test_core.py::test_request_save_sources",
"tests/test_core.py::test_request_file_no_seek",
"tests/test_core.py::test_util_imagelist",
"tests/test_core.py::test_util_dict",
"tests/test_core.py::test_util_get_platform",
"tests/test_core.py::test_util_asarray",
"tests/test_core.py::test_util_progres_bar",
"tests/test_core.py::test_util_has_has_module",
"tests/test_core.py::test_functions",
"tests/test_core.py::test_example_plugin",
"tests/test_freeimage.py::test_get_ref_im",
"tests/test_freeimage.py::test_get_fi_lib",
"tests/test_freeimage.py::test_freeimage_format",
"tests/test_freeimage.py::test_freeimage_lib",
"tests/test_freeimage.py::test_png_dtypes",
"tests/test_freeimage.py::test_jpg",
"tests/test_freeimage.py::test_jpg_more",
"tests/test_freeimage.py::test_bmp",
"tests/test_freeimage.py::test_gif",
"tests/test_freeimage.py::test_ico",
"tests/test_freeimage.py::test_mng",
"tests/test_freeimage.py::test_other",
"tests/test_swf.py::test_format_selection",
"tests/test_swf.py::test_reading_saving",
"tests/test_swf.py::test_read_from_url",
"tests/test_swf.py::test_invalid",
"tests/test_swf.py::test_lowlevel",
"tests/test_swf.py::test_types"
]
| []
| BSD 2-Clause "Simplified" License | 523 | [
"imageio/plugins/freeimagemulti.py",
"imageio/plugins/freeimage.py",
"imageio/plugins/ffmpeg.py",
"imageio/plugins/_freeimage.py",
"imageio/core/util.py",
"imageio/core/__init__.py",
"imageio/plugins/swf.py"
]
| [
"imageio/plugins/freeimagemulti.py",
"imageio/plugins/freeimage.py",
"imageio/plugins/ffmpeg.py",
"imageio/plugins/_freeimage.py",
"imageio/core/util.py",
"imageio/core/__init__.py",
"imageio/plugins/swf.py"
]
|
|
Juniper__py-junos-eznc-509 | ae516980deb810d9935cb62222fa7a4c522e1175 | 2016-05-07 11:04:54 | 3ca08f81e0be85394c6fa3e94675dfe03e958e28 | diff --git a/lib/jnpr/junos/device.py b/lib/jnpr/junos/device.py
index 3dcb71df..3438b910 100644
--- a/lib/jnpr/junos/device.py
+++ b/lib/jnpr/junos/device.py
@@ -496,7 +496,7 @@ class Device(object):
self.connected = True
self._nc_transform = self.transform
- self._norm_transform = lambda: JXML.normalize_xslt.encode('UTF-8')
+ self._norm_transform = lambda: JXML.normalize_xslt
normalize = kvargs.get('normalize', self._normalize)
if normalize is True:
diff --git a/lib/jnpr/junos/utils/start_shell.py b/lib/jnpr/junos/utils/start_shell.py
index 61f97df4..5d735ad5 100644
--- a/lib/jnpr/junos/utils/start_shell.py
+++ b/lib/jnpr/junos/utils/start_shell.py
@@ -3,7 +3,7 @@ from select import select
import re
_JUNOS_PROMPT = '> '
-_SHELL_PROMPT = '% '
+_SHELL_PROMPT = '(%|#) '
_SELECT_WAIT = 0.1
_RECVSZ = 1024
@@ -35,8 +35,8 @@ class StartShell(object):
:param str this: expected string/pattern.
- :returns: resulting string of data
- :rtype: str
+ :returns: resulting string of data in a list
+ :rtype: list
.. warning:: need to add a timeout safeguard
"""
@@ -82,8 +82,8 @@ class StartShell(object):
self._client = client
self._chan = chan
- got = self.wait_for('(%|>)')
- if not got[-1].endswith(_SHELL_PROMPT):
+ got = self.wait_for(r'(%|>|#)')
+ if got[-1].endswith(_JUNOS_PROMPT):
self.send('start shell')
self.wait_for(_SHELL_PROMPT)
@@ -116,7 +116,7 @@ class StartShell(object):
rc = ''.join(self.wait_for(this))
self.last_ok = True if rc.find('0') > 0 else False
- return (self.last_ok,got)
+ return (self.last_ok, got)
# -------------------------------------------------------------------------
# CONTEXT MANAGER
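The key change is the widened shell-prompt pattern: a root shell on Junos ends its prompt with '# ' rather than '% '. A small, self-contained illustration (the prompt strings below are hypothetical) of why the old pattern hangs for root:
```
import re

_OLD_SHELL_PROMPT = '% '
_NEW_SHELL_PROMPT = '(%|#) '   # pattern introduced by this patch

non_root = 'user@router% '     # hypothetical non-root shell prompt
root = 'root@router:RE:0# '    # hypothetical root shell prompt

# The new pattern matches both prompts, so wait_for() can return.
assert re.search(_NEW_SHELL_PROMPT, non_root)
assert re.search(_NEW_SHELL_PROMPT, root)

# The old pattern never matches the root prompt, which is why
# StartShell.open() blocked forever when connecting as root.
assert re.search(_OLD_SHELL_PROMPT, root) is None
```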
| Shell session does not work for root user.
I'm using the example program below.
```
#!/usr/bin/env python
import jnpr.junos.utils
from jnpr.junos import Device
from jnpr.junos.utils import *
from jnpr.junos.utils.start_shell import *
from jnpr.junos.utils.start_shell import StartShell
DUT = Device(host='10.252.191.104', user="root", passwd = "password!")
DUT.open()
print "Device opened!"
DUT_ss = StartShell(DUT)
DUT_ss.open()
print "got thru shell open."
print DUT_ss.run("pwd")
print DUT_ss.run("ls")
```
We never print the line "got thru shell open" because the library hangs while waiting for the "% " shell prompt to appear. I have a fix I'll be suggesting. | Juniper/py-junos-eznc | diff --git a/tests/unit/utils/test_start_shell.py b/tests/unit/utils/test_start_shell.py
index b3812937..ee728b02 100644
--- a/tests/unit/utils/test_start_shell.py
+++ b/tests/unit/utils/test_start_shell.py
@@ -19,9 +19,17 @@ class TestStartShell(unittest.TestCase):
@patch('paramiko.SSHClient')
@patch('jnpr.junos.utils.start_shell.StartShell.wait_for')
- def test_startshell_open(self, mock_connect, mock_wait):
+ def test_startshell_open_with_shell_term(self, mock_wait, mock_connect):
+ mock_wait.return_value = ["user # "]
self.shell.open()
- mock_connect.assert_called_with('(%|>)')
+ mock_wait.assert_called_with('(%|>|#)')
+
+ @patch('paramiko.SSHClient')
+ @patch('jnpr.junos.utils.start_shell.StartShell.wait_for')
+ def test_startshell_open_with_junos_term(self, mock_wait, mock_connect):
+ mock_wait.return_value = ["user > "]
+ self.shell.open()
+ mock_wait.assert_called_with('(%|#) ')
@patch('paramiko.SSHClient')
def test_startshell_close(self, mock_connect):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"nose",
"pep8",
"pyflakes",
"coveralls",
"ntc_templates",
"cryptography==3.2",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bcrypt==4.2.1
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
coverage==6.5.0
coveralls==3.3.1
cryptography==44.0.2
docopt==0.6.2
exceptiongroup==1.2.2
future==1.0.0
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
Jinja2==3.1.6
-e git+https://github.com/Juniper/py-junos-eznc.git@ae516980deb810d9935cb62222fa7a4c522e1175#egg=junos_eznc
lxml==5.3.1
MarkupSafe==2.1.5
mock==5.2.0
ncclient==0.6.19
netaddr==1.3.0
nose==1.3.7
ntc_templates==4.0.1
packaging==24.0
paramiko==3.5.1
pep8==1.7.1
pluggy==1.2.0
pycparser==2.21
pyflakes==3.0.1
PyNaCl==1.5.0
pytest==7.4.4
PyYAML==6.0.1
requests==2.31.0
scp==0.15.0
six==1.17.0
textfsm==1.1.3
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: py-junos-eznc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bcrypt==4.2.1
- cffi==1.15.1
- charset-normalizer==3.4.1
- coverage==6.5.0
- coveralls==3.3.1
- cryptography==44.0.2
- docopt==0.6.2
- exceptiongroup==1.2.2
- future==1.0.0
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jinja2==3.1.6
- lxml==5.3.1
- markupsafe==2.1.5
- mock==5.2.0
- ncclient==0.6.19
- netaddr==1.3.0
- nose==1.3.7
- ntc-templates==4.0.1
- packaging==24.0
- paramiko==3.5.1
- pep8==1.7.1
- pluggy==1.2.0
- pycparser==2.21
- pyflakes==3.0.1
- pynacl==1.5.0
- pytest==7.4.4
- pyyaml==6.0.1
- requests==2.31.0
- scp==0.15.0
- six==1.17.0
- textfsm==1.1.3
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/py-junos-eznc
| [
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_open_with_junos_term",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_open_with_shell_term"
]
| [
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_context"
]
| [
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_close",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_run",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_wait_for",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_wait_for_regex"
]
| []
| Apache License 2.0 | 524 | [
"lib/jnpr/junos/utils/start_shell.py",
"lib/jnpr/junos/device.py"
]
| [
"lib/jnpr/junos/utils/start_shell.py",
"lib/jnpr/junos/device.py"
]
|
|
terryyin__lizard-120 | bdcc784bd22d8e48db22884dfeb42647ffb67fbf | 2016-05-08 06:41:31 | bdcc784bd22d8e48db22884dfeb42647ffb67fbf | rakhimov: Hi @terryyin,
do not yet merge this PR.
I sense there are bugs in implementation of C++ ``current_nesting_level``.
The initial code works with Python,
for which the nesting level metric is producing what is expected.
However, the C++ ``current_nesting_level`` doesn't seem to be fully conformant.
Your suggestions are welcome. | diff --git a/lizard.py b/lizard.py
index 4d21e8a..cac8020 100755
--- a/lizard.py
+++ b/lizard.py
@@ -316,7 +316,7 @@ class NestingStack(object):
self.pending_function = None
self.nesting_stack.append(Namespace(token))
- def start_new_funciton_nesting(self, function):
+ def start_new_function_nesting(self, function):
self.pending_function = function
def _create_nesting(self):
@@ -386,7 +386,7 @@ class FileInfoBuilder(object):
self.fileinfo.filename,
self.current_line)
self.current_function.top_nesting_level = self.current_nesting_level
- self.start_new_funciton_nesting(self.current_function)
+ self.start_new_function_nesting(self.current_function)
def add_condition(self, inc=1):
self.current_function.cyclomatic_complexity += inc
diff --git a/lizard_ext/lizardns.py b/lizard_ext/lizardns.py
index e057e73..4ee09bf 100644
--- a/lizard_ext/lizardns.py
+++ b/lizard_ext/lizardns.py
@@ -1,13 +1,16 @@
"""
This extension counts nested control structures within a function.
-The extension is implemented with C++ in mind.
+
+The extension is implemented with C++ and Python in mind,
+but it is expected to work with other languages supported by Lizard
+with its language reader implementing 'nesting_level' metric for tokens.
The code borrows heavily from implementation of Nesting Depth extension
originally written by Mehrdad Meh and Terry Yin.
"""
-from lizard import FileInfoBuilder, FunctionInfo
-from lizard_ext.lizardnd import patch, patch_append_method
+from lizard import FunctionInfo
+from lizard_ext.lizardnd import patch_append_method
DEFAULT_NS_THRESHOLD = 3
@@ -32,106 +35,90 @@ class LizardExtension(object): # pylint: disable=R0903
def __call__(self, tokens, reader):
"""The intent of the code is to detect control structures as entities.
- The complexity arises from tracking of
- control structures without brackets.
- The termination of such control structures in C-like languages
- is the next statement or control structure with a compound statement.
-
- Moreover, control structures with two or more tokens complicates
- the proper counting, for example, 'else if'.
+ The implementation relies on nesting level metric for tokens
+ provided by language readers.
+ If the following contract for the nesting level metric does not hold,
+ this implementation of nested structure counting is invalid.
- In Python with meaningful indentation,
- tracking the indentation levels becomes crucial
- to identify boundaries of the structures.
- The following code is not designed for Python.
- """
- structures = set(['if', 'else', 'foreach', 'for', 'while', 'do',
- 'try', 'catch', 'switch'])
+ If a control structure has started its block (eg. '{'),
+ and its level is **less** than the next structure,
+ the next structure is nested.
- structure_indicator = "{"
- structure_end = "}"
- indent_indicator = ";"
-
- for token in tokens:
- if reader.context.is_within_structure():
- if token == "(":
- reader.context.add_parentheses(1)
- elif token == ")":
- reader.context.add_parentheses(-1)
+ If a control structure has *not* started its block,
+ and its level is **no more** than the next structure,
+ the next structure is nested (compound statement).
- if not reader.context.is_within_parentheses():
- if token in structures:
- reader.context.add_nested_structure(token)
+ If a control structure level is **higher** than the next structure,
+ it is considered closed.
- elif token == structure_indicator:
- reader.context.add_brace()
-
- elif token == structure_end:
- reader.context.pop_brace()
- reader.context.pop_nested_structure()
-
- elif token == indent_indicator:
- reader.context.pop_nested_structure()
-
- yield token
+ If a control structure has started its block,
+ and its level is **equal** to the next structure,
+ it is considered closed.
-
-# TODO: Some weird false positive from pylint. # pylint: disable=fixme
-# pylint: disable=E1101
-class NSFileInfoAddition(FileInfoBuilder):
-
- def add_nested_structure(self, token):
- """Conditionally adds nested structures."""
- # Handle compound else-if.
- if token == "if" and self.current_function.structure_stack:
- prev_token, br_state = self.current_function.structure_stack[-1]
- if (prev_token == "else" and
- br_state == self.current_function.brace_count):
+ The level of any non-structure tokens is treated
+ with the same logic as for the next structures
+ for control block **starting** and **closing** purposes.
+ """
+ # TODO: Delegate this to language readers # pylint: disable=fixme
+ structures = set(['if', 'else', 'elif', 'for', 'foreach', 'while', 'do',
+ 'try', 'catch', 'switch', 'finally', 'except',
+ 'with'])
+
+ cur_level = 0
+ start_structure = [False] # Just to make it mutable.
+ structure_stack = [] # [(token, ns_level)]
+
+ def add_nested_structure(token):
+ """Conditionally adds nested structures."""
+ if structure_stack:
+ prev_token, ns_level = structure_stack[-1]
+ if cur_level == ns_level:
+ if (token == "if" and prev_token == "else" and
+ not start_structure[0]):
+ return # Compound 'else if' in C-like languages.
+ if start_structure[0]:
+ structure_stack.pop()
+ elif cur_level < ns_level:
+ while structure_stack and ns_level >= cur_level:
+ _, ns_level = structure_stack.pop()
+
+ structure_stack.append((token, cur_level))
+ start_structure[0] = False # Starts on the next level with body.
+
+ ns_cur = len(structure_stack)
+ if reader.context.current_function.max_nested_structures < ns_cur:
+ reader.context.current_function.max_nested_structures = ns_cur
+
+ def pop_nested_structure():
+ """Conditionally pops the nested structures if levels match."""
+ if not structure_stack:
return
- self.current_function.structure_stack.append(
- (token, self.current_function.brace_count))
-
- ns_cur = len(self.current_function.structure_stack)
- if self.current_function.max_nested_structures < ns_cur:
- self.current_function.max_nested_structures = ns_cur
+ _, ns_level = structure_stack[-1]
- def pop_nested_structure(self):
- """Conditionally pops the structure count if braces match."""
- if not self.current_function.structure_stack:
- return
+ if cur_level > ns_level:
+ start_structure[0] = True
- _, br_state = self.current_function.structure_stack[-1]
- if br_state == self.current_function.brace_count:
- self.current_function.structure_stack.pop()
+ elif cur_level < ns_level:
+ while structure_stack and ns_level >= cur_level:
+ _, ns_level = structure_stack.pop()
+ start_structure[0] = bool(structure_stack)
- def add_brace(self):
- self.current_function.brace_count += 1
+ elif start_structure[0]:
+ structure_stack.pop()
- def pop_brace(self):
- # pylint: disable=fixme
- # TODO: For some reason, brace count goes negative.
- # assert self.current_function.brace_count > 0
- self.current_function.brace_count -= 1
-
- def add_parentheses(self, inc):
- """Dual purpose parentheses manipulator."""
- self.current_function.paren_count += inc
-
- def is_within_parentheses(self):
- assert self.current_function.paren_count >= 0
- return self.current_function.paren_count != 0
+ for token in tokens:
+ cur_level = reader.context.current_nesting_level
+ if token in structures:
+ add_nested_structure(token)
+ else:
+ pop_nested_structure()
- def is_within_structure(self):
- return bool(self.current_function.structure_stack)
+ yield token
def _init_nested_structure_data(self, *_):
self.max_nested_structures = 0
- self.brace_count = 0
- self.paren_count = 0
- self.structure_stack = []
-patch(NSFileInfoAddition, FileInfoBuilder)
patch_append_method(_init_nested_structure_data, FunctionInfo, "__init__")
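To make the metric concrete, here is a rough, indentation-based sketch of what "maximum nested structures" means for Python code. It is an illustration only, deliberately much simpler than the extension above (which relies on the reader-provided nesting level), and the helper name is made up:
```
# Keywords that open a control structure in Python source.
STRUCTURES = {'if', 'elif', 'else', 'for', 'while', 'try', 'except',
              'finally', 'with'}

def max_nested_structures(source):
    """Rough, indentation-based estimate of the deepest control nesting."""
    stack = []      # indentation levels of the structures currently open
    deepest = 0
    for line in source.splitlines():
        stripped = line.strip()
        if not stripped:
            continue
        indent = len(line) - len(line.lstrip())
        # Dedenting (or staying level) closes the structures we passed;
        # this also keeps if/elif/else chains at the same depth.
        while stack and indent <= stack[-1]:
            stack.pop()
        if stripped.split()[0].rstrip(':') in STRUCTURES:
            stack.append(indent)
            deepest = max(deepest, len(stack))
    return deepest

sample = """\
for a in b:
    for x in y:
        if i:
            return j
"""
assert max_nested_structures(sample) == 3  # matches the expectation in the test patch below
```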
| Detection of Deeply Nested Control Structures
This metric may not apply to the whole function,
but the maximum 'nestedness' (nesting of for-loops, if-statements, etc.)
may be an interesting metric for detecting code smells.
It closely relates to indentation.
Got this from the Linux kernel coding style:
> The answer to that is that if you need
more than 3 levels of indentation, you're screwed anyway, and should fix
your program. | terryyin/lizard | diff --git a/test/testNestedStructures.py b/test/testNestedStructures.py
old mode 100755
new mode 100644
index 7eee514..1a2d826
--- a/test/testNestedStructures.py
+++ b/test/testNestedStructures.py
@@ -1,5 +1,7 @@
import unittest
-from .testHelpers import get_cpp_function_list_with_extnesion
+
+from .testHelpers import get_cpp_function_list_with_extnesion, \
+ get_python_function_list_with_extnesion
from lizard_ext.lizardns import LizardExtension as NestedStructure
@@ -7,6 +9,10 @@ def process_cpp(source):
return get_cpp_function_list_with_extnesion(source, NestedStructure())
+def process_python(source):
+ return get_python_function_list_with_extnesion(source, NestedStructure())
+
+
class TestCppNestedStructures(unittest.TestCase):
def test_no_structures(self):
@@ -209,3 +215,122 @@ class TestCppNestedStructures(unittest.TestCase):
}
""")
self.assertEqual(3, result[0].max_nested_structures)
+
+
+class TestPythonNestedStructures(unittest.TestCase):
+
+ def test_no_structures(self):
+ result = process_python("def fun():\n pass")
+ self.assertEqual(0, result[0].max_nested_structures)
+
+ def test_if_structure(self):
+ result = process_python("def fun():\n if a:\n return")
+ self.assertEqual(1, result[0].max_nested_structures)
+
+ def test_for_structure(self):
+ result = process_python("def fun():\n for a in b:\n foo()")
+ self.assertEqual(1, result[0].max_nested_structures)
+
+ def test_condition_in_if_structure(self):
+ result = process_python("def fun():\n if a and b:\n return")
+ self.assertEqual(1, result[0].max_nested_structures)
+
+ def test_elif(self):
+ result = process_python("""
+ def c():
+ if a:
+ baz()
+ elif c:
+ foo()
+ """)
+ self.assertEqual(1, result[0].max_nested_structures)
+
+ def test_nested_if_structures(self):
+ result = process_python("""
+ def c():
+ if a:
+ if b:
+ baz()
+ else:
+ foo()
+ """)
+ self.assertEqual(2, result[0].max_nested_structures)
+
+ def test_equal_metric_structures(self):
+ result = process_python("""
+ def c():
+ if a:
+ if b:
+ baz()
+ else:
+ foo()
+
+ for a in b:
+ if c:
+ bar()
+ """)
+ self.assertEqual(2, result[0].max_nested_structures)
+
+ def test_while(self):
+ result = process_python("""
+ def c():
+ while a:
+ baz()
+ """)
+ self.assertEqual(1, result[0].max_nested_structures)
+
+ def test_try_catch(self):
+ result = process_python("""
+ def c():
+ try:
+ f.open()
+ catch Exception as err:
+ print(err)
+ finally:
+ f.close()
+ """)
+ self.assertEqual(1, result[0].max_nested_structures)
+
+ def test_two_functions(self):
+ result = process_python("""
+ def c():
+ try:
+ if a:
+ foo()
+ catch Exception as err:
+ print(err)
+
+ def d():
+ for a in b:
+ for x in y:
+ if i:
+ return j
+ """)
+ self.assertEqual(2, result[0].max_nested_structures)
+ self.assertEqual(3, result[1].max_nested_structures)
+
+ def test_nested_functions(self):
+ result = process_python("""
+ def c():
+ def d():
+ for a in b:
+ for x in y:
+ if i:
+ return j
+ try:
+ if a:
+ foo()
+ catch Exception as err:
+ print(err)
+
+ """)
+ self.assertEqual(3, result[0].max_nested_structures)
+ self.assertEqual(2, result[1].max_nested_structures)
+
+ def test_with_structure(self):
+ result = process_python("""
+ def c():
+ with open(f) as input_file:
+ foo(f)
+ """)
+ self.assertEqual(1, result[0].max_nested_structures)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt",
"dev_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
dill==0.3.9
exceptiongroup==1.2.2
iniconfig==2.1.0
isort==6.0.1
-e git+https://github.com/terryyin/lizard.git@bdcc784bd22d8e48db22884dfeb42647ffb67fbf#egg=lizard
mccabe==0.7.0
mock==5.2.0
nose==1.3.7
packaging==24.2
pep8==1.7.1
platformdirs==4.3.7
pluggy==1.5.0
pylint==3.3.6
pytest==8.3.5
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
| name: lizard
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==3.3.9
- dill==0.3.9
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- pep8==1.7.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pylint==3.3.6
- pytest==8.3.5
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
prefix: /opt/conda/envs/lizard
| [
"test/testNestedStructures.py::TestPythonNestedStructures::test_equal_metric_structures",
"test/testNestedStructures.py::TestPythonNestedStructures::test_nested_functions",
"test/testNestedStructures.py::TestPythonNestedStructures::test_nested_if_structures",
"test/testNestedStructures.py::TestPythonNestedStructures::test_try_catch",
"test/testNestedStructures.py::TestPythonNestedStructures::test_two_functions",
"test/testNestedStructures.py::TestPythonNestedStructures::test_with_structure"
]
| []
| [
"test/testNestedStructures.py::TestCppNestedStructures::test_and_condition_in_if_structure",
"test/testNestedStructures.py::TestCppNestedStructures::test_do",
"test/testNestedStructures.py::TestCppNestedStructures::test_forever_loop",
"test/testNestedStructures.py::TestCppNestedStructures::test_if_structure",
"test/testNestedStructures.py::TestCppNestedStructures::test_nested_if_structures",
"test/testNestedStructures.py::TestCppNestedStructures::test_nested_loop_mixed_brackets",
"test/testNestedStructures.py::TestCppNestedStructures::test_no_structures",
"test/testNestedStructures.py::TestCppNestedStructures::test_non_r_value_ref_in_body",
"test/testNestedStructures.py::TestCppNestedStructures::test_scope",
"test/testNestedStructures.py::TestCppNestedStructures::test_switch_case",
"test/testNestedStructures.py::TestCppNestedStructures::test_terminator_in_parentheses",
"test/testNestedStructures.py::TestCppNestedStructures::test_ternary_operator",
"test/testNestedStructures.py::TestCppNestedStructures::test_try_catch",
"test/testNestedStructures.py::TestCppNestedStructures::test_while",
"test/testNestedStructures.py::TestPythonNestedStructures::test_condition_in_if_structure",
"test/testNestedStructures.py::TestPythonNestedStructures::test_elif",
"test/testNestedStructures.py::TestPythonNestedStructures::test_for_structure",
"test/testNestedStructures.py::TestPythonNestedStructures::test_if_structure",
"test/testNestedStructures.py::TestPythonNestedStructures::test_no_structures",
"test/testNestedStructures.py::TestPythonNestedStructures::test_while"
]
| [
"test/testNestedStructures.py::TestCppNestedStructures::test_else_if",
"test/testNestedStructures.py::TestCppNestedStructures::test_equal_metric_structures",
"test/testNestedStructures.py::TestCppNestedStructures::test_gotcha_if_else"
]
| MIT License | 525 | [
"lizard_ext/lizardns.py",
"lizard.py"
]
| [
"lizard_ext/lizardns.py",
"lizard.py"
]
|
yola__yoconfigurator-35 | 54e4dfdba21ae87e08db9621ef2c2f4fb1a9e6cf | 2016-05-09 21:59:46 | 54e4dfdba21ae87e08db9621ef2c2f4fb1a9e6cf | diff --git a/.travis.yml b/.travis.yml
index 5a893c1..07aa865 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,7 +1,8 @@
language: python
python:
- - "2.6"
- "2.7"
-install:
- - pip install -r requirements.txt --use-mirrors
+ - "3.2"
+ - "3.3"
+ - "3.4"
+ - "3.5"
script: python setup.py test
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 2f7e009..a3529c9 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,12 @@
Change Log
==========
+Dev
+-----
+
+* Update to work with Python 2.7, 3.2, 3.3, 3.4, 3.5
+* Drop support for Python <= 2.6
+
0.4.6
-----
diff --git a/requirements.txt b/requirements.txt
index 05bf2df..e69de29 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +0,0 @@
-# Only for tests on Python < 2.7:
-argparse
-unittest2
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..2a9acf1
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal = 1
diff --git a/setup.py b/setup.py
index a7d8dbe..51b1d62 100755
--- a/setup.py
+++ b/setup.py
@@ -1,12 +1,7 @@
#!/usr/bin/env python
-import sys
-
from setuptools import setup, find_packages
-install_requires = []
-if sys.version_info < (2, 7):
- install_requires.append('argparse')
setup(
name='yoconfigurator',
@@ -18,6 +13,21 @@ setup(
version='0.4.6',
packages=find_packages(),
scripts=['bin/configurator.py'],
- install_requires=install_requires,
test_suite='yoconfigurator.tests',
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Natural Language :: English',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Topic :: Utilities',
+ ]
)
diff --git a/yoconfigurator/base.py b/yoconfigurator/base.py
index 546ae41..f339b95 100644
--- a/yoconfigurator/base.py
+++ b/yoconfigurator/base.py
@@ -1,15 +1,26 @@
-import imp
import json
import os
+import types
import sys
from yoconfigurator.dicts import DotDict, MissingValue
+try:
+ # SourceFileLoader added in Python 3.3
+ from importlib.machinery import SourceFileLoader
+except ImportError:
+ # imp.load_source deprecated in Python 3.3
+ from imp import load_source
+ _load_module = load_source
+else:
+ def _load_module(module_name, file_name):
+ return SourceFileLoader(module_name, file_name).load_module()
+
class DetectMissingEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, MissingValue):
- raise ValueError("Missing Value found in config: %s" % obj.name)
+ raise ValueError('Missing Value found in config: %s' % obj.name)
return super(DetectMissingEncoder, self).default(obj)
@@ -32,7 +43,7 @@ def get_config_module(config_pathname):
"""Imports the config file to yoconfigurator.configs.<config_basename>."""
configs_mod = 'yoconfigurator.configs'
if configs_mod not in sys.modules:
- sys.modules[configs_mod] = imp.new_module(configs_mod)
- module_name = os.path.basename(config_pathname).rsplit('.', 1)[-1]
+ sys.modules[configs_mod] = types.ModuleType(configs_mod)
+ module_name = os.path.basename(config_pathname).rsplit('.', 1)[0]
module_name = configs_mod + '.' + module_name
- return imp.load_source(module_name, config_pathname)
+ return _load_module(module_name, config_pathname)
diff --git a/yoconfigurator/credentials.py b/yoconfigurator/credentials.py
index 8ef29f9..e703222 100644
--- a/yoconfigurator/credentials.py
+++ b/yoconfigurator/credentials.py
@@ -4,5 +4,5 @@ import hashlib
def seeded_auth_token(client, service, seed):
"""Return an auth token based on the client+service+seed tuple."""
hash_func = hashlib.md5()
- hash_func.update(','.join((client, service, seed)))
+ hash_func.update(b','.join((client, service, seed)))
return hash_func.hexdigest()
diff --git a/yoconfigurator/dicts.py b/yoconfigurator/dicts.py
index 9a3e8c2..d3cfbaf 100644
--- a/yoconfigurator/dicts.py
+++ b/yoconfigurator/dicts.py
@@ -15,7 +15,7 @@ class DotDict(dict):
def __init__(self, *args, **kwargs):
super(DotDict, self).__init__(*args, **kwargs)
- for key, value in self.iteritems():
+ for key, value in self.items():
self[key] = self._convert_item(value)
def __setitem__(self, dottedkey, value):
@@ -90,10 +90,10 @@ class MissingValue(object):
self.name = name
def __getattr__(self, k):
- raise AttributeError("No value provided for %s" % self.name)
+ raise AttributeError('No value provided for %s' % self.name)
def get(self, k, default=None):
- raise KeyError("No value provided for %s" % self.name)
+ raise KeyError('No value provided for %s' % self.name)
__getitem__ = get
@@ -109,7 +109,7 @@ def merge_dicts(d1, d2, _path=None):
if _path is None:
_path = ()
if isinstance(d1, dict) and isinstance(d2, dict):
- for k, v in d2.iteritems():
+ for k, v in d2.items():
if isinstance(v, MissingValue) and v.name is None:
v.name = '.'.join(_path + (k,))
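The credentials change above is the usual Python 3 bytes-versus-text fix: hashlib digests only accept bytes. A minimal sketch mirroring the updated test in this record:
```
import hashlib

client, service, seed = b'foo', b'bar', b'baz'

digest = hashlib.md5()
digest.update(b','.join((client, service, seed)))
assert digest.hexdigest() == '5a9350198f854de4b2ab56f187f87707'  # value from the test patch

# On Python 3, a text string is rejected outright:
try:
    hashlib.md5().update(','.join(('foo', 'bar', 'baz')))
except TypeError:
    pass  # "Unicode-objects must be encoded before hashing"
```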
| Make it Python 3.x compatible
We'll need it for our new project, which will run under Python 3.x. | yola/yoconfigurator | diff --git a/.travis.yml b/.travis.yml
index 68116d5..e69de29 100644
--- a/yoconfigurator/tests/__init__.py
+++ b/yoconfigurator/tests/__init__.py
@@ -1,6 +0,0 @@
-import sys
-
-if sys.version_info >= (2, 7):
- import unittest
-else:
- import unittest2 as unittest
diff --git a/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py b/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py
index ac49920..7f14363 100644
--- a/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py
+++ b/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py
@@ -6,8 +6,8 @@ from yoconfigurator.dicts import filter_dict
def filter(config):
"""The subset of configuration keys to be made public."""
keys = [
- "myapp.hello",
- "myapp.some.deeply.nested.value",
- "myapp.oz"
+ 'myapp.hello',
+ 'myapp.some.deeply.nested.value',
+ 'myapp.oz'
]
return filter_dict(config, keys)
diff --git a/yoconfigurator/tests/test_base.py b/yoconfigurator/tests/test_base.py
index 6a33f03..4aff504 100644
--- a/yoconfigurator/tests/test_base.py
+++ b/yoconfigurator/tests/test_base.py
@@ -1,13 +1,12 @@
import json
import os
import shutil
+import unittest
from tempfile import mkdtemp
from ..base import DetectMissingEncoder, read_config, write_config
from ..dicts import merge_dicts, MissingValue
-from . import unittest
-
class TestDetectMissingEncoder(unittest.TestCase):
"""Not part of the public API"""
diff --git a/yoconfigurator/tests/test_configurator.py b/yoconfigurator/tests/test_configurator.py
index b99b63b..92f3d61 100644
--- a/yoconfigurator/tests/test_configurator.py
+++ b/yoconfigurator/tests/test_configurator.py
@@ -2,8 +2,7 @@ import json
import os
import subprocess
import sys
-
-from yoconfigurator.tests import unittest
+import unittest
class TestConfigurator(unittest.TestCase):
@@ -29,7 +28,7 @@ class TestConfigurator(unittest.TestCase):
stderr=subprocess.PIPE, env=env)
out, err = p.communicate()
self.assertEqual(p.wait(), 0)
- self.assertEqual(err, '')
+ self.assertEqual(err, b'')
def tearDown(self):
os.remove(self.pub_conf)
@@ -43,24 +42,24 @@ class TestConfigurator(unittest.TestCase):
def test_creates_a_config_that_looks_as_expected(self):
expected = {
- "yoconfigurator": {
- "app": "myapp"
+ 'yoconfigurator': {
+ 'app': 'myapp'
},
- "myapp": {
- "secret": "sauce",
- "some": {
- "deeply": {
- "nested": {
- "value": "Stefano likes beer"
+ 'myapp': {
+ 'secret': 'sauce',
+ 'some': {
+ 'deeply': {
+ 'nested': {
+ 'value': 'Stefano likes beer'
}
}
},
- "hello": "world",
- "oz": {
- "bears": True,
- "tigers": True,
- "lions": True,
- "zebras": False
+ 'hello': 'world',
+ 'oz': {
+ 'bears': True,
+ 'tigers': True,
+ 'lions': True,
+ 'zebras': False
}
}
}
diff --git a/yoconfigurator/tests/test_credentials.py b/yoconfigurator/tests/test_credentials.py
index 01c8438..4daa915 100644
--- a/yoconfigurator/tests/test_credentials.py
+++ b/yoconfigurator/tests/test_credentials.py
@@ -1,9 +1,9 @@
-from . import unittest
+import unittest
from ..credentials import seeded_auth_token
class TestSeededAuthToken(unittest.TestCase):
def test_seeded_auth(self):
- self.assertEqual(seeded_auth_token('foo', 'bar', 'baz'),
+ self.assertEqual(seeded_auth_token(b'foo', b'bar', b'baz'),
'5a9350198f854de4b2ab56f187f87707')
diff --git a/yoconfigurator/tests/test_dicts.py b/yoconfigurator/tests/test_dicts.py
index 626fa77..9682991 100644
--- a/yoconfigurator/tests/test_dicts.py
+++ b/yoconfigurator/tests/test_dicts.py
@@ -1,20 +1,14 @@
import copy
+import unittest
from yoconfigurator.dicts import (DeletedValue, DotDict, MissingValue,
filter_dict, merge_dicts)
-from yoconfigurator.tests import unittest
class DotDictTestCase(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- # Python < 2.7 compatibility:
- if not hasattr(cls, 'assertIsInstance'):
- cls.assertIsInstance = lambda self, a, b: self.assertTrue(
- isinstance(a, b))
def test_create(self):
- 'ensure that we support all dict creation methods'
+ """Ensure that we support all dict creation methods"""
by_attr = DotDict(one=1, two=2)
by_dict = DotDict({'one': 1, 'two': 2})
by_list = DotDict([['one', 1], ['two', 2]])
@@ -22,30 +16,30 @@ class DotDictTestCase(unittest.TestCase):
self.assertEqual(by_attr, by_list)
def test_create_tree(self):
- 'ensure that nested dicts are converted to DotDicts'
+ """Ensure that nested dicts are converted to DotDicts"""
tree = DotDict({'foo': {'bar': True}})
self.assertIsInstance(tree['foo'], DotDict)
def test_list_of_dicts(self):
- 'ensure that nested dicts insied lists are converted to DotDicts'
+ """Ensure that nested dicts insied lists are converted to DotDicts"""
tree = DotDict({'foo': [{'bar': True}]})
self.assertIsInstance(tree['foo'][0], DotDict)
def test_mutablity(self):
- 'ensure that the whole tree is mutable'
+ """Ensure that the whole tree is mutable"""
tree = DotDict({'foo': {'bar': True}})
self.assertTrue(tree.foo.bar)
tree.foo.bar = False
self.assertFalse(tree.foo.bar)
def test_setdefault(self):
- 'ensure that the setdefault works'
+ """Ensure that the setdefault works"""
tree = DotDict({'foo': 'bar'})
tree.setdefault('baz', {})
self.assertIsInstance(tree.baz, DotDict)
def test_update(self):
- 'ensure that update works'
+ """Ensure that update works"""
tree = DotDict({'foo': 'bar'})
tree.update({'foo': {}})
self.assertIsInstance(tree.foo, DotDict)
@@ -55,17 +49,17 @@ class DotDictTestCase(unittest.TestCase):
self.assertIsInstance(tree.baz, DotDict)
def test_deepcopy(self):
- 'ensure that DotDict can be deepcopied'
+ """Ensure that DotDict can be deepcopied"""
tree = DotDict({'foo': 'bar'})
self.assertEqual(tree, copy.deepcopy(tree))
def test_get_dotted(self):
- 'ensure that DotDict can get values using a dotted key'
+ """Ensure that DotDict can get values using a dotted key"""
tree = DotDict({'foo': {'bar': {'baz': 'huzzah'}}})
self.assertEqual(tree['foo.bar.baz'], 'huzzah')
def test_set_dotted(self):
- 'ensure that DotDict can set values using a dotted key'
+ """Ensure that DotDict can set values using a dotted key"""
tree = DotDict()
tree['foo.bar.baz'] = 'huzzah'
self.assertEqual(tree['foo.bar.baz'], 'huzzah')
@@ -84,7 +78,7 @@ class TestMissingValue(unittest.TestCase):
class TestMergeDicts(unittest.TestCase):
def test_merge(self):
- 'ensure that the new entries in B are merged into A'
+ """Ensure that the new entries in B are merged into A"""
a = DotDict(a=1, b=1)
b = DotDict(c=1)
c = merge_dicts(a, b)
@@ -93,14 +87,14 @@ class TestMergeDicts(unittest.TestCase):
self.assertEqual(c.c, 1)
def test_filter(self):
- 'ensure that the subset of A is filtered out using keys'
+ """Ensure that the subset of A is filtered out using keys"""
a = DotDict(a=1, b=1)
keys = ['a']
b = filter_dict(a, keys)
self.assertEqual(b, {'a': 1})
def test_replacement(self):
- 'ensure that the new entries in B replace equivalents in A'
+ """Ensure that the new entries in B replace equivalents in A"""
a = DotDict(a=1, b=1)
b = DotDict(b=2)
c = merge_dicts(a, b)
@@ -108,7 +102,7 @@ class TestMergeDicts(unittest.TestCase):
self.assertEqual(c.b, 2)
def test_sub_merge(self):
- 'ensure that a subtree from B is merged with the same subtree in A'
+ """Ensure that a subtree from B is merged with the same subtree in A"""
a = DotDict(a=1, sub={'c': 1})
b = DotDict(b=2, sub={'d': 2})
c = merge_dicts(a, b)
@@ -118,7 +112,7 @@ class TestMergeDicts(unittest.TestCase):
self.assertEqual(c.sub.d, 2)
def test_sub_replacement(self):
- 'ensure that a subtree from B is merged with the same subtree in A'
+ """Ensure that a subtree from B is merged with the same subtree in A"""
a = DotDict(a=1, sub={'c': 1})
b = DotDict(b=2, sub={'c': 2})
c = merge_dicts(a, b)
@@ -127,7 +121,7 @@ class TestMergeDicts(unittest.TestCase):
self.assertEqual(c.sub.c, 2)
def test_replace_missing_with_dict(self):
- 'ensure that a subtree from B replaces a MissingValue in A'
+ """Ensure that a subtree from B replaces a MissingValue in A"""
a = DotDict(a=1, sub=MissingValue('sub'))
b = DotDict(b=2, sub={'c': 2})
c = merge_dicts(a, b)
@@ -136,21 +130,21 @@ class TestMergeDicts(unittest.TestCase):
self.assertEqual(c.sub.c, 2)
def test_unnamed_missing_value(self):
- 'ensure that missing values get a name assigned'
+ """Ensure that missing values get a name assigned"""
a = DotDict()
b = DotDict(foo=MissingValue())
c = merge_dicts(a, b)
self.assertEqual(c.foo.name, 'foo')
def test_unnamed_missing_value_in_new_tree(self):
- 'ensure that missing values in new sub-trees get a name assigned'
+ """Ensure that missing values in new sub-trees get a name assigned"""
a = DotDict()
b = DotDict(foo={'bar': MissingValue()})
c = merge_dicts(a, b)
self.assertEqual(c.foo.bar.name, 'foo.bar')
def test_merge_lists(self):
- 'ensure that leaf lists are merged'
+ """Ensure that leaf lists are merged"""
a = DotDict(a=1, sub=[1, 2])
b = DotDict(b=2, sub=[3, 4])
c = merge_dicts(a, b)
@@ -159,7 +153,7 @@ class TestMergeDicts(unittest.TestCase):
self.assertEqual(c.sub, [1, 2, 3, 4])
def test_merge_incompatible(self):
- 'ensure that the merged items are of the same types'
+ """Ensure that the merged items are of the same types"""
a = DotDict(foo=42)
b = DotDict(foo='42')
self.assertRaises(TypeError, merge_dicts, a, b)
@@ -167,14 +161,14 @@ class TestMergeDicts(unittest.TestCase):
self.assertRaises(TypeError, merge_dicts, a, b)
def test_replace_none(self):
- 'ensure that None can be replaced with another type'
+ """Ensure that None can be replaced with another type"""
a = DotDict(foo=None)
b = DotDict(foo='foo')
c = merge_dicts(a, b)
self.assertEqual(c, {'foo': 'foo'})
def test_deltedvalue(self):
- 'ensure that deletedvalue deletes values'
+ """Ensure that deletedvalue deletes values"""
a = DotDict(foo=42)
b = DotDict(foo=DeletedValue())
c = merge_dicts(a, b)
diff --git a/yoconfigurator/tests/test_script.py b/yoconfigurator/tests/test_script.py
index 7207384..d7f61c3 100644
--- a/yoconfigurator/tests/test_script.py
+++ b/yoconfigurator/tests/test_script.py
@@ -1,8 +1,7 @@
import os
import subprocess
import sys
-
-from . import unittest
+import unittest
class TestScript(unittest.TestCase):
@@ -18,4 +17,4 @@ class TestScript(unittest.TestCase):
stderr=subprocess.PIPE, env=env)
out, err = p.communicate()
self.assertEqual(p.wait(), 0)
- self.assertEqual(err, '')
+ self.assertEqual(err, b'')
diff --git a/yoconfigurator/tests/test_smush.py b/yoconfigurator/tests/test_smush.py
index 42af2d1..2bb9028 100644
--- a/yoconfigurator/tests/test_smush.py
+++ b/yoconfigurator/tests/test_smush.py
@@ -1,14 +1,13 @@
import json
import os
import shutil
+import unittest
from tempfile import mkdtemp
from ..dicts import MissingValue
from ..smush import (config_sources, available_sources, smush_config,
LenientJSONEncoder)
-from . import unittest
-
class TestLenientJSONEncoder(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 6
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"unittest2",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
linecache2==1.0.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
six==1.17.0
tomli==2.2.1
traceback2==1.4.0
unittest2==1.1.0
-e git+https://github.com/yola/yoconfigurator.git@54e4dfdba21ae87e08db9621ef2c2f4fb1a9e6cf#egg=yoconfigurator
| name: yoconfigurator
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- linecache2==1.0.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
- traceback2==1.4.0
- unittest2==1.1.0
prefix: /opt/conda/envs/yoconfigurator
| [
"yoconfigurator/tests/test_base.py::TestReadWriteConfig::test_read_config",
"yoconfigurator/tests/test_base.py::TestReadWriteConfig::test_substituted_missing_value",
"yoconfigurator/tests/test_configurator.py::TestConfigurator::test_creates_a_config_json",
"yoconfigurator/tests/test_configurator.py::TestConfigurator::test_creates_a_config_that_looks_as_expected",
"yoconfigurator/tests/test_configurator.py::TestConfigurator::test_creates_a_public_config",
"yoconfigurator/tests/test_configurator.py::TestConfigurator::test_creates_a_public_config_that_does_not_contain_a_secret",
"yoconfigurator/tests/test_configurator.py::TestConfigurator::test_creates_a_public_config_that_looks_as_expected",
"yoconfigurator/tests/test_credentials.py::TestSeededAuthToken::test_seeded_auth",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_create",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_create_tree",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_deepcopy",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_get_dotted",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_list_of_dicts",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_mutablity",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_set_dotted",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_setdefault",
"yoconfigurator/tests/test_dicts.py::DotDictTestCase::test_update",
"yoconfigurator/tests/test_dicts.py::TestMissingValue::test_attribute_access",
"yoconfigurator/tests/test_dicts.py::TestMissingValue::test_dict_access",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_deltedvalue",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_filter",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_merge",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_merge_incompatible",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_merge_lists",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_replace_missing_with_dict",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_replace_none",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_replacement",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_sub_merge",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_sub_replacement",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_unnamed_missing_value",
"yoconfigurator/tests/test_dicts.py::TestMergeDicts::test_unnamed_missing_value_in_new_tree",
"yoconfigurator/tests/test_smush.py::TestSmush::test_initial",
"yoconfigurator/tests/test_smush.py::TestSmush::test_missing_value",
"yoconfigurator/tests/test_smush.py::TestSmush::test_multiple",
"yoconfigurator/tests/test_smush.py::TestSmush::test_nop",
"yoconfigurator/tests/test_smush.py::TestSmush::test_single"
]
| []
| [
"yoconfigurator/tests/test_base.py::TestDetectMissingEncoder::test_encode",
"yoconfigurator/tests/test_base.py::TestDetectMissingEncoder::test_missing",
"yoconfigurator/tests/test_base.py::TestDetectMissingEncoder::test_other",
"yoconfigurator/tests/test_base.py::TestReadWriteConfig::test_missing_value",
"yoconfigurator/tests/test_base.py::TestReadWriteConfig::test_write_config",
"yoconfigurator/tests/test_script.py::TestScript::test_help",
"yoconfigurator/tests/test_smush.py::TestLenientJSONEncoder::test_encode",
"yoconfigurator/tests/test_smush.py::TestLenientJSONEncoder::test_missing",
"yoconfigurator/tests/test_smush.py::TestLenientJSONEncoder::test_unencodable",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_available_sources",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_build_implies_local",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_build_source_order",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_local_source_order",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_multiple_config_dirs",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_override_config_dirs",
"yoconfigurator/tests/test_smush.py::TestConfigSources::test_source_order"
]
| []
| MIT License | 527 | [
"yoconfigurator/credentials.py",
"yoconfigurator/dicts.py",
"setup.py",
"yoconfigurator/base.py",
"CHANGELOG.rst",
".travis.yml",
"setup.cfg",
"requirements.txt"
]
| [
"yoconfigurator/credentials.py",
"yoconfigurator/dicts.py",
"setup.py",
"yoconfigurator/base.py",
"CHANGELOG.rst",
".travis.yml",
"setup.cfg",
"requirements.txt"
]
|
|
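As a quick illustration of the merge semantics that the yoconfigurator tests above exercise, here is a minimal sketch using the package's own `DotDict`/`merge_dicts` API (this assumes the yoconfigurator package from this record is installed; the values are arbitrary):

    from yoconfigurator.dicts import DotDict, merge_dicts

    a = DotDict(a=1, sub={'c': 1})
    b = DotDict(b=2, sub={'d': 2})

    # merge_dicts combines the two trees recursively: new keys from `b` are
    # added, and matching sub-trees are merged rather than overwritten.
    c = merge_dicts(a, b)

    assert c.b == 2                       # new top-level key taken from `b`
    assert c.sub.c == 1 and c.sub.d == 2  # sub-trees merged, not replaced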
adamtheturtle__chroot_tasker-39 | 7b0b5a74595f65c1b615da9365dea300435181b7 | 2016-05-09 22:15:22 | 7b0b5a74595f65c1b615da9365dea300435181b7 | diff --git a/README.rst b/README.rst
index ed34f92..b95f329 100644
--- a/README.rst
+++ b/README.rst
@@ -67,8 +67,8 @@ To use ``tasker``:
# An image to download, extract and create a chroot jail in.
image_url = 'http://example.com/image.tar'
- # The image will be downloaded and extracted into the parent.
- parent = pathlib.Path(os.getcwd())
+ # The image will be downloaded and extracted into the download_path.
+ download_path = pathlib.Path(os.getcwd())
# See ``args`` at
# https://docs.python.org/2/library/subprocess.html#subprocess.Popen
@@ -77,7 +77,7 @@ To use ``tasker``:
task = Task(
image_url=image_url,
args=args,
- parent=parent,
+ download_path=download_path,
)
pid = task.process.pid
diff --git a/cli/cli.py b/cli/cli.py
index 4171093..c3e9b51 100644
--- a/cli/cli.py
+++ b/cli/cli.py
@@ -28,6 +28,6 @@ def create(image_url, args):
task = Task(
image_url=image_url,
args=args.split(),
- parent=pathlib.Path(os.getcwd()),
+ download_path=pathlib.Path(os.getcwd()),
)
print(task.process.pid)
diff --git a/tasker/tasker.py b/tasker/tasker.py
index b1c4588..3ae608f 100644
--- a/tasker/tasker.py
+++ b/tasker/tasker.py
@@ -12,14 +12,14 @@ import uuid
import pathlib
-def _create_filesystem_dir(image_url, parent):
+def _create_filesystem_dir(image_url, download_path):
"""
- Download a ``.tar`` file, extract it into ``parent`` and delete the
+ Download a ``.tar`` file, extract it into ``download_path`` and delete the
``.tar`` file.
:param str image_url: The url of a ``.tar`` file.
- :param pathlib.Path parent: The parent to extract the downloaded image
- into.
+ :param pathlib.Path download_path: The parent to extract the downloaded
+ image into.
:rtype: pathlib.Path
:returns: The path to the extracted image.
@@ -27,12 +27,12 @@ def _create_filesystem_dir(image_url, parent):
image = urllib.request.urlopen(image_url)
# Use ``image.url`` below instead of image_url in case of a redirect.
image_path = pathlib.Path(urllib.parse.urlparse(image.url).path)
- tar_file = parent.joinpath(image_path.name)
+ tar_file = download_path.joinpath(image_path.name)
with open(str(tar_file), 'wb') as tf:
tf.write(image.read())
unique_id = uuid.uuid4().hex
- filesystem_path = parent.joinpath(image_path.stem + unique_id)
+ filesystem_path = download_path.joinpath(image_path.stem + unique_id)
with tarfile.open(str(tar_file)) as tf:
tf.extractall(str(filesystem_path))
@@ -70,14 +70,14 @@ class Task(object):
A process in a chroot jail.
"""
- def __init__(self, image_url, args, parent):
+ def __init__(self, image_url, args, download_path):
"""
Create a new task.
"""
filesystem = _create_filesystem_dir(
image_url=image_url,
- parent=parent,
+ download_path=download_path,
)
self.process = _run_chroot_process(
filesystem=filesystem,
| Remove unused stdio options | adamtheturtle/chroot_tasker | diff --git a/tasker/tests/test_tasker.py b/tasker/tests/test_tasker.py
index 20cf016..90fb351 100644
--- a/tasker/tests/test_tasker.py
+++ b/tasker/tests/test_tasker.py
@@ -38,14 +38,14 @@ class TestCreateFilestystemDir(object):
def test_filesystem_dir_created(self, tmpdir):
"""
The given ``.tar`` file is downloaded and extracted to the given
- parent.
+ download path.
"""
image_url = self._create_tarfile(tmpdir=tmpdir.mkdir('server'))
client = pathlib.Path(tmpdir.mkdir('client').strpath)
extracted_filesystem = _create_filesystem_dir(
image_url=image_url,
- parent=client,
+ download_path=client,
)
assert extracted_filesystem.parent == client
@@ -60,12 +60,12 @@ class TestCreateFilestystemDir(object):
client = pathlib.Path(tmpdir.mkdir('client').strpath)
extracted_filesystem_1 = _create_filesystem_dir(
image_url=image_url,
- parent=client,
+ download_path=client,
)
extracted_filesystem_2 = _create_filesystem_dir(
image_url=image_url,
- parent=client,
+ download_path=client,
)
assert extracted_filesystem_1 != extracted_filesystem_2
@@ -79,7 +79,7 @@ class TestCreateFilestystemDir(object):
client = pathlib.Path(tmpdir.mkdir('client').strpath)
extracted_filesystem = _create_filesystem_dir(
image_url=image_url,
- parent=client,
+ download_path=client,
)
client_children = [item for item in client.iterdir()]
@@ -98,7 +98,7 @@ class TestRunChrootProcess(object):
"""
filesystem = _create_filesystem_dir(
image_url=ROOTFS_URI,
- parent=pathlib.Path(tmpdir.strpath),
+ download_path=pathlib.Path(tmpdir.strpath),
)
_run_chroot_process(
@@ -118,7 +118,7 @@ class TestRunChrootProcess(object):
"""
filesystem = _create_filesystem_dir(
image_url=ROOTFS_URI,
- parent=pathlib.Path(tmpdir.strpath),
+ download_path=pathlib.Path(tmpdir.strpath),
)
old_pids = psutil.pids()
@@ -135,7 +135,7 @@ class TestRunChrootProcess(object):
"""
filesystem = _create_filesystem_dir(
image_url=ROOTFS_URI,
- parent=pathlib.Path(tmpdir.strpath),
+ download_path=pathlib.Path(tmpdir.strpath),
)
process = _run_chroot_process(
@@ -156,7 +156,9 @@ class TestTask(object):
A task can be created which starts a new process running a given
command.
"""
- args = ['echo', '1']
- parent = pathlib.Path(tmpdir.strpath)
- task = Task(image_url=ROOTFS_URI, args=args, parent=parent)
+ task = Task(
+ image_url=ROOTFS_URI,
+ args=['echo', '1'],
+ download_path=pathlib.Path(tmpdir.strpath),
+ )
assert isinstance(task.process.pid, int)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
-e git+https://github.com/adamtheturtle/chroot_tasker.git@7b0b5a74595f65c1b615da9365dea300435181b7#egg=Chroot_Tasker
click==6.6
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
psutil==4.1.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
| name: chroot_tasker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==6.6
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- psutil==4.1.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/chroot_tasker
| [
"tasker/tests/test_tasker.py::TestCreateFilestystemDir::test_filesystem_dir_created",
"tasker/tests/test_tasker.py::TestCreateFilestystemDir::test_multiple_filesystems",
"tasker/tests/test_tasker.py::TestCreateFilestystemDir::test_image_removed",
"tasker/tests/test_tasker.py::TestRunChrootProcess::test_run_chroot_process",
"tasker/tests/test_tasker.py::TestRunChrootProcess::test_process_returned",
"tasker/tests/test_tasker.py::TestRunChrootProcess::test_default_io",
"tasker/tests/test_tasker.py::TestTask::test_create_task"
]
| []
| []
| []
| null | 528 | [
"README.rst",
"cli/cli.py",
"tasker/tasker.py"
]
| [
"README.rst",
"cli/cli.py",
"tasker/tasker.py"
]
|
|
adamtheturtle__chroot_tasker-41 | a11bb67cbc42ec7a8e05c1e501386e223d1d1e85 | 2016-05-10 14:54:33 | a11bb67cbc42ec7a8e05c1e501386e223d1d1e85 | diff --git a/README.rst b/README.rst
index b95f329..0c24344 100644
--- a/README.rst
+++ b/README.rst
@@ -36,8 +36,16 @@ One way to use this is:
.. code:: sh
- $ sudo $(which tasker) create <IMAGE_URL> "<COMMANDS>"
- 8935 # This is the PID of the new process.
+ # Root permissions are necessary to create the task.
+ $ sudo $(which tasker) create <IMAGE_URL> "sleep 100"
+ 8935 # This is the ID of the new task.
+ $ tasker health_check 8935
+ exists: True
+ status: sleeping
+ $ sudo $(which tasker) send_signal SIGINT
+ $ tasker health_check 8935
+ exists: False
+ status: None
``tasker`` downloads the image from the given ``<IMAGE_URL>`` into the current working directory.
Also in the directory, the image is untarred to create a "filesystem".
@@ -49,38 +57,42 @@ Library
``tasker`` is a Python library.
-To install ``tasker``:
+Installation
+^^^^^^^^^^^^
.. code:: sh
pip install -e .
+API
+^^^
+
To use ``tasker``:
.. code:: python
import os
import pathlib
+ import signal
from tasker.tasker import Task
- # An image to download, extract and create a chroot jail in.
- image_url = 'http://example.com/image.tar'
-
- # The image will be downloaded and extracted into the download_path.
- download_path = pathlib.Path(os.getcwd())
-
- # See ``args`` at
- # https://docs.python.org/2/library/subprocess.html#subprocess.Popen
- args = ['echo', '1']
-
task = Task(
- image_url=image_url,
- args=args,
- download_path=download_path,
+ # An image to download, extract and create a chroot jail in.
+ image_url='http://example.com/image.tar',
+ # A command to run in the extracted filesystem.
+ args=['top'],
+ # Where the image will be downloaded and extracted into.
+ download_path=pathlib.Path(os.getcwd()),
)
- pid = task.process.pid
+ task_health = task.get_health()
+ # {"running": True, "time": "0:46"}
+
+ task.send_signal(signal.SIGTERM)
+
+ task_health = task.get_health()
+ # {"running": False, "time": "0:46"}
Supported platforms
-------------------
@@ -108,7 +120,7 @@ In the Vagrant box, create a ``virtualenv``:
.. code:: sh
- mkvirtualenv -p python3.5 chroot_tasker
+ mkvirtualenv -p python3.5 tasker
Install the test dependencies:
@@ -152,4 +164,12 @@ There are at least three options for the directory in which to create the filesy
The current implementation is (2).
Ideally there would be multiple of the above, with (2) as the default.
-The issue for this is https://github.com/adamtheturtle/chroot_tasker/issues/24.
\ No newline at end of file
+The issue for this is https://github.com/adamtheturtle/chroot_tasker/issues/24.
+
+Identifiers
+^^^^^^^^^^^
+
+This uses PIDs as identifiers.
+This is not safe - PIDs get reused and so this could end up with a user manipulating the wrong process.
+This was a simple to implement strategy.
+A long term solution might be stateful and have a mapping of tasks to unique identifiers.
diff --git a/cli/cli.py b/cli/cli.py
index 5b4afaa..f5f414e 100644
--- a/cli/cli.py
+++ b/cli/cli.py
@@ -5,6 +5,7 @@ CLI for creating and interacting with tasks.
import os
import pathlib
import shlex
+from signal import Signals
import click
@@ -35,4 +36,33 @@ def create(image_url, args):
args=shlex.split(args),
download_path=pathlib.Path(os.getcwd()),
)
- print(task.process.pid)
+
+ print(task.id)
+
+
[email protected]('health_check')
[email protected]('task_id')
+def health_check(task_id):
+ """
+ Check the health of a task.
+
+ :param str task_id: The id of an existing task.
+ """
+ task = Task(existing_task=int(task_id))
+ health = task.get_health()
+ print('exists: ' + str(health['exists']))
+ print('status: ' + str(health['status']))
+
+
[email protected]('send_signal')
[email protected]('task_id')
[email protected]('signal')
+def send_signal(task_id, signal):
+ """
+ Send a signal to a process started by an existing task.
+
+ :param str task_id: The id of an existing task.
+ :param str signal: The signal to send.
+ """
+ task = Task(existing_task=int(task_id))
+ task.send_signal(Signals[signal])
diff --git a/tasker/tasker.py b/tasker/tasker.py
index b1e86c9..f475971 100644
--- a/tasker/tasker.py
+++ b/tasker/tasker.py
@@ -3,8 +3,10 @@ Create and interact with tasks in a chroot jail.
"""
import os
+import psutil
import subprocess
import tarfile
+import time
import urllib.parse
import urllib.request
import uuid
@@ -62,6 +64,12 @@ def _run_chroot_process(filesystem, args):
os.fchdir(real_root)
os.chroot(".")
os.close(real_root)
+
+ # On some platforms it seems to take some time for a process to start,
+ # even after this point. Therefore sleep for 5ms to ensure platform
+ # parity. This seems to be necessary on Travis CI hosted.
+ time.sleep(0.05)
+
return process
@@ -70,7 +78,36 @@ class Task(object):
A process in a chroot jail.
"""
- def __init__(self, image_url, args, download_path):
+ def get_health(self):
+ """
+ Get details of the task's health.
+
+ :rtype: dict
+ :returns: The task's process's status.
+ """
+ if self._process is not None:
+ try:
+ status = self._process.status()
+ return {'exists': True, 'status': status}
+ except psutil.NoSuchProcess:
+ pass
+
+ return {'exists': False, 'status': None}
+
+ def send_signal(self, signal):
+ """
+ Send a signal to the task's process.
+
+ :param int signal: The signal to send.
+ """
+ self._process.send_signal(signal)
+ try:
+ os.wait()
+ except OSError: # pragma: no cover
+ pass
+
+ def __init__(self, image_url=None, args=None, download_path=None,
+ existing_task=None):
"""
Create a new task, which is a process running inside a chroot with root
being a downloaded image's root.
@@ -78,13 +115,29 @@ class Task(object):
:param str image_url: The url of a ``.tar`` file.
:param list args: List of strings. See ``subprocess.Popen.args``.
:param pathlib.Path download_path: The parent to extract the downloaded
- image into.
+ image into.
+ :param existing_task: The id of an existing task. If this is given,
+ other parameters are ignored and no new process is started.
+
+ :ivar int id: An identifier for the task.
"""
- filesystem = _create_filesystem_dir(
- image_url=image_url,
- download_path=download_path,
- )
- self.process = _run_chroot_process(
- filesystem=filesystem,
- args=args,
- )
+ if existing_task is not None:
+ try:
+ self._process = psutil.Process(existing_task)
+ except psutil.NoSuchProcess:
+ self._process = None
+
+ self.id = existing_task
+ else:
+ filesystem = _create_filesystem_dir(
+ image_url=image_url,
+ download_path=download_path,
+ )
+
+ process = _run_chroot_process(
+ filesystem=filesystem,
+ args=args,
+ )
+
+ self.id = process.pid
+ self._process = psutil.Process(self.id)
| Library function for sending signals
Also think how this will be presented as part of the CLI. | adamtheturtle/chroot_tasker | diff --git a/cli/tests/test_cli.py b/cli/tests/test_cli.py
index c84b10c..dea2867 100644
--- a/cli/tests/test_cli.py
+++ b/cli/tests/test_cli.py
@@ -4,12 +4,11 @@ Tests for ``cli.cli``.
import os
-import psutil
-
from click.testing import CliRunner
from cli.cli import cli
from common.testtools import ROOTFS_URI
+from tasker.tasker import Task
class TestCreate(object):
@@ -26,12 +25,27 @@ class TestCreate(object):
os.chdir(tmpdir.strpath)
runner = CliRunner()
- subcommand = 'create'
- commands = 'sleep 10'
- result = runner.invoke(cli, [subcommand, ROOTFS_URI, commands])
- process = psutil.Process(int(result.output))
- cmdline = process.cmdline()
- process.kill()
-
+ commands = 'sleep 5'
+ result = runner.invoke(cli, ['create', ROOTFS_URI, commands])
assert result.exit_code == 0
- assert cmdline == commands.split()
+ task = Task(existing_task=int(result.output))
+ assert task._process.cmdline() == commands.split()
+
+ def test_send_signal_healthcheck(self, tmpdir):
+ """
+ Sending a SIGTERM signal to a task stops the process running.
+ The status before and after is relayed by the healthcheck function.
+ """
+ # Change directory to temporary directory so as not to pollute current
+ # working directory with downloaded filesystem.
+ os.chdir(tmpdir.strpath)
+
+ runner = CliRunner()
+ create = runner.invoke(cli, ['create', ROOTFS_URI, 'sleep 5'])
+ task_id = create.output
+
+ before_int = runner.invoke(cli, ['health_check', task_id])
+ assert before_int.output == 'exists: True\nstatus: sleeping\n'
+ runner.invoke(cli, ['send_signal', task_id, 'SIGINT'])
+ after_int = runner.invoke(cli, ['health_check', task_id])
+ assert after_int.output == 'exists: False\nstatus: None\n'
diff --git a/tasker/tests/test_tasker.py b/tasker/tests/test_tasker.py
index 77eb1ae..a491625 100644
--- a/tasker/tests/test_tasker.py
+++ b/tasker/tests/test_tasker.py
@@ -2,8 +2,8 @@
Tests for ``tasker.tasker``.
"""
+import signal
import tarfile
-import time
import pathlib
import psutil
@@ -106,8 +106,6 @@ class TestRunChrootProcess(object):
args=['touch', '/example.txt'],
)
- # ``touch`` takes a short time to work.
- time.sleep(0.01)
assert filesystem.joinpath('example.txt').exists()
def test_process_returned(self, tmpdir):
@@ -156,7 +154,36 @@ class TestTask(object):
"""
task = Task(
image_url=ROOTFS_URI,
- args=['echo', '1'],
+ args=['sleep', '5'],
+ download_path=pathlib.Path(tmpdir.strpath),
+ )
+
+ assert task.get_health() == {'exists': True, 'status': 'sleeping'}
+
+ def test_send_signal(self, tmpdir):
+ """
+ Sending a ``SIGINT`` signal to ``task.send_signal`` kills the child
+ process.
+ """
+ task = Task(
+ image_url=ROOTFS_URI,
+ args=['sleep', '5'],
download_path=pathlib.Path(tmpdir.strpath),
)
- assert isinstance(task.process.pid, int)
+ task.send_signal(signal.SIGINT)
+ assert task.get_health() == {'exists': False, 'status': None}
+
+ def test_existing_task(self, tmpdir):
+ """
+ It is possible to get an existing task by its id.
+ """
+ task = Task(
+ image_url=ROOTFS_URI,
+ args=['sleep', '5'],
+ download_path=pathlib.Path(tmpdir.strpath),
+ )
+
+ other_task = Task(existing_task=task.id)
+ # Interrupting one task interrupts the other, so they are the same task
+ task.send_signal(signal.SIGINT)
+ assert other_task.get_health() == {'exists': False, 'status': None}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"coveralls",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
-e git+https://github.com/adamtheturtle/chroot_tasker.git@a11bb67cbc42ec7a8e05c1e501386e223d1d1e85#egg=Chroot_Tasker
click==6.6
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
psutil==4.1.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
| name: chroot_tasker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==6.6
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- psutil==4.1.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/chroot_tasker
| [
"cli/tests/test_cli.py::TestCreate::test_create_task",
"cli/tests/test_cli.py::TestCreate::test_send_signal_healthcheck",
"tasker/tests/test_tasker.py::TestTask::test_create",
"tasker/tests/test_tasker.py::TestTask::test_send_signal",
"tasker/tests/test_tasker.py::TestTask::test_existing_task"
]
| []
| [
"tasker/tests/test_tasker.py::TestCreateFilestystemDir::test_filesystem_dir_created",
"tasker/tests/test_tasker.py::TestCreateFilestystemDir::test_multiple_filesystems",
"tasker/tests/test_tasker.py::TestCreateFilestystemDir::test_image_removed",
"tasker/tests/test_tasker.py::TestRunChrootProcess::test_run_chroot_process",
"tasker/tests/test_tasker.py::TestRunChrootProcess::test_process_returned",
"tasker/tests/test_tasker.py::TestRunChrootProcess::test_default_io"
]
| []
| null | 529 | [
"README.rst",
"cli/cli.py",
"tasker/tasker.py"
]
| [
"README.rst",
"cli/cli.py",
"tasker/tasker.py"
]
|
|
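The heart of the chroot_tasker-41 patch above is that `Task` gains `get_health()` and `send_signal()` (backed by `psutil`) and can be re-attached to an existing task via `existing_task`. Below is a minimal sketch of that lifecycle, mirroring the record's tests; the image URL is a placeholder, and, as the record's README notes, creating the task requires root privileges:

    import os
    import pathlib
    import signal

    from tasker.tasker import Task

    IMAGE_URL = 'http://example.com/rootfs.tar'  # placeholder .tar of a root filesystem

    # Download and extract the image, then run `sleep 5` inside a chroot jail.
    task = Task(
        image_url=IMAGE_URL,
        args=['sleep', '5'],
        download_path=pathlib.Path(os.getcwd()),
    )
    print(task.get_health())        # {'exists': True, 'status': 'sleeping'}

    # Re-attach to the same task by id, then interrupt it.
    same_task = Task(existing_task=task.id)
    task.send_signal(signal.SIGINT)
    print(same_task.get_health())   # {'exists': False, 'status': None}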
dask__dask-1150 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | 2016-05-10 15:29:30 | 71e3e413d6e00942de3ff32a3ba378408f2648e9 | diff --git a/dask/array/random.py b/dask/array/random.py
index 71050b304..9a1b0b364 100644
--- a/dask/array/random.py
+++ b/dask/array/random.py
@@ -44,8 +44,13 @@ class RandomState(object):
self._numpy_state.seed(seed)
def _wrap(self, func, *args, **kwargs):
+ """ Wrap numpy random function to produce dask.array random function
+
+ extra_chunks should be a chunks tuple to append to the end of chunks
+ """
size = kwargs.pop('size')
chunks = kwargs.pop('chunks')
+ extra_chunks = kwargs.pop('extra_chunks', ())
if not isinstance(size, (tuple, list)):
size = (size,)
@@ -62,12 +67,13 @@ class RandomState(object):
seeds = different_seeds(len(sizes), self._numpy_state)
token = tokenize(seeds, size, chunks, args, kwargs)
name = 'da.random.{0}-{1}'.format(func.__name__, token)
- keys = product([name], *[range(len(bd)) for bd in chunks])
+ keys = product([name], *([range(len(bd)) for bd in chunks]
+ + [[0]] * len(extra_chunks)))
vals = ((_apply_random, func.__name__, seed, size, args, kwargs)
for seed, size in zip(seeds, sizes))
dsk = dict(zip(keys, vals))
- return Array(dsk, name, chunks, dtype=dtype)
+ return Array(dsk, name, chunks + extra_chunks, dtype=dtype)
@doc_wraps(np.random.RandomState.beta)
def beta(self, a, b, size=None, chunks=None):
@@ -144,7 +150,11 @@ class RandomState(object):
return self._wrap(np.random.RandomState.logseries, p,
size=size, chunks=chunks)
- # multinomial
+ @doc_wraps(np.random.RandomState.multinomial)
+ def multinomial(self, n, pvals, size=None, chunks=None):
+ return self._wrap(np.random.RandomState.multinomial, n, pvals,
+ size=size, chunks=chunks,
+ extra_chunks=((len(pvals),),))
@doc_wraps(np.random.RandomState.negative_binomial)
def negative_binomial(self, n, p, size=None, chunks=None):
@@ -295,6 +305,7 @@ laplace = _state.laplace
logistic = _state.logistic
lognormal = _state.lognormal
logseries = _state.logseries
+multinomial = _state.multinomial
negative_binomial = _state.negative_binomial
noncentral_chisquare = _state.noncentral_chisquare
noncentral_f = _state.noncentral_f
| Multinomial random generator
`dask/array/random.py` is missing a multinomial random generator (there is a placeholder `# multinomial`).
Will dask have a multinomial random generator at some point? Does it require a significantly different approach than the other generators? | dask/dask | diff --git a/dask/array/tests/test_random.py b/dask/array/tests/test_random.py
index 855b200fd..1112a13ae 100644
--- a/dask/array/tests/test_random.py
+++ b/dask/array/tests/test_random.py
@@ -134,6 +134,7 @@ def test_random_all():
da.random.logistic(size=5, chunks=3).compute()
da.random.lognormal(size=5, chunks=3).compute()
da.random.logseries(0.5, size=5, chunks=3).compute()
+ da.random.multinomial(20, [1/6.]*6, size=5, chunks=3).compute()
da.random.negative_binomial(5, 0.5, size=5, chunks=3).compute()
da.random.noncentral_chisquare(2, 2, size=5, chunks=3).compute()
@@ -159,3 +160,11 @@ def test_random_all():
da.random.standard_gamma(2, size=5, chunks=3).compute()
da.random.standard_normal(size=5, chunks=3).compute()
da.random.standard_t(2, size=5, chunks=3).compute()
+
+
+def test_multinomial():
+ for size, chunks in [(5, 3), ((5, 4), (2, 3))]:
+ x = da.random.multinomial(20, [1/6.]*6, size=size, chunks=chunks)
+ y = np.random.multinomial(20, [1/6.]*6, size=size)
+
+ assert x.shape == y.shape == x.compute().shape
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
boto3==1.23.10
botocore==1.26.10
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@71e3e413d6e00942de3ff32a3ba378408f2648e9#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.9.5
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs==0.4.2
s3transfer==0.5.2
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- boto3==1.23.10
- botocore==1.26.10
- distributed==1.9.5
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- s3fs==0.4.2
- s3transfer==0.5.2
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_random.py::test_random_all",
"dask/array/tests/test_random.py::test_multinomial"
]
| []
| [
"dask/array/tests/test_random.py::test_RandomState",
"dask/array/tests/test_random.py::test_concurrency",
"dask/array/tests/test_random.py::test_doc_randomstate",
"dask/array/tests/test_random.py::test_serializability",
"dask/array/tests/test_random.py::test_determinisim_through_dask_values",
"dask/array/tests/test_random.py::test_randomstate_consistent_names",
"dask/array/tests/test_random.py::test_random",
"dask/array/tests/test_random.py::test_parametrized_random_function",
"dask/array/tests/test_random.py::test_kwargs",
"dask/array/tests/test_random.py::test_unique_names",
"dask/array/tests/test_random.py::test_docs",
"dask/array/tests/test_random.py::test_can_make_really_big_random_array",
"dask/array/tests/test_random.py::test_random_seed"
]
| []
| BSD 3-Clause "New" or "Revised" License | 530 | [
"dask/array/random.py"
]
| [
"dask/array/random.py"
]
|
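One detail worth calling out in the dask record above: the patch threads an `extra_chunks` argument through `_wrap` because `multinomial` samples are vectors rather than scalars, so the output gains a trailing axis of length `len(pvals)` that the chunks tuple has to cover. A small sketch of the resulting shapes (the dask call assumes a version containing the patch above):

    import numpy as np
    import dask.array as da

    pvals = [1 / 6.0] * 6

    # NumPy: result shape is size + (len(pvals),), because each sample is a
    # length-6 vector of counts.
    y = np.random.multinomial(20, pvals, size=(5, 4))
    print(y.shape)                      # (5, 4, 6)

    # Dask: chunks describe only the `size` axes; the trailing len(pvals) axis
    # is appended by the wrapper as a single extra chunk.
    x = da.random.multinomial(20, pvals, size=(5, 4), chunks=(2, 3))
    print(x.shape, x.compute().shape)   # (5, 4, 6) (5, 4, 6)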