Dataset schema (column: type, observed range across rows):

- instance_id: string, length 10–57
- base_commit: string, length 40 (full SHA)
- created_at: date, 2014-04-30 14:58:36 to 2025-04-30 20:14:11
- environment_setup_commit: string, length 40 (full SHA)
- hints_text: string, length 0–273k
- patch: string, length 251–7.06M
- problem_statement: string, length 11–52.5k
- repo: string, length 7–53
- test_patch: string, length 231–997k
- meta: dict
- version: string, 851 classes
- install_config: dict
- requirements: string, length 93–34.2k, nullable (⌀)
- environment: string, length 760–20.5k, nullable (⌀)
- FAIL_TO_PASS: list, length 1–9.39k
- FAIL_TO_FAIL: list, length 0–2.69k
- PASS_TO_PASS: list, length 0–7.87k
- PASS_TO_FAIL: list, length 0–192
- license_name: string, 55 classes
- __index_level_0__: int64, 0–21.4k
- before_filepaths: list, length 1–105
- after_filepaths: list, length 1–105

The rows below follow this column order, with cells separated by `|`.
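Read programmatically, a dump with this schema is just a Hugging Face dataset; a minimal sketch of pulling a row back out (the repo id below is a placeholder, since the dump itself does not name the dataset):

```python
from datasets import load_dataset

# Placeholder repo id; substitute the dataset's actual path on the Hub.
ds = load_dataset('org-name/swe-instances', split='train')

row = ds[0]
print(row['instance_id'], row['repo'])
print(len(row['patch']), 'characters of gold patch')
print(row['FAIL_TO_PASS'])  # tests the gold patch should flip to passing
```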
cdent__gabbi-167 | 2bd1c803b71bd26be7822be87e249a811467b863 | 2016-09-02 12:17:35 | 2bd1c803b71bd26be7822be87e249a811467b863 | diff --git a/gabbi/handlers.py b/gabbi/handlers.py
index e4ed7f5..5302f3e 100644
--- a/gabbi/handlers.py
+++ b/gabbi/handlers.py
@@ -110,6 +110,8 @@ class JSONResponseHandler(ResponseHandler):
if (hasattr(expected, 'startswith') and expected.startswith('/')
and expected.endswith('/')):
expected = expected.strip('/').rstrip('/')
+ # match may be a number so stringify
+ match = str(match)
test.assertRegexpMatches(
match, expected,
'Expect jsonpath %s to match /%s/, got %s' %
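The two added lines are the entire fix; stripped of gabbi's harness, the behavior they address looks like this (values here are illustrative):

```python
import re

match = 99         # a JSONPath result that happens to be a number
expected = r'\d+'  # pattern taken from between the /.../ delimiters

# re.search() rejects a non-string subject with TypeError, so the
# handler stringifies first, exactly as the patch does:
match = str(match)
assert re.search(expected, match)
```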
| regex matching in JSONResponseHandler doesn't deal with numbers
If the match is a number, the regex matching errors out. We need to cast to a string before doing the regex test. | cdent/gabbi | diff --git a/gabbi/tests/gabbits_intercept/regex.yaml b/gabbi/tests/gabbits_intercept/regex.yaml
index 4416be9..9a0c055 100644
--- a/gabbi/tests/gabbits_intercept/regex.yaml
+++ b/gabbi/tests/gabbits_intercept/regex.yaml
@@ -14,6 +14,8 @@ tests:
data:
alpha: cow
beta: pig
+ gamma: 1
response_json_paths:
$.alpha: /ow$/
$.beta: /(?!cow).*/
+ $.gamma: /\d+/
diff --git a/gabbi/tests/test_handlers.py b/gabbi/tests/test_handlers.py
index 67bd30c..3f8b72f 100644
--- a/gabbi/tests/test_handlers.py
+++ b/gabbi/tests/test_handlers.py
@@ -128,6 +128,34 @@ class HandlersTest(unittest.TestCase):
with self.assertRaises(AssertionError):
self._assert_handler(handler)
+ def test_response_json_paths_regex(self):
+ handler = handlers.JSONResponseHandler(self.test_class)
+ self.test.content_type = "application/json"
+ self.test.test_data = {'response_json_paths': {
+ '$.objects[0].name': '/ow/',
+ }}
+ self.test.json_data = {
+ 'objects': [{'name': 'cow',
+ 'location': 'barn'},
+ {'name': 'chris',
+ 'location': 'house'}]
+ }
+ self._assert_handler(handler)
+
+ def test_response_json_paths_regex_number(self):
+ handler = handlers.JSONResponseHandler(self.test_class)
+ self.test.content_type = "application/json"
+ self.test.test_data = {'response_json_paths': {
+ '$.objects[0].name': '/\d+/',
+ }}
+ self.test.json_data = {
+ 'objects': [{'name': 99,
+ 'location': 'barn'},
+ {'name': 'chris',
+ 'location': 'house'}]
+ }
+ self._assert_handler(handler)
+
def test_response_headers(self):
handler = handlers.HeadersResponseHandler(self.test_class)
self.test.response = {'content-type': 'text/plain'}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.24 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"testrepository",
"coverage",
"hacking",
"sphinx"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
colorama==0.4.5
coverage==6.2
decorator==5.1.1
docutils==0.18.1
extras==1.0.0
fixtures==4.0.1
flake8==3.8.4
-e git+https://github.com/cdent/gabbi.git@2bd1c803b71bd26be7822be87e249a811467b863#egg=gabbi
hacking==4.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
iso8601==1.1.0
Jinja2==3.0.3
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
MarkupSafe==2.0.1
mccabe==0.6.1
mock==5.2.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pycodestyle==2.6.0
pyflakes==2.2.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-subunit==1.4.2
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
testrepository==0.0.21
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- colorama==0.4.5
- coverage==6.2
- decorator==5.1.1
- docutils==0.18.1
- extras==1.0.0
- fixtures==4.0.1
- flake8==3.8.4
- hacking==4.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso8601==1.1.0
- jinja2==3.0.3
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- markupsafe==2.0.1
- mccabe==0.6.1
- mock==5.2.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pycodestyle==2.6.0
- pyflakes==2.2.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-subunit==1.4.2
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testrepository==0.0.21
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_regex_number"
]
| []
| [
"gabbi/tests/test_handlers.py::HandlersTest::test_resonse_headers_stringify",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_data",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_header",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_regex",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_data",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_path",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_regex",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_output",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_payload"
]
| []
| Apache License 2.0 | 741 | [
"gabbi/handlers.py"
]
| [
"gabbi/handlers.py"
]
|
|
napjon__krisk-39 | b7489f45df16b6805b2f576d696dabc1a3bc5235 | 2016-09-02 13:48:46 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/39?src=pr) is 91.69% (diff: 0.00%)
> Merging [#39](https://codecov.io/gh/napjon/krisk/pull/39?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will decrease coverage by **1.95%**
```diff
@@ 0.2-develop #39 diff @@
=============================================
Files 10 10
Lines 299 301 +2
Methods 0 0
Messages 0 0
Branches 39 40 +1
=============================================
- Hits 280 276 -4
- Misses 9 13 +4
- Partials 10 12 +2
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [b7489f4...71201fa](https://codecov.io/gh/napjon/krisk/compare/b7489f45df16b6805b2f576d696dabc1a3bc5235...71201faa3b06c668a6422c677f3fb64dad2253a7?src=pr) | diff --git a/krisk/plot/__init__.py b/krisk/plot/__init__.py
index 14e24b8..e702c4a 100644
--- a/krisk/plot/__init__.py
+++ b/krisk/plot/__init__.py
@@ -8,7 +8,8 @@ def bar(df,
how='count',
stacked=False,
annotate=None,
- full=False):
+ full=False,
+ trendline=False):
"""
Parameters
----------
@@ -26,10 +27,13 @@ def bar(df,
stacked: Boolean, default to False.
Whether to stacked category on top of the other categories.
annotate: string, {'all',True} default to None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ if True, annotate value on top of the plot element. If stacked is also True, annotate the
+ last category. if 'all' and stacked, annotate all category
full: boolean, default to False.
If true, set to full area stacked chart. Only work if stacked is True.
+ trendline: boolean, default to False.
+ If true, add line that connected the bars. Only work if not category, category but stacked,
+ or not full.
Returns
-------
@@ -39,6 +43,7 @@ def bar(df,
# TODO: add optional argument trendline
return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
+ trendline=trendline,
annotate='top' if annotate == True else annotate)
diff --git a/krisk/plot/bar_line.py b/krisk/plot/bar_line.py
index 057ed1d..1101415 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/bar_line.py
@@ -51,6 +51,20 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
if kwargs['annotate'] == 'top':
series[-1]['label'] = d_annotate
+ if kwargs['type'] == 'bar' and kwargs['trendline']:
+ trendline = {'name':'trendline', 'type': 'line'}
+
+ if c and kwargs['stacked']:
+ trendline['data'] = [0] * len(series[-1]['data'])
+ trendline['stack'] = c
+ elif c is None:
+ trendline['data'] = series[0]['data']
+ else:
+ raise AssertionError('Trendline must either stacked category, or not category')
+
+ series.append(trendline)
+
+
# TODO: make annotate receive all kinds supported in echarts.
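Given the branching above, the new keyword is exercised roughly as follows; this is a sketch using gapminder-style columns and the `kk` alias from krisk's own tests:

```python
import pandas as pd
import krisk.plot as kk  # alias as used in krisk's tests

# Tiny stand-in for the gapminder fixture used in the test patch below.
gapminder = pd.DataFrame({'year': [1952, 1957, 1962],
                          'pop': [9.1e6, 10.0e6, 11.0e6],
                          'continent': ['Asia', 'Asia', 'Asia']})

# No category: the trendline series simply mirrors the bar values.
kk.bar(gapminder, 'year', y='pop', how='mean', trendline=True)

# Stacked categories: a zero-valued line is stacked on top, so the
# connecting line traces the cumulative stack totals.
kk.bar(gapminder, 'year', y='pop', how='mean',
       c='continent', stacked=True, trendline=True)

# Categorical but unstacked raises AssertionError, per the patch above.
```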
| Add trendline parameters for bar chart
Add trendline that shows changes for bar chart | napjon/krisk | diff --git a/krisk/tests/data/bar_year_pop_mean_continent_trendline.json b/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
new file mode 100644
index 0000000..89aa040
--- /dev/null
+++ b/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
@@ -0,0 +1,152 @@
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "title": {
+ "text": ""
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 9279525,
+ 10270856,
+ 11000948,
+ 12760499,
+ 14760787,
+ 17152804,
+ 20033753,
+ 23254956,
+ 26298373,
+ 29072015,
+ 31287142,
+ 33333216
+ ],
+ "name": "Africa"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 17876956,
+ 19610538,
+ 21283783,
+ 22934225,
+ 24779799,
+ 26983828,
+ 29341374,
+ 31620918,
+ 33958947,
+ 36203463,
+ 38331121,
+ 40301927
+ ],
+ "name": "Americas"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 8425333,
+ 9240934,
+ 10267083,
+ 11537966,
+ 13079460,
+ 14880372,
+ 12881816,
+ 13867957,
+ 16317921,
+ 22227415,
+ 25268405,
+ 31889923
+ ],
+ "name": "Asia"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 1282697,
+ 1476505,
+ 1728137,
+ 1984060,
+ 2263554,
+ 2509048,
+ 2780097,
+ 3075321,
+ 3326498,
+ 3428038,
+ 3508512,
+ 3600523
+ ],
+ "name": "Europe"
+ },
+ {
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 8691212,
+ 9712569,
+ 10794968,
+ 11872264,
+ 13177000,
+ 14074100,
+ 15184200,
+ 16257249,
+ 17481977,
+ 18565243,
+ 19546792,
+ 20434176
+ ],
+ "name": "Oceania"
+ },
+ {
+ "stack": "continent",
+ "data": [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0
+ ],
+ "type": "line",
+ "name": "trendline"
+ }
+ ],
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/bar_year_pop_mean_trendline.json b/krisk/tests/data/bar_year_pop_mean_trendline.json
new file mode 100644
index 0000000..15ef467
--- /dev/null
+++ b/krisk/tests/data/bar_year_pop_mean_trendline.json
@@ -0,0 +1,68 @@
+{
+ "legend": {
+ "data": []
+ },
+ "title": {
+ "text": ""
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "type": "bar",
+ "data": [
+ 9111144.6,
+ 10062280.4,
+ 11014983.8,
+ 12217802.8,
+ 13612120.0,
+ 15120030.4,
+ 16044248.0,
+ 17615280.2,
+ 19476743.2,
+ 21899234.8,
+ 23588394.4,
+ 25911953.0
+ ],
+ "name": "year"
+ },
+ {
+ "data": [
+ 9111144.6,
+ 10062280.4,
+ 11014983.8,
+ 12217802.8,
+ 13612120.0,
+ 15120030.4,
+ 16044248.0,
+ 17615280.2,
+ 19476743.2,
+ 21899234.8,
+ 23588394.4,
+ 25911953.0
+ ],
+ "type": "line",
+ "name": "trendline"
+ }
+ ],
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 41aed67..bdce2e5 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -77,6 +77,20 @@ def test_full_bar_line(gapminder):
assert bar.option == line.option == true_option
+
+def test_trendline(gapminder):
+
+ p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True)
+ assert p.get_option() == read_option_tests('bar_year_pop_mean_trendline.json')
+
+ p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent',stacked=True)
+ assert p.get_option() == read_option_tests('bar_year_pop_mean_continent_trendline.json')
+
+ try:
+ kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent')
+ except AssertionError:
+ pass
+
def test_hist(gapminder):
true_option = read_option_tests('hist_x.json')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@b7489f45df16b6805b2f576d696dabc1a3bc5235#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_trendline"
]
| []
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| BSD 3-Clause "New" or "Revised" License | 742 | [
"krisk/plot/bar_line.py",
"krisk/plot/__init__.py"
]
| [
"krisk/plot/bar_line.py",
"krisk/plot/__init__.py"
]
|
falconry__falcon-889 | 18beacf2a84720d5c4de071122eeba3fe5a7f83f | 2016-09-06 04:20:39 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/889?src=pr) is 99.89% (diff: 100%)
> Merging [#889](https://codecov.io/gh/falconry/falcon/pull/889?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will decrease coverage by **0.10%**
```diff
@@ master #889 diff @@
========================================
Files 30 30
Lines 1897 1905 +8
Methods 0 0
Messages 0 0
Branches 314 314
========================================
+ Hits 1897 1903 +6
- Misses 0 2 +2
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [d9d1aed...c8c3ac0](https://codecov.io/gh/falconry/falcon/compare/d9d1aed01ef5e85d3164bd0e6b83f52293647c58...c8c3ac08eac979f469330a6b0baf593526c24106?src=pr)
jmvrbanac: :+1:
fxfitz: LGTM2!!! | diff --git a/docs/api/routing.rst b/docs/api/routing.rst
index c0ce2d2..f579920 100644
--- a/docs/api/routing.rst
+++ b/docs/api/routing.rst
@@ -24,14 +24,15 @@ A custom router is any class that implements the following interface:
"""
def find(self, uri):
- """Search for a route that matches the given URI.
+ """Search for a route that matches the given partial URI.
Args:
- uri (str): Request URI to match to a route.
+ uri(str): The requested path to route
Returns:
- tuple: A 3-member tuple composed of (resource, method_map, params)
- or ``None`` if no route is found.
+ tuple: A 4-member tuple composed of (resource, method_map,
+ params, uri_template), or ``None`` if no route matches
+ the requested path
"""
A custom routing engine may be specified when instantiating
diff --git a/falcon/api.py b/falcon/api.py
index e1e985a..8d62173 100644
--- a/falcon/api.py
+++ b/falcon/api.py
@@ -189,7 +189,7 @@ class API(object):
# next-hop child resource. In that case, the object
# being asked to dispatch to its child will raise an
# HTTP exception signalling the problem, e.g. a 404.
- responder, params, resource = self._get_responder(req)
+ responder, params, resource, req.uri_template = self._get_responder(req)
except Exception as ex:
if not self._handle_exception(ex, req, resp, params):
raise
@@ -508,11 +508,18 @@ class API(object):
path = req.path
method = req.method
+ uri_template = None
route = self._router.find(path)
if route is not None:
- resource, method_map, params = route
+ try:
+ resource, method_map, params, uri_template = route
+ except ValueError:
+ # NOTE(kgriffs): Older routers may not return the
+ # template. But for performance reasons they should at
+ # least return None if they don't support it.
+ resource, method_map, params = route
else:
# NOTE(kgriffs): Older routers may indicate that no route
# was found by returning (None, None, None). Therefore, we
@@ -538,7 +545,7 @@ class API(object):
else:
responder = falcon.responders.path_not_found
- return (responder, params, resource)
+ return (responder, params, resource, uri_template)
def _compose_status_response(self, req, resp, http_status):
"""Composes a response for the given HTTPStatus instance."""
diff --git a/falcon/request.py b/falcon/request.py
index b063321..851898c 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -68,6 +68,8 @@ class Request(object):
Args:
env (dict): A WSGI environment dict passed in from the server. See
also PEP-3333.
+
+ Keyword Arguments
options (dict): Set of global options passed from the API handler.
Attributes:
@@ -140,7 +142,6 @@ class Request(object):
opposed to a class), the function is called like a method of
the current Request instance. Therefore the first argument is
the Request instance itself (self).
-
uri (str): The fully-qualified URI for the request.
url (str): alias for `uri`.
relative_uri (str): The path + query string portion of the full URI.
@@ -148,6 +149,13 @@ class Request(object):
string).
query_string (str): Query string portion of the request URL, without
the preceding '?' character.
+ uri_template (str): The template for the route that was matched for
+ this request. May be ``None`` if the request has not yet been
+ routed, as would be the case for `process_request()` middleware
+ methods. May also be ``None`` if your app uses a custom routing
+ engine and the engine does not provide the URI template when
+ resolving a route.
+
user_agent (str): Value of the User-Agent header, or ``None`` if the
header is missing.
accept (str): Value of the Accept header, or '*/*' if the header is
@@ -244,6 +252,7 @@ class Request(object):
'_cookies',
'_cached_access_route',
'__dict__',
+ 'uri_template',
)
# Child classes may override this
@@ -259,6 +268,8 @@ class Request(object):
self.stream = env['wsgi.input']
self.method = env['REQUEST_METHOD']
+ self.uri_template = None
+
# Normalize path
path = env['PATH_INFO']
if path:
diff --git a/falcon/routing/compiled.py b/falcon/routing/compiled.py
index 395dc72..32becd8 100644
--- a/falcon/routing/compiled.py
+++ b/falcon/routing/compiled.py
@@ -43,7 +43,15 @@ class CompiledRouter(object):
self._return_values = None
def add_route(self, uri_template, method_map, resource):
- """Adds a route between URI path template and resource."""
+ """Adds a route between a URI path template and a resource.
+
+ Args:
+ uri_template (str): A URI template to use for the route
+ method_map (dict): A mapping of HTTP methods (e.g., 'GET',
+ 'POST') to methods of a resource object.
+ resource (object): The resource instance to associate with
+ the URI template.
+ """
if re.search('\s', uri_template):
raise ValueError('URI templates may not include whitespace.')
@@ -67,6 +75,7 @@ class CompiledRouter(object):
# NOTE(kgriffs): Override previous node
node.method_map = method_map
node.resource = resource
+ node.uri_template = uri_template
else:
insert(node.children, path_index)
@@ -85,6 +94,7 @@ class CompiledRouter(object):
if path_index == len(path) - 1:
new_node.method_map = method_map
new_node.resource = resource
+ new_node.uri_template = uri_template
else:
insert(new_node.children, path_index + 1)
@@ -92,13 +102,23 @@ class CompiledRouter(object):
self._find = self._compile()
def find(self, uri):
- """Finds resource and method map for a URI, or returns None."""
+ """Search for a route that matches the given partial URI.
+
+ Args:
+ uri(str): The requested path to route
+
+ Returns:
+ tuple: A 4-member tuple composed of (resource, method_map,
+ params, uri_template), or ``None`` if no route matches
+ the requested path
+ """
+
path = uri.lstrip('/').split('/')
params = {}
node = self._find(path, self._return_values, self._expressions, params)
if node is not None:
- return node.resource, node.method_map, params
+ return node.resource, node.method_map, params, node.uri_template
else:
return None
@@ -234,12 +254,14 @@ class CompiledRouterNode(object):
_regex_vars = re.compile('{([-_a-zA-Z0-9]+)}')
- def __init__(self, raw_segment, method_map=None, resource=None):
+ def __init__(self, raw_segment,
+ method_map=None, resource=None, uri_template=None):
self.children = []
self.raw_segment = raw_segment
self.method_map = method_map
self.resource = resource
+ self.uri_template = uri_template
self.is_var = False
self.is_complex = False
| Expose routing metadata
Per a [discussion][1] on the mailing list, it can be useful to introspect the URI template that is associated with a given resource. One way to implement this would be to check for an attribute and fill it in as part of the `add_route` logic. For example:
```python
class Thing(object):
def __init__(self):
self.uri_templates = []
def on_get(self, req, resp):
pass
# ...
def add_route(self, uri_template, resource):
# ...
if hasattr(resource, 'uri_templates'):
resource.uri_templates.append(uri_template)
# ...
```
We could also do this:
```python
if hasattr(resource, 'uri_templates'):
resource.uri_templates.append(uri_template)
else:
resource.uri_templates = [uri_template]
```
But that would break in the case that `Thing` defined `__slots__`.
Another option would be to store the URI template in the routing tree and then set an attribute on each Request instance:
```python
def __call__(self, env, start_response):
# ...
responder, params, resource, uri_template = self._get_responder(req)
req = self._request_type(env, options=self.req_options,
uri_template=uri_template)
# ...
```
Either way, this would provide a way for middleware and hooks to key off the URI template to perform additional actions.
[1]: http://librelist.com/browser//falcon/2015/7/28/getting-the-url-template-when-servicing-a-request/ | falconry/falcon | diff --git a/tests/test_custom_router.py b/tests/test_custom_router.py
index cbe1b48..c425ea8 100644
--- a/tests/test_custom_router.py
+++ b/tests/test_custom_router.py
@@ -21,14 +21,20 @@ class TestCustomRouter(testing.TestBase):
def test_custom_router_find_should_be_used(self):
def resource(req, resp, **kwargs):
- resp.body = '{"status": "ok"}'
+ resp.body = '{{"uri_template": "{0}"}}'.format(req.uri_template)
class CustomRouter(object):
def __init__(self):
self.reached_backwards_compat = False
def find(self, uri):
- if uri == '/test':
+ if uri == '/test/42':
+ return resource, {'GET': resource}, {}, '/test/{id}'
+
+ if uri == '/test/42/no-uri-template':
+ return resource, {'GET': resource}, {}, None
+
+ if uri == '/test/42/uri-template/backwards-compat':
return resource, {'GET': resource}, {}
if uri == '/404/backwards-compat':
@@ -39,8 +45,14 @@ class TestCustomRouter(testing.TestBase):
router = CustomRouter()
self.api = falcon.API(router=router)
- body = self.simulate_request('/test')
- self.assertEqual(body, [b'{"status": "ok"}'])
+ body = self.simulate_request('/test/42')
+ self.assertEqual(body, [b'{"uri_template": "/test/{id}"}'])
+
+ body = self.simulate_request('/test/42/no-uri-template')
+ self.assertEqual(body, [b'{"uri_template": "None"}'])
+
+ body = self.simulate_request('/test/42/uri-template/backwards-compat')
+ self.assertEqual(body, [b'{"uri_template": "None"}'])
for uri in ('/404', '/404/backwards-compat'):
body = self.simulate_request(uri)
diff --git a/tests/test_default_router.py b/tests/test_default_router.py
index 9f3c6d5..28ee2e5 100644
--- a/tests/test_default_router.py
+++ b/tests/test_default_router.py
@@ -24,18 +24,18 @@ class TestRegressionCases(testing.TestBase):
def test_versioned_url(self):
self.router.add_route('/{version}/messages', {}, ResourceWithId(2))
- resource, method_map, params = self.router.find('/v2/messages')
+ resource, __, __, __ = self.router.find('/v2/messages')
self.assertEqual(resource.resource_id, 2)
self.router.add_route('/v2', {}, ResourceWithId(1))
- resource, method_map, params = self.router.find('/v2')
+ resource, __, __, __ = self.router.find('/v2')
self.assertEqual(resource.resource_id, 1)
- resource, method_map, params = self.router.find('/v2/messages')
+ resource, __, __, __ = self.router.find('/v2/messages')
self.assertEqual(resource.resource_id, 2)
- resource, method_map, params = self.router.find('/v1/messages')
+ resource, __, __, __ = self.router.find('/v1/messages')
self.assertEqual(resource.resource_id, 2)
route = self.router.find('/v1')
@@ -47,10 +47,10 @@ class TestRegressionCases(testing.TestBase):
self.router.add_route(
'/recipes/baking', {}, ResourceWithId(2))
- resource, method_map, params = self.router.find('/recipes/baking/4242')
+ resource, __, __, __ = self.router.find('/recipes/baking/4242')
self.assertEqual(resource.resource_id, 1)
- resource, method_map, params = self.router.find('/recipes/baking')
+ resource, __, __, __ = self.router.find('/recipes/baking')
self.assertEqual(resource.resource_id, 2)
route = self.router.find('/recipes/grilling')
@@ -165,20 +165,20 @@ class TestComplexRouting(testing.TestBase):
def test_override(self):
self.router.add_route('/emojis/signs/0', {}, ResourceWithId(-1))
- resource, method_map, params = self.router.find('/emojis/signs/0')
+ resource, __, __, __ = self.router.find('/emojis/signs/0')
self.assertEqual(resource.resource_id, -1)
def test_literal_segment(self):
- resource, method_map, params = self.router.find('/emojis/signs/0')
+ resource, __, __, __ = self.router.find('/emojis/signs/0')
self.assertEqual(resource.resource_id, 12)
- resource, method_map, params = self.router.find('/emojis/signs/1')
+ resource, __, __, __ = self.router.find('/emojis/signs/1')
self.assertEqual(resource.resource_id, 13)
- resource, method_map, params = self.router.find('/emojis/signs/42')
+ resource, __, __, __ = self.router.find('/emojis/signs/42')
self.assertEqual(resource.resource_id, 14)
- resource, method_map, params = self.router.find('/emojis/signs/42/small')
+ resource, __, __, __ = self.router.find('/emojis/signs/42/small')
self.assertEqual(resource.resource_id, 14.1)
route = self.router.find('/emojis/signs/1/small')
@@ -204,18 +204,18 @@ class TestComplexRouting(testing.TestBase):
self.assertIs(route, None)
def test_literal(self):
- resource, method_map, params = self.router.find('/user/memberships')
+ resource, __, __, __ = self.router.find('/user/memberships')
self.assertEqual(resource.resource_id, 8)
def test_variable(self):
- resource, method_map, params = self.router.find('/teams/42')
+ resource, __, params, __ = self.router.find('/teams/42')
self.assertEqual(resource.resource_id, 6)
self.assertEqual(params, {'id': '42'})
- resource, method_map, params = self.router.find('/emojis/signs/stop')
+ __, __, params, __ = self.router.find('/emojis/signs/stop')
self.assertEqual(params, {'id': 'stop'})
- resource, method_map, params = self.router.find('/gists/42/raw')
+ __, __, params, __ = self.router.find('/gists/42/raw')
self.assertEqual(params, {'id': '42'})
@ddt.data(
@@ -232,7 +232,7 @@ class TestComplexRouting(testing.TestBase):
)
@ddt.unpack
def test_literal_vs_variable(self, path, expected_id):
- resource, method_map, params = self.router.find(path)
+ resource, __, __, __ = self.router.find(path)
self.assertEqual(resource.resource_id, expected_id)
@ddt.data(
@@ -271,12 +271,12 @@ class TestComplexRouting(testing.TestBase):
self.assertIs(route, None)
def test_multivar(self):
- resource, method_map, params = self.router.find(
+ resource, __, params, __ = self.router.find(
'/repos/racker/falcon/commits')
self.assertEqual(resource.resource_id, 4)
self.assertEqual(params, {'org': 'racker', 'repo': 'falcon'})
- resource, method_map, params = self.router.find(
+ resource, __, params, __ = self.router.find(
'/repos/racker/falcon/compare/all')
self.assertEqual(resource.resource_id, 11)
self.assertEqual(params, {'org': 'racker', 'repo': 'falcon'})
@@ -285,7 +285,7 @@ class TestComplexRouting(testing.TestBase):
@ddt.unpack
def test_complex(self, url_postfix, resource_id):
uri = '/repos/racker/falcon/compare/johndoe:master...janedoe:dev'
- resource, method_map, params = self.router.find(uri + url_postfix)
+ resource, __, params, __ = self.router.find(uri + url_postfix)
self.assertEqual(resource.resource_id, resource_id)
self.assertEqual(params, {
@@ -297,11 +297,14 @@ class TestComplexRouting(testing.TestBase):
'branch1': 'dev',
})
- @ddt.data(('', 16), ('/full', 17))
+ @ddt.data(
+ ('', 16, '/repos/{org}/{repo}/compare/{usr0}:{branch0}'),
+ ('/full', 17, '/repos/{org}/{repo}/compare/{usr0}:{branch0}/full')
+ )
@ddt.unpack
- def test_complex_alt(self, url_postfix, resource_id):
- uri = '/repos/falconry/falcon/compare/johndoe:master'
- resource, method_map, params = self.router.find(uri + url_postfix)
+ def test_complex_alt(self, url_postfix, resource_id, expected_template):
+ uri = '/repos/falconry/falcon/compare/johndoe:master' + url_postfix
+ resource, __, params, uri_template = self.router.find(uri)
self.assertEqual(resource.resource_id, resource_id)
self.assertEqual(params, {
@@ -310,3 +313,4 @@ class TestComplexRouting(testing.TestBase):
'usr0': 'johndoe',
'branch0': 'master',
})
+ self.assertEqual(uri_template, expected_template)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 4
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
ddt==1.7.2
exceptiongroup==1.2.2
-e git+https://github.com/falconry/falcon.git@18beacf2a84720d5c4de071122eeba3fe5a7f83f#egg=falcon
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
python-mimeparse==2.0.0
PyYAML==6.0.2
requests==2.32.3
six==1.17.0
testtools==2.7.2
tomli==2.2.1
urllib3==2.3.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- ddt==1.7.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-mimeparse==2.0.0
- pyyaml==6.0.2
- requests==2.32.3
- six==1.17.0
- testtools==2.7.2
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_custom_router.py::TestCustomRouter::test_custom_router_find_should_be_used",
"tests/test_default_router.py::TestRegressionCases::test_recipes",
"tests/test_default_router.py::TestRegressionCases::test_versioned_url",
"tests/test_default_router.py::TestComplexRouting::test_complex_1______5_",
"tests/test_default_router.py::TestComplexRouting::test_complex_2____full___10_",
"tests/test_default_router.py::TestComplexRouting::test_complex_3____part___15_",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_1______16____repos__org___repo__compare__usr0___branch0___",
"tests/test_default_router.py::TestComplexRouting::test_complex_alt_2____full___17____repos__org___repo__compare__usr0___branch0__full__",
"tests/test_default_router.py::TestComplexRouting::test_literal",
"tests/test_default_router.py::TestComplexRouting::test_literal_segment",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_01____teams_default___19_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_02____teams_default_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_03____teams_foo___6_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_04____teams_foo_members___7_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_05____gists_first___20_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_06____gists_first_raw___18_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_07____gists_first_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_08____gists_1776_pdf___21_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_09____emojis_signs_78___13_",
"tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_10____emojis_signs_78_small___22_",
"tests/test_default_router.py::TestComplexRouting::test_multivar",
"tests/test_default_router.py::TestComplexRouting::test_override",
"tests/test_default_router.py::TestComplexRouting::test_variable"
]
| []
| [
"tests/test_custom_router.py::TestCustomRouter::test_can_pass_additional_params_to_add_route",
"tests/test_custom_router.py::TestCustomRouter::test_custom_router_add_route_should_be_used",
"tests/test_default_router.py::TestComplexRouting::test_collision_1__teams__collision_",
"tests/test_default_router.py::TestComplexRouting::test_collision_2__emojis_signs__id_too_",
"tests/test_default_router.py::TestComplexRouting::test_collision_3__repos__org___repo__compare__complex___vs_____complex2___collision_",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_1__teams",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_2__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_3__gists",
"tests/test_default_router.py::TestComplexRouting::test_dead_segment_4__gists_42",
"tests/test_default_router.py::TestComplexRouting::test_dump",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_1____",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_2___9v_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_3____kgriffs_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_4__repos__simple_thing__etc",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_5__repos__or_g___repo__compare__thing_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_6__repos__org___repo__compare___",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_7__repos__complex______thing_",
"tests/test_default_router.py::TestComplexRouting::test_invalid_field_name_8__repos__complex___9v___thing__etc",
"tests/test_default_router.py::TestComplexRouting::test_malformed_pattern",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_1__repos__org___repo__compare__simple_vs_complex_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_2__repos__complex___vs___simple_",
"tests/test_default_router.py::TestComplexRouting::test_non_collision_3__repos__org___repo__compare__complex___vs_____complex2__full",
"tests/test_default_router.py::TestComplexRouting::test_not_found_01__this_does_not_exist",
"tests/test_default_router.py::TestComplexRouting::test_not_found_02__user_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_03__repos_racker_falcon_compare_johndoe_master___janedoe_dev_bogus",
"tests/test_default_router.py::TestComplexRouting::test_not_found_04__teams",
"tests/test_default_router.py::TestComplexRouting::test_not_found_05__teams_42_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_06__teams_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_07__teams_42_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_08__teams_default_members_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_09__teams_default_members_thing_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_10__teams_default_members_thing_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_11__teams_default_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_12__teams_default_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_13__emojis_signs",
"tests/test_default_router.py::TestComplexRouting::test_not_found_14__emojis_signs_0_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_15__emojis_signs_0_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_16__emojis_signs_0_undefined_segments",
"tests/test_default_router.py::TestComplexRouting::test_not_found_17__emojis_signs_20_small",
"tests/test_default_router.py::TestComplexRouting::test_not_found_18__emojis_signs_20_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_19__emojis_signs_42_undefined",
"tests/test_default_router.py::TestComplexRouting::test_not_found_20__emojis_signs_78_undefined",
"tests/test_default_router.py::TestComplexRouting::test_subsegment_not_found"
]
| []
| Apache License 2.0 | 743 | [
"docs/api/routing.rst",
"falcon/request.py",
"falcon/routing/compiled.py",
"falcon/api.py"
]
| [
"docs/api/routing.rst",
"falcon/request.py",
"falcon/routing/compiled.py",
"falcon/api.py"
]
|
falconry__falcon-890 | c2cb7091e28680671cb9092bf5f8a1840f6e6308 | 2016-09-06 05:35:08 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/890?src=pr) is 100% (diff: 100%)
> Merging [#890](https://codecov.io/gh/falconry/falcon/pull/890?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will not change coverage
```diff
@@ master #890 diff @@
====================================
Files 30 30
Lines 1897 1910 +13
Methods 0 0
Messages 0 0
Branches 314 316 +2
====================================
+ Hits 1897 1910 +13
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [d9d1aed...bdae40c](https://codecov.io/gh/falconry/falcon/compare/d9d1aed01ef5e85d3164bd0e6b83f52293647c58...bdae40c242f2e4949a591973e09d7cf1a78033f8?src=pr)
jmvrbanac: LGTM :+1:
jmvrbanac: :+1: | diff --git a/docs/api/middleware.rst b/docs/api/middleware.rst
index f947b7f..b6e9036 100644
--- a/docs/api/middleware.rst
+++ b/docs/api/middleware.rst
@@ -47,7 +47,7 @@ Falcon's middleware interface is defined as follows:
method as keyword arguments.
"""
- def process_response(self, req, resp, resource):
+ def process_response(self, req, resp, resource, req_succeeded):
"""Post-processing of the response (after routing).
Args:
@@ -56,6 +56,9 @@ Falcon's middleware interface is defined as follows:
resource: Resource object to which the request was
routed. May be None if no route was found
for the request.
+ req_succeeded: True if no exceptions were raised while
+ the framework processed and routed the request;
+ otherwise False.
"""
.. Tip::
diff --git a/falcon/api.py b/falcon/api.py
index 853abb1..e1e985a 100644
--- a/falcon/api.py
+++ b/falcon/api.py
@@ -73,7 +73,7 @@ class API(object):
arguments.
\"\"\"
- def process_response(self, req, resp, resource)
+ def process_response(self, req, resp, resource, req_succeeded)
\"\"\"Post-processing of the response (after routing).
Args:
@@ -82,6 +82,9 @@ class API(object):
resource: Resource object to which the request was
routed. May be None if no route was found
for the request.
+ req_succeeded: True if no exceptions were raised
+ while the framework processed and routed the
+ request; otherwise False.
\"\"\"
See also :ref:`Middleware <middleware>`.
@@ -161,9 +164,11 @@ class API(object):
req = self._request_type(env, options=self.req_options)
resp = self._response_type()
resource = None
- mw_pr_stack = [] # Keep track of executed middleware components
params = {}
+ mw_pr_stack = [] # Keep track of executed middleware components
+ req_succeeded = False
+
try:
try:
# NOTE(ealogar): The execution of request middleware
@@ -202,6 +207,7 @@ class API(object):
process_resource(req, resp, resource, params)
responder(req, resp, **params)
+ req_succeeded = True
except Exception as ex:
if not self._handle_exception(ex, req, resp, params):
raise
@@ -217,11 +223,13 @@ class API(object):
while mw_pr_stack:
process_response = mw_pr_stack.pop()
try:
- process_response(req, resp, resource)
+ process_response(req, resp, resource, req_succeeded)
except Exception as ex:
if not self._handle_exception(ex, req, resp, params):
raise
+ req_succeeded = False
+
#
# Set status and headers
#
diff --git a/falcon/api_helpers.py b/falcon/api_helpers.py
index 9f91608..04a5979 100644
--- a/falcon/api_helpers.py
+++ b/falcon/api_helpers.py
@@ -14,6 +14,9 @@
"""Utilities for the API class."""
+from functools import wraps
+import inspect
+
from falcon import util
@@ -49,6 +52,21 @@ def prepare_middleware(middleware=None):
msg = '{0} does not implement the middleware interface'
raise TypeError(msg.format(component))
+ if process_response:
+ # NOTE(kgriffs): Shim older implementations to ensure
+ # backwards-compatibility.
+ spec = inspect.getargspec(process_response)
+
+ if len(spec.args) == 4: # (self, req, resp, resource)
+ def let(process_response=process_response):
+ @wraps(process_response)
+ def shim(req, resp, resource, req_succeeded):
+ process_response(req, resp, resource)
+
+ return shim
+
+ process_response = let()
+
prepared_middleware.append((process_request, process_resource,
process_response))
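On the consumer side, the new fourth argument lets response middleware tell clean requests apart from ones where the framework caught an exception; a minimal sketch (the logging is illustrative):

```python
import logging

class CleanupMiddleware(object):
    def process_response(self, req, resp, resource, req_succeeded):
        # req_succeeded is False when an exception was raised while the
        # framework processed or routed the request (see api.py above).
        if not req_succeeded:
            logging.warning('request failed: %s %s -> %s',
                            req.method, req.path, resp.status)
```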
| Middleware post process error handling
When middleware is added to the api and we have a post_process on it, any exceptions thrown in the responder are not aggregated to an appropriate response. Instead, they are ignored and the post_process attempts to run on as though nothing happened.
It does not matter whether an exception handler is added to the api or not.
Let me know if I just misunderstood how to handle these | falconry/falcon | diff --git a/tests/test_middleware.py b/tests/test_middleware.py
index 652306f..9d48c4c 100644
--- a/tests/test_middleware.py
+++ b/tests/test_middleware.py
@@ -11,8 +11,11 @@ context = {'executed_methods': []}
class CaptureResponseMiddleware(object):
- def process_response(self, req, resp, resource):
+ def process_response(self, req, resp, resource, req_succeeded):
+ self.req = req
self.resp = resp
+ self.resource = resource
+ self.req_succeeded = req_succeeded
class RequestTimeMiddleware(object):
@@ -25,9 +28,10 @@ class RequestTimeMiddleware(object):
global context
context['mid_time'] = datetime.utcnow()
- def process_response(self, req, resp, resource):
+ def process_response(self, req, resp, resource, req_succeeded):
global context
context['end_time'] = datetime.utcnow()
+ context['req_succeeded'] = req_succeeded
class TransactionIdMiddleware(object):
@@ -36,6 +40,9 @@ class TransactionIdMiddleware(object):
global context
context['transaction_id'] = 'unique-req-id'
+ def process_response(self, req, resp, resource):
+ pass
+
class ExecutedFirstMiddleware(object):
@@ -49,11 +56,18 @@ class ExecutedFirstMiddleware(object):
context['executed_methods'].append(
'{0}.{1}'.format(self.__class__.__name__, 'process_resource'))
+ # NOTE(kgriffs): This also tests that the framework can continue to
+ # call process_response() methods that do not have a 'req_succeeded'
+ # arg.
def process_response(self, req, resp, resource):
global context
context['executed_methods'].append(
'{0}.{1}'.format(self.__class__.__name__, 'process_response'))
+ context['req'] = req
+ context['resp'] = resp
+ context['resource'] = resource
+
class ExecutedLastMiddleware(ExecutedFirstMiddleware):
pass
@@ -110,6 +124,7 @@ class TestRequestTimeMiddleware(TestMiddleware):
self.assertIn('start_time', context)
self.assertNotIn('mid_time', context)
self.assertIn('end_time', context)
+ self.assertFalse(context['req_succeeded'])
def test_add_invalid_middleware(self):
"""Test than an invalid class can not be added as middleware"""
@@ -147,6 +162,7 @@ class TestRequestTimeMiddleware(TestMiddleware):
body = self.simulate_json_request(self.test_route)
self.assertEqual(_EXPECTED_BODY, body)
self.assertEqual(self.srmock.status, falcon.HTTP_200)
+
self.assertIn('start_time', context)
self.assertIn('mid_time', context)
self.assertIn('end_time', context)
@@ -155,6 +171,8 @@ class TestRequestTimeMiddleware(TestMiddleware):
self.assertTrue(context['end_time'] >= context['start_time'],
'process_response not executed after request')
+ self.assertTrue(context['req_succeeded'])
+
class TestTransactionIdMiddleware(TestMiddleware):
@@ -194,6 +212,16 @@ class TestSeveralMiddlewares(TestMiddleware):
self.assertTrue(context['end_time'] >= context['start_time'],
'process_response not executed after request')
+ def test_legacy_middleware_called_with_correct_args(self):
+ global context
+ self.api = falcon.API(middleware=[ExecutedFirstMiddleware()])
+ self.api.add_route(self.test_route, MiddlewareClassResource())
+
+ self.simulate_request(self.test_route)
+ self.assertIsInstance(context['req'], falcon.Request)
+ self.assertIsInstance(context['resp'], falcon.Response)
+ self.assertIsInstance(context['resource'], MiddlewareClassResource)
+
def test_middleware_execution_order(self):
global context
self.api = falcon.API(middleware=[ExecutedFirstMiddleware(),
@@ -220,6 +248,8 @@ class TestSeveralMiddlewares(TestMiddleware):
"""Test that error in inner middleware leaves"""
global context
+ context['req_succeeded'] = []
+
class RaiseStatusMiddleware(object):
def process_response(self, req, resp, resource):
raise falcon.HTTPStatus(falcon.HTTP_201)
@@ -229,10 +259,12 @@ class TestSeveralMiddlewares(TestMiddleware):
raise falcon.HTTPError(falcon.HTTP_748)
class ProcessResponseMiddleware(object):
- def process_response(self, req, resp, resource):
+ def process_response(self, req, resp, resource, req_succeeded):
context['executed_methods'].append('process_response')
+ context['req_succeeded'].append(req_succeeded)
- self.api = falcon.API(middleware=[RaiseErrorMiddleware(),
+ self.api = falcon.API(middleware=[ProcessResponseMiddleware(),
+ RaiseErrorMiddleware(),
ProcessResponseMiddleware(),
RaiseStatusMiddleware(),
ProcessResponseMiddleware()])
@@ -242,8 +274,9 @@ class TestSeveralMiddlewares(TestMiddleware):
self.assertEqual(self.srmock.status, falcon.HTTP_748)
- expected_methods = ['process_response', 'process_response']
+ expected_methods = ['process_response'] * 3
self.assertEqual(context['executed_methods'], expected_methods)
+ self.assertEqual(context['req_succeeded'], [True, False, False])
def test_inner_mw_throw_exception(self):
"""Test that error in inner middleware leaves"""
@@ -475,6 +508,13 @@ class TestErrorHandling(TestMiddleware):
composed_body = json.loads(self.mw.resp.body)
self.assertEqual(composed_body['title'], self.srmock.status)
+ self.assertFalse(self.mw.req_succeeded)
+
+ # NOTE(kgriffs): Sanity-check the other params passed to
+ # process_response()
+ self.assertIsInstance(self.mw.req, falcon.Request)
+ self.assertIsInstance(self.mw.resource, MiddlewareClassResource)
+
def test_http_status_raised_from_error_handler(self):
def _http_error_handler(error, req, resp, params):
raise falcon.HTTPStatus(falcon.HTTP_201)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 3
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"ddt",
"pytest-randomly",
"pytest-cov",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
execnet==1.9.0
-e git+https://github.com/falconry/falcon.git@c2cb7091e28680671cb9092bf5f8a1840f6e6308#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-randomly==3.10.3
pytest-xdist==3.0.2
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- execnet==1.9.0
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-randomly==3.10.3
- pytest-xdist==3.0.2
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_middleware.py::TestSeveralMiddlewares::test_multiple_reponse_mw_throw_exception",
"tests/test_middleware.py::TestSeveralMiddlewares::test_inner_mw_throw_exception",
"tests/test_middleware.py::TestSeveralMiddlewares::test_generate_trans_id_and_time_with_request",
"tests/test_middleware.py::TestSeveralMiddlewares::test_inner_mw_with_ex_handler_throw_exception",
"tests/test_middleware.py::TestRequestTimeMiddleware::test_skip_process_resource",
"tests/test_middleware.py::TestRequestTimeMiddleware::test_log_get_request",
"tests/test_middleware.py::TestErrorHandling::test_http_status_raised_from_error_handler",
"tests/test_middleware.py::TestErrorHandling::test_error_composed_before_resp_middleware_called"
]
| []
| [
"tests/test_middleware.py::TestRemoveBasePathMiddleware::test_base_path_is_removed_before_routing",
"tests/test_middleware.py::TestSeveralMiddlewares::test_middleware_execution_order",
"tests/test_middleware.py::TestSeveralMiddlewares::test_legacy_middleware_called_with_correct_args",
"tests/test_middleware.py::TestSeveralMiddlewares::test_order_mw_executed_when_exception_in_rsrc",
"tests/test_middleware.py::TestSeveralMiddlewares::test_order_mw_executed_when_exception_in_req",
"tests/test_middleware.py::TestSeveralMiddlewares::test_order_mw_executed_when_exception_in_resp",
"tests/test_middleware.py::TestSeveralMiddlewares::test_outer_mw_with_ex_handler_throw_exception",
"tests/test_middleware.py::TestTransactionIdMiddleware::test_generate_trans_id_with_request",
"tests/test_middleware.py::TestResourceMiddleware::test_can_access_resource_params",
"tests/test_middleware.py::TestRequestTimeMiddleware::test_response_middleware_raises_exception",
"tests/test_middleware.py::TestRequestTimeMiddleware::test_add_invalid_middleware"
]
| []
| Apache License 2.0 | 744 | [
"docs/api/middleware.rst",
"falcon/api.py",
"falcon/api_helpers.py"
]
| [
"docs/api/middleware.rst",
"falcon/api.py",
"falcon/api_helpers.py"
]
|
Juniper__py-junos-eznc-590 | aa2d97698f9b741413a9b04b468fe78a168ac0ad | 2016-09-06 09:42:36 | 1aa12dab9511d4e6a2636b72eab67aa45bf9fd70 | diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index 1069b0df..00000000
--- a/.dockerignore
+++ /dev/null
@@ -1,10 +0,0 @@
-/docs/*
-/tests/*
-/vagrant/*
-INSTALL*
-COPYRIGHT
-LICENSE
-MANIFEST.md
-RELEASE-NOTES.md
-development.txt
-docreq.txt
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 11f1f000..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,30 +0,0 @@
-FROM alpine:3.4
-
-MAINTAINER [email protected]
-
-RUN mkdir /source \
- && mkdir /scripts
-
-WORKDIR /source
-
-## Copy project inside the container
-ADD setup.py setup.py
-ADD requirements.txt requirements.txt
-ADD lib lib
-
-## Install dependancies and Pyez
-RUN apk update \
- && apk upgrade \
- && apk add build-base gcc g++ make python-dev py-pip py-lxml \
- libxslt-dev libxml2-dev libffi-dev openssl-dev curl \
- && pip install -r requirements.txt \
- && apk del -r --purge gcc make g++ \
- && python setup.py install \
- && rm -rf /source/* \
- && rm -rf /var/cache/apk/*
-
-WORKDIR /scripts
-
-VOLUME ["$PWD:/scripts"]
-
-CMD sh
diff --git a/lib/jnpr/junos/device.py b/lib/jnpr/junos/device.py
index 9c335e50..c2bd3712 100644
--- a/lib/jnpr/junos/device.py
+++ b/lib/jnpr/junos/device.py
@@ -919,7 +919,7 @@ class Device(_Connection):
# protocol: operation-failed
# error: device asdf not found
# </rpc-reply>
- if rpc_rsp_e.text.strip() is not '':
+ if rpc_rsp_e.text is not None and rpc_rsp_e.text.strip() is not '':
return rpc_rsp_e
# no children, so assume it means we are OK
return True
diff --git a/lib/jnpr/junos/utils/scp.py b/lib/jnpr/junos/utils/scp.py
index 8a150d6c..adc49fd3 100644
--- a/lib/jnpr/junos/utils/scp.py
+++ b/lib/jnpr/junos/utils/scp.py
@@ -73,8 +73,11 @@ class SCP(object):
.. note:: This method uses the same username/password authentication
credentials as used by :class:`jnpr.junos.device.Device`.
- It can also use ``ssh_private_key_file`` option if provided
- to the :class:`jnpr.junos.device.Device`
+
+ .. warning:: The :class:`jnpr.junos.device.Device` ``ssh_private_key_file``
+ option is currently **not** supported.
+
+ .. todo:: add support for ``ssh_private_key_file``.
:returns: SCPClient object
"""
@@ -89,7 +92,6 @@ class SCP(object):
# through a jumphost.
config = {}
- kwargs = {}
ssh_config = getattr(junos, '_sshconf_path')
if ssh_config:
config = paramiko.SSHConfig()
@@ -99,9 +101,6 @@ class SCP(object):
if config.get("proxycommand"):
sock = paramiko.proxy.ProxyCommand(config.get("proxycommand"))
- if self._junos._ssh_private_key_file is not None:
- kwargs['key_filename']=self._junos._ssh_private_key_file
-
self._ssh.connect(hostname=junos._hostname,
port=(
22, int(
@@ -109,7 +108,7 @@ class SCP(object):
junos._hostname == 'localhost'],
username=junos._auth_user,
password=junos._auth_password,
- sock=sock, **kwargs
+ sock=sock
)
return SCPClient(self._ssh.get_transport(), **scpargs)
diff --git a/lib/jnpr/junos/utils/start_shell.py b/lib/jnpr/junos/utils/start_shell.py
index 07956c0e..67a0e1e1 100644
--- a/lib/jnpr/junos/utils/start_shell.py
+++ b/lib/jnpr/junos/utils/start_shell.py
@@ -1,7 +1,6 @@
import paramiko
from select import select
import re
-import datetime
_JUNOS_PROMPT = '> '
_SHELL_PROMPT = '(%|#)\s'
@@ -22,28 +21,20 @@ class StartShell(object):
"""
- def __init__(self, nc, timeout=30):
+ def __init__(self, nc):
"""
Utility Constructor
:param Device nc: The Device object
-
- :param int timeout:
- Timeout value in seconds to wait for expected string/pattern.
"""
self._nc = nc
- self.timeout = timeout
- def wait_for(self, this=_SHELL_PROMPT, timeout=0):
+ def wait_for(self, this=_SHELL_PROMPT):
"""
Wait for the result of the command, expecting **this** prompt.
:param str this: expected string/pattern.
- :param int timeout:
- Timeout value in seconds to wait for expected string/pattern.
- If not specified defaults to self.timeout.
-
:returns: resulting string of data in a list
:rtype: list
@@ -51,15 +42,12 @@ class StartShell(object):
"""
chan = self._chan
got = []
- timeout = timeout or self.timeout
- timeout = datetime.datetime.now()+datetime.timedelta(
- seconds=timeout)
- while timeout > datetime.datetime.now():
+ while True:
rd, wr, err = select([chan], [], [], _SELECT_WAIT)
if rd:
data = chan.recv(_RECVSZ)
if isinstance(data, bytes):
- data = data.decode('utf-8', 'replace')
+ data = data.decode('utf-8')
got.append(data)
if re.search(r'{0}\s?$'.format(this), data):
break
@@ -106,7 +94,7 @@ class StartShell(object):
self._chan.close()
self._client.close()
- def run(self, command, this=_SHELL_PROMPT, timeout=0):
+ def run(self, command, this=_SHELL_PROMPT):
"""
Run a shell command and wait for the response. The return is a
tuple. The first item is True/False if exit-code is 0. The second
@@ -114,30 +102,22 @@ class StartShell(object):
:param str command: the shell command to execute
:param str this: the exected shell-prompt to wait for
- :param int timeout:
- Timeout value in seconds to wait for expected string/pattern (this).
- If not specified defaults to self.timeout. This timeout is specific
- to individual run call.
:returns: (last_ok, result of the executed shell command (str) )
.. note:: as a *side-effect* this method will set the ``self.last_ok``
property. This property is set to ``True`` if ``$?`` is
- "0"; indicating the last shell command was successful else
- False
+ "0"; indicating the last shell command was successful.
"""
- timeout = timeout or self.timeout
# run the command and capture the output
self.send(command)
- got = ''.join(self.wait_for(this, timeout))
- self.last_ok = False
- if this != _SHELL_PROMPT:
- self.last_ok = re.search(r'{0}\s?$'.format(this), got) is not None
- elif re.search(r'{0}\s?$'.format(_SHELL_PROMPT), got) is not None:
- # use $? to get the exit code of the command
- self.send('echo $?')
- rc = ''.join(self.wait_for(_SHELL_PROMPT))
- self.last_ok = rc.find('0') > 0
+ got = ''.join(self.wait_for(this))
+
+ # use $? to get the exit code of the command
+ self.send('echo $?')
+ rc = ''.join(self.wait_for(this))
+ self.last_ok = True if rc.find('0') > 0 else False
+
return (self.last_ok, got)
# -------------------------------------------------------------------------
@@ -150,4 +130,3 @@ class StartShell(object):
def __exit__(self, exc_ty, exc_val, exc_tb):
self.close()
-
diff --git a/lib/jnpr/junos/version.py b/lib/jnpr/junos/version.py
index 55ee6d04..95a27f1a 100644
--- a/lib/jnpr/junos/version.py
+++ b/lib/jnpr/junos/version.py
@@ -1,2 +1,2 @@
-VERSION = "2.0.1"
+VERSION = "2.0.0"
DATE = "2016-Sept-01"
diff --git a/setup.py b/setup.py
index 25a7e84c..53fa4682 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ if sys.version_info[:2] == (2, 6):
setup(
name="junos-eznc",
namespace_packages=['jnpr'],
- version="2.0.1",
+ version="2.0.0",
author="Jeremy Schulman, Nitin Kumar",
author_email="[email protected]",
description=("Junos 'EZ' automation for non-programmers"),
 | Config.lock() returns an exception when normalize is on
>>> dev=Device('172.27.62.164',user='root',passwd='lab123',normalize=True)
>>> dev.open()
Device(172.27.62.164)
>>> cu=Config(dev)
>>> cu.lock()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/root/pyez2.0/lib/python2.7/site-packages/jnpr/junos/utils/config.py", line 467, in lock
raise LockError(rsp=JXML.remove_namespaces(err.xml))
AttributeError: 'exceptions.AttributeError' object has no attribute 'xml'
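A sketch of the suspected root cause (an assumption read off the traceback, not verified against the PyEZ source): with `normalize=True` the normalized reply element's `.text` can be `None`, so calling `.strip()` on it raises the `AttributeError` that then gets wrapped instead of a proper `LockError`.
```python
from lxml import etree

# The XML here is made up for illustration; only the None-guard mirrors
# the device.py change in the patch recorded above.
rpc_rsp_e = etree.fromstring('<rpc-reply><ok/></rpc-reply>')
print(rpc_rsp_e.text)  # None -- there is no text before the first child

# Guarding the access avoids the AttributeError:
if rpc_rsp_e.text is not None and rpc_rsp_e.text.strip() != '':
    print('non-empty text')
```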
If normalize is not enabled, there is no problem.
This problem was introduced in PyEZ 2.0. | Juniper/py-junos-eznc | diff --git a/tests/unit/utils/test_scp.py b/tests/unit/utils/test_scp.py
index d72571c8..c0ddd22e 100644
--- a/tests/unit/utils/test_scp.py
+++ b/tests/unit/utils/test_scp.py
@@ -99,22 +99,6 @@ class TestScp(unittest.TestCase):
self.assertEqual(mock_scpclient.mock_calls[0][2]['progress'].__name__,
'_scp_progress')
- @patch('ncclient.manager.connect')
- @patch('paramiko.SSHClient.connect')
- @patch('scp.SCPClient.put')
- @patch('scp.SCPClient.__init__')
- def test_ssh_private_key_file(self, mock_scpclient, mock_put,
- mock_sshclient, mock_ncclient):
- mock_scpclient.return_value = None
- package = 'test.tgz'
- dev = Device(host='1.1.1.1', user='user',
- ssh_private_key_file='/Users/test/testkey')
- dev.open(gather_facts=False)
- with SCP(dev) as scp:
- scp.put(package)
- self.assertEqual(mock_sshclient.mock_calls[0][2]['key_filename'],
- '/Users/test/testkey')
-
@contextmanager
def capture(self, command, *args, **kwargs):
out, sys.stdout = sys.stdout, StringIO()
diff --git a/tests/unit/utils/test_start_shell.py b/tests/unit/utils/test_start_shell.py
index d429748d..eb6dcdca 100644
--- a/tests/unit/utils/test_start_shell.py
+++ b/tests/unit/utils/test_start_shell.py
@@ -42,7 +42,6 @@ class TestStartShell(unittest.TestCase):
@patch('jnpr.junos.utils.start_shell.StartShell.wait_for')
def test_startshell_run(self, mock_wait):
self.shell._chan = MagicMock()
- mock_wait.return_value = ["user % "]
self.shell.run('ls')
self.assertTrue(call.send('echo $?') in self.shell._chan.mock_calls)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 5
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"nose",
"pep8",
"pyflakes",
"coveralls",
"ntc_templates",
"cryptography==3.2",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bcrypt==4.2.1
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
coverage==6.5.0
coveralls==3.3.1
cryptography==44.0.2
docopt==0.6.2
exceptiongroup==1.2.2
future==1.0.0
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
Jinja2==3.1.6
-e git+https://github.com/Juniper/py-junos-eznc.git@aa2d97698f9b741413a9b04b468fe78a168ac0ad#egg=junos_eznc
lxml==5.3.1
MarkupSafe==2.1.5
mock==5.2.0
ncclient==0.6.19
netaddr==1.3.0
nose==1.3.7
ntc_templates==4.0.1
packaging==24.0
paramiko==3.5.1
pep8==1.7.1
pluggy==1.2.0
pycparser==2.21
pyflakes==3.0.1
PyNaCl==1.5.0
pyserial==3.5
pytest==7.4.4
PyYAML==6.0.1
requests==2.31.0
scp==0.15.0
six==1.17.0
textfsm==1.1.3
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: py-junos-eznc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bcrypt==4.2.1
- cffi==1.15.1
- charset-normalizer==3.4.1
- coverage==6.5.0
- coveralls==3.3.1
- cryptography==44.0.2
- docopt==0.6.2
- exceptiongroup==1.2.2
- future==1.0.0
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jinja2==3.1.6
- lxml==5.3.1
- markupsafe==2.1.5
- mock==5.2.0
- ncclient==0.6.19
- netaddr==1.3.0
- nose==1.3.7
- ntc-templates==4.0.1
- packaging==24.0
- paramiko==3.5.1
- pep8==1.7.1
- pluggy==1.2.0
- pycparser==2.21
- pyflakes==3.0.1
- pynacl==1.5.0
- pyserial==3.5
- pytest==7.4.4
- pyyaml==6.0.1
- requests==2.31.0
- scp==0.15.0
- six==1.17.0
- textfsm==1.1.3
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/py-junos-eznc
| [
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_run"
]
| [
"tests/unit/utils/test_scp.py::TestScp::test_scp_proxycommand",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_context"
]
| [
"tests/unit/utils/test_scp.py::TestScp::test_scp_close",
"tests/unit/utils/test_scp.py::TestScp::test_scp_context",
"tests/unit/utils/test_scp.py::TestScp::test_scp_open",
"tests/unit/utils/test_scp.py::TestScp::test_scp_progress",
"tests/unit/utils/test_scp.py::TestScp::test_scp_progress_true",
"tests/unit/utils/test_scp.py::TestScp::test_scp_user_def_progress",
"tests/unit/utils/test_scp.py::TestScp::test_scp_user_def_progress_args_2",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_close",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_open_with_junos_term",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_open_with_shell_term",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_wait_for",
"tests/unit/utils/test_start_shell.py::TestStartShell::test_startshell_wait_for_regex"
]
| []
| Apache License 2.0 | 745 | [
"lib/jnpr/junos/utils/scp.py",
"setup.py",
"lib/jnpr/junos/utils/start_shell.py",
"lib/jnpr/junos/device.py",
"lib/jnpr/junos/version.py",
"Dockerfile",
".dockerignore"
]
| [
"lib/jnpr/junos/utils/scp.py",
"setup.py",
"lib/jnpr/junos/utils/start_shell.py",
"lib/jnpr/junos/device.py",
"lib/jnpr/junos/version.py",
"Dockerfile",
".dockerignore"
]
|
|
napjon__krisk-41 | 6944f1150e675a1e4ef824bc3ed6c0c41526e54a | 2016-09-06 10:23:36 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/41?src=pr) is 93.49% (diff: 88.23%)
> Merging [#41](https://codecov.io/gh/napjon/krisk/pull/41?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will decrease coverage by **0.33%**
```diff
@@ 0.2-develop #41 diff @@
=============================================
Files 10 10
Lines 308 323 +15
Methods 0 0
Messages 0 0
Branches 42 48 +6
=============================================
+ Hits 289 302 +13
- Misses 9 10 +1
- Partials 10 11 +1
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [6944f11...c0196f5](https://codecov.io/gh/napjon/krisk/compare/6944f1150e675a1e4ef824bc3ed6c0c41526e54a...c0196f5585fb08641348110a5537a8df2930575a?src=pr) | diff --git a/krisk/plot/__init__.py b/krisk/plot/__init__.py
index e702c4a..842f2bd 100644
--- a/krisk/plot/__init__.py
+++ b/krisk/plot/__init__.py
@@ -55,7 +55,8 @@ def line(df,
stacked=False,
area=False,
annotate=None,
- full=False):
+ full=False,
+ smooth=False):
"""
Parameters
----------
@@ -77,12 +78,15 @@ def line(df,
category. if 'all' and stacked, annotate all category
full: boolean, default to False.
If true, set to full area stacked chart. Only work if stacked is True.
+ smooth: boolean, default to False.
+ If true, smooth the line.
Returns
-------
Chart Object
"""
return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,full=full,
+ smooth=smooth,
annotate='top' if annotate == True else annotate)
@@ -92,7 +96,8 @@ def hist(df,
bins=10,
normed=False,
stacked=False,
- annotate=None):
+ annotate=None,
+ density=False):
"""
Parameters
----------
@@ -110,13 +115,16 @@ def hist(df,
Whether to stacked category on top of the other categories.
annotate: string, {'all',True} default to None
if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ category. if 'all' and stacked, annotate all category
+ density: boolean, default to False.
+ Whether to add density to the plot
Returns
-------
Chart Object
"""
return make_chart(df,type='hist',x=x,c=c,bins=bins,normed=normed,stacked=stacked,
+ density=density,
annotate='top' if annotate == True else annotate)
@@ -134,6 +142,7 @@ def scatter(df, x, y, s=None, c=None, size_px=(10, 70)):
column used as grouping color category
size_px: tuple, default to (10,70)
boundary size, lower and upper limit in pixel for min-max scatter points
+
Returns
-------
diff --git a/krisk/plot/bar_line.py b/krisk/plot/bar_line.py
index 1101415..4c35402 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/bar_line.py
@@ -2,7 +2,7 @@ from copy import deepcopy
import numpy as np
import pandas as pd
-from krisk.plot.make_chart import insert_series_data
+from krisk.plot.make_chart import insert_series_data, round_list
def set_bar_line_chart(chart, df, x, c, **kwargs):
@@ -50,9 +50,12 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
if kwargs['annotate'] == 'top':
series[-1]['label'] = d_annotate
+ # TODO: make annotate receive all kinds supported in echarts.
+ # Special Bar Condition: Trendline
if kwargs['type'] == 'bar' and kwargs['trendline']:
- trendline = {'name':'trendline', 'type': 'line'}
+ trendline = {'name':'trendline', 'type': 'line',
+ 'lineStyle': {'normal': {'color': '#000'}}}
if c and kwargs['stacked']:
trendline['data'] = [0] * len(series[-1]['data'])
@@ -64,9 +67,39 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
series.append(trendline)
+ # Special Line Condition: Smooth
+ if kwargs['type'] == 'line' and kwargs['smooth']:
+ for s in series:
+ s['smooth'] = True
+
+
+ # Special Histogram Condition: Density
+ #TODO NEED IMPROVEMENT!
+ if kwargs['type'] == 'hist' and kwargs['density']:
+
+ density = {'name':'density', 'type': 'line', 'smooth': True,
+ 'lineStyle': {'normal': {'color': '#000'}}}
+ chart.option['xAxis']['boundaryGap'] = False
+
+ # The density have to be closed at zero. So all of xAxis and series must be updated
+ # To incorporate the changes
+ chart.option['xAxis']['data'] = [0] + chart.option['xAxis']['data'] + [0]
+
+ for s in series:
+ s['data'] = [0] + s['data']
+
+ if c and kwargs['stacked']:
+ density['data'] = [0] + round_list(data.sum(axis=1)) + [0]
+ elif c is None:
+ density['data'] = [0] + round_list(data) + [0]
+ else:
+ raise AssertionError('Density must either stacked category, or not category')
+
+ series.append(density)
+
- # TODO: make annotate receive all kinds supported in echarts.
+
def get_bar_line_data(df, x, c, y, **kwargs):
@@ -89,6 +122,7 @@ def get_bar_line_data(df, x, c, y, **kwargs):
if c and kwargs['stacked'] and kwargs['full']:
data = data.div(data.sum(1),axis=0)
+
return data
 | Add smooth line for histograms and all types of line charts
Echarts Density Line for Histogram: http://echarts.baidu.com/demo.html#mix-line-bar
* Add an option to draw a histogram with bars, a line, or both.
* For all types of line charts, add a `smooth` boolean as an additional argument, applied to every line series. | napjon/krisk | diff --git a/krisk/tests/data/bar_year_pop_mean_continent_trendline.json b/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
index 89aa040..98ef7d2 100644
--- a/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
+++ b/krisk/tests/data/bar_year_pop_mean_continent_trendline.json
@@ -1,4 +1,20 @@
{
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
"legend": {
"data": [
"Africa",
@@ -14,6 +30,7 @@
"yAxis": {},
"series": [
{
+ "name": "Africa",
"stack": "continent",
"type": "bar",
"data": [
@@ -29,10 +46,10 @@
29072015,
31287142,
33333216
- ],
- "name": "Africa"
+ ]
},
{
+ "name": "Americas",
"stack": "continent",
"type": "bar",
"data": [
@@ -48,10 +65,10 @@
36203463,
38331121,
40301927
- ],
- "name": "Americas"
+ ]
},
{
+ "name": "Asia",
"stack": "continent",
"type": "bar",
"data": [
@@ -67,10 +84,10 @@
22227415,
25268405,
31889923
- ],
- "name": "Asia"
+ ]
},
{
+ "name": "Europe",
"stack": "continent",
"type": "bar",
"data": [
@@ -86,10 +103,10 @@
3428038,
3508512,
3600523
- ],
- "name": "Europe"
+ ]
},
{
+ "name": "Oceania",
"stack": "continent",
"type": "bar",
"data": [
@@ -105,11 +122,14 @@
18565243,
19546792,
20434176
- ],
- "name": "Oceania"
+ ]
},
{
- "stack": "continent",
+ "lineStyle": {
+ "normal": {
+ "color": "#000"
+ }
+ },
"data": [
0,
0,
@@ -124,6 +144,7 @@
0,
0
],
+ "stack": "continent",
"type": "line",
"name": "trendline"
}
@@ -132,21 +153,5 @@
"axisPointer": {
"type": ""
}
- },
- "xAxis": {
- "data": [
- 1952,
- 1957,
- 1962,
- 1967,
- 1972,
- 1977,
- 1982,
- 1987,
- 1992,
- 1997,
- 2002,
- 2007
- ]
}
}
\ No newline at end of file
diff --git a/krisk/tests/data/bar_year_pop_mean_trendline.json b/krisk/tests/data/bar_year_pop_mean_trendline.json
index 15ef467..f806836 100644
--- a/krisk/tests/data/bar_year_pop_mean_trendline.json
+++ b/krisk/tests/data/bar_year_pop_mean_trendline.json
@@ -1,4 +1,20 @@
{
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
"legend": {
"data": []
},
@@ -8,6 +24,7 @@
"yAxis": {},
"series": [
{
+ "name": "year",
"type": "bar",
"data": [
9111144.6,
@@ -22,10 +39,14 @@
21899234.8,
23588394.4,
25911953.0
- ],
- "name": "year"
+ ]
},
{
+ "lineStyle": {
+ "normal": {
+ "color": "#000"
+ }
+ },
"data": [
9111144.6,
10062280.4,
@@ -48,21 +69,5 @@
"axisPointer": {
"type": ""
}
- },
- "xAxis": {
- "data": [
- 1952,
- 1957,
- 1962,
- 1967,
- 1972,
- 1977,
- 1982,
- 1987,
- 1992,
- 1997,
- 2002,
- 2007
- ]
}
}
\ No newline at end of file
diff --git a/krisk/tests/data/hist_lifeExp_b10_continent_density.json b/krisk/tests/data/hist_lifeExp_b10_continent_density.json
new file mode 100644
index 0000000..247f887
--- /dev/null
+++ b/krisk/tests/data/hist_lifeExp_b10_continent_density.json
@@ -0,0 +1,154 @@
+{
+ "xAxis": {
+ "boundaryGap": false,
+ "data": [
+ 0,
+ 28,
+ 34,
+ 39,
+ 44,
+ 49,
+ 55,
+ 60,
+ 65,
+ 70,
+ 75,
+ 81,
+ 0
+ ]
+ },
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "title": {
+ "text": ""
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "name": "Africa",
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 0,
+ 0,
+ 0,
+ 1,
+ 2,
+ 2,
+ 1,
+ 1,
+ 3,
+ 2,
+ 0
+ ]
+ },
+ {
+ "name": "Americas",
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3,
+ 4,
+ 5,
+ 0
+ ]
+ },
+ {
+ "name": "Asia",
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 0,
+ 3,
+ 2,
+ 6,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0
+ ]
+ },
+ {
+ "name": "Europe",
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2,
+ 1,
+ 4,
+ 4,
+ 1
+ ]
+ },
+ {
+ "name": "Oceania",
+ "stack": "continent",
+ "type": "bar",
+ "data": [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2,
+ 5,
+ 5
+ ]
+ },
+ {
+ "lineStyle": {
+ "normal": {
+ "color": "#000"
+ }
+ },
+ "data": [
+ 0,
+ 3,
+ 2,
+ 7,
+ 2,
+ 2,
+ 3,
+ 5,
+ 13,
+ 16,
+ 6,
+ 0
+ ],
+ "smooth": true,
+ "type": "line",
+ "name": "density"
+ }
+ ],
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index bdce2e5..08cf413 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -45,7 +45,18 @@ def test_bar(gapminder):
'xAxis': {'data': ['Africa', 'Americas', 'Asia', 'Europe', 'Oceania']},
'yAxis': {}}
+def test_trendline(gapminder):
+ p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True)
+ assert p.get_option() == read_option_tests('bar_year_pop_mean_trendline.json')
+
+ p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent',stacked=True)
+ assert p.get_option() == read_option_tests('bar_year_pop_mean_continent_trendline.json')
+
+ try:
+ kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent')
+ except AssertionError:
+ pass
def test_line(gapminder):
@@ -62,6 +73,11 @@ def test_line(gapminder):
assert p.get_option() == true_option
+def test_smooth_line(gapminder):
+
+ p = kk.line(gapminder[gapminder.year == 1952],'continent',y='pop',how='mean',smooth=True)
+ assert p.get_option()['series'][0]['smooth'] == True
+
def test_full_bar_line(gapminder):
bar = kk.bar(gapminder,'year',c='continent',y='pop',how='mean',stacked=True,full=True,annotate='all')
line = kk.line(gapminder,'year',c='continent',y='pop',how='mean',stacked=True,full=True,annotate='all')
@@ -78,19 +94,6 @@ def test_full_bar_line(gapminder):
assert bar.option == line.option == true_option
-def test_trendline(gapminder):
-
- p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True)
- assert p.get_option() == read_option_tests('bar_year_pop_mean_trendline.json')
-
- p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent',stacked=True)
- assert p.get_option() == read_option_tests('bar_year_pop_mean_continent_trendline.json')
-
- try:
- kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent')
- except AssertionError:
- pass
-
def test_hist(gapminder):
true_option = read_option_tests('hist_x.json')
@@ -98,7 +101,7 @@ def test_hist(gapminder):
assert p.get_option() == true_option
- true_option = json.load(open(DATA_DIR + '/hist.json', 'r'))
+ true_option = read_option_tests('hist.json')
p = kk.hist(
gapminder,
'lifeExp',
@@ -110,6 +113,30 @@ def test_hist(gapminder):
assert p.get_option() == true_option
+def test_density(gapminder):
+
+ option = kk.hist(gapminder,'lifeExp',density=True).get_option()
+
+ assert option['series'][0]['data'] == [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6]
+ assert option['series'][-1] == {'data': [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6, 0],
+ 'lineStyle': {'normal': {'color': '#000'}},
+ 'name': 'density',
+ 'smooth': True,
+ 'type': 'line'}
+ assert option['xAxis'] == {'boundaryGap': False,
+ 'data': [0, 28, 34, 39, 44, 49, 55, 60, 65, 70, 75, 81, 0]}
+
+ true_option = read_option_tests('hist_lifeExp_b10_continent_density.json')
+ option2 = kk.hist(gapminder,'lifeExp',bins=10,c='continent',stacked=True,density=True).get_option()
+ assert true_option == option2
+
+ try:
+ kk.hist(gapminder,'year',density=True,c='continent')
+ except AssertionError:
+ pass
+
+
+
def test_scatter(gapminder):
# Simple Scatter
p = kk.scatter(gapminder[gapminder.year == 1952],'pop','lifeExp')
@@ -117,7 +144,7 @@ def test_scatter(gapminder):
assert p.get_option() == true_option
# Grouped Scatter
- true_option = json.load(open(DATA_DIR + '/scatter.json', 'r'))
+ true_option = read_option_tests('scatter.json')
p = kk.scatter(
gapminder[gapminder.year == 1952],
'lifeExp',
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@6944f1150e675a1e4ef824bc3ed6c0c41526e54a#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_trendline",
"krisk/tests/test_plot.py::test_smooth_line",
"krisk/tests/test_plot.py::test_density"
]
| []
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| BSD 3-Clause "New" or "Revised" License | 746 | [
"krisk/plot/bar_line.py",
"krisk/plot/__init__.py"
]
| [
"krisk/plot/bar_line.py",
"krisk/plot/__init__.py"
]
|
dask__dask-1529 | abde2826b9f3e591dd5a8b0f34286f5cbcfea6fe | 2016-09-07 23:49:47 | abde2826b9f3e591dd5a8b0f34286f5cbcfea6fe | jcrist: Oops, turns out slicing `np.memmap` objects returns a `np.memmap`, so this PR isn't quite right due to my `getarray` -> `getitem` swap for those. Before I revert that, it might be nice to consider a third getter function that allows these rewrites but still converts to `np.ndarray` internally. So we'd have:
```
getitem: no conversion, allows rewrites
getarray: conversion, no rewrites
safe_getarray: conversion, allows rewrites
```
Depending on backend, `from_array` could then use `getarray` or `safe_getarray` appropriately.
Alternatively, a parameter `safe` could be added to `getarray`, and the rewrite could check that to see if it's valid. Either works.
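A minimal sketch of what those three variants might look like (assumed bodies for illustration — `getitem` and `getarray` already exist in dask, while `safe_getarray` is only the proposal above):
```python
import numpy as np

def getitem(a, b):
    # no ndarray conversion; the optimizer may fuse nested indexing
    return a[b]

def getarray(a, b, lock=None):
    # converts to np.ndarray; the optimizer leaves the index untouched
    if lock:
        lock.acquire()
    try:
        c = a[b]
        if type(c) is not np.ndarray:
            c = np.asarray(c)
    finally:
        if lock:
            lock.release()
    return c

def safe_getarray(a, b, lock=None):
    # same conversion as getarray, but marked safe for index rewrites
    return getarray(a, b, lock=lock)
```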
shoyer: As is, this would be a significant performance regression for xarray. We need a way to be able to call `da.from_array` with these optimizations on non-numpy arrays.
jcrist: Would exposing `safe=True` (with a better keyword name) as a keyword on `from_array` be good, or would you rather have an extensible dispatch (more work for us) to determine which `get*` to use in `from_array` for each type?
shoyer: I think a keyword argument would suffice -- an extensible dispatch system is overkill at this point. We can inspect the types of input arrays on our end and call from_array properly inside xarray's chunk method.
alimanfoo: Just out of interest, what set of fancy indexing features do you make use of in the case where the backend is a numpy array?
FWIW I'd be happy to use a keyword argument if that's easier. Maybe "fancy=True" if the intention is to specify whether the backend supports fancy indexing?
shoyer: > Just out of interest, what set of fancy indexing features do you make use of in the case where the backend is a numpy array?
In the simplest case, just pulling out a single value from some axes of an array, e.g., `x[0, 0, :]` (not even fancy indexing) makes use of fusing. This sort of behavior works with almost any backend (including netCDF4, h5py, etc.), even if it doesn't support full fancy indexing (which is the case for both of those), and often it yields major performance benefits because you no longer need to fully chunk the array ahead of time.
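A simplified illustration of that fusing (hypothetical task graphs in dask's dict format; the keys and the `'backend_array'` placeholder are made up for the example):
```python
from operator import getitem

# Without fusing, the backend must first materialize the whole chunk:
unfused = {'a': 'backend_array',
           'b': (getitem, 'a', (slice(0, 1000), slice(0, 1000))),
           'c': (getitem, 'b', (0, 0))}

# With fusing, the composed index reaches the backend directly, so only
# the single value is read:
fused = {'a': 'backend_array',
         'c': (getitem, 'a', (0, 0))}
```
Here fusing removes the intermediate 1000x1000 read entirely.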
alimanfoo: Zarr supports this, i.e., indexing with slices and/or integers. It doesn't support anything else, e.g., a Boolean array or a list/array of indices.
I asked because if dask needs only a limited subset of fancy indexing features in the backend then I was thinking it might be doable to implement in Zarr. But on reflection I think that would be a stretch; I'd rather keep Zarr to just slices and ints, at least for now.
shoyer: > Zarr supports this, i.e., indexing with slices and/or integers. It doesn't support anything else, e.g., a Boolean array or a list/array of indices.
In that case this isn't even a good fix for Zarr. We need to not fuse fancy indexing operations, but integers/slices should still be fused.
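That rule could be expressed as a predicate like the following (a hypothetical helper for illustration, not dask code):
```python
def can_fuse(index):
    """True only when every component is a plain integer or slice."""
    if not isinstance(index, tuple):
        index = (index,)
    return all(isinstance(i, (int, slice)) for i in index)

assert can_fuse((0, slice(0, 10)))           # basic indexing: safe to fuse
assert not can_fuse(([1, 2], slice(0, 10)))  # fancy (list) index: keep separate
```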
jcrist: I added a keyword `fancy` to `da.from_array`, to indicate whether the backend supports fancy indexing. The default is True, in keeping with current behavior. If False, no fancy indexing will be pushed inside a call to `getarray_nofancy` during the optimization pass.
Example using zarr:
```python
import zarr
import dask.array as da
import numpy as np
x = np.random.randint(0, 4, size=(10000, 100))
zx = zarr.array(x)
dzx = da.from_array(zx, chunks=zx.chunks, fancy=False)
dzx[[1, 2]].compute() # This works fine
dzx = da.from_array(zx, chunks=zx.chunks) # don't pass in `fancy=False`
dzx[[1, 2]].compute() # This errors
``` | diff --git a/dask/array/core.py b/dask/array/core.py
index 66de14033..6d0b6c812 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -55,6 +55,15 @@ def getarray(a, b, lock=None):
return c
+def getarray_nofancy(a, b, lock=None):
+ """ A simple wrapper around ``getarray``.
+
+ Used to indicate to the optimization passes that the backend doesn't
+ support "fancy indexing"
+ """
+ return getarray(a, b, lock=lock)
+
+
from .optimization import optimize
@@ -76,7 +85,7 @@ def slices_from_chunks(chunks):
for start, shape in zip(starts, shapes)]
-def getem(arr, chunks, shape=None, out_name=None):
+def getem(arr, chunks, shape=None, out_name=None, fancy=True, lock=False):
""" Dask getting various chunks from an array-like
>>> getem('X', chunks=(2, 3), shape=(4, 6)) # doctest: +SKIP
@@ -95,8 +104,13 @@ def getem(arr, chunks, shape=None, out_name=None):
chunks = normalize_chunks(chunks, shape)
keys = list(product([out_name], *[range(len(bds)) for bds in chunks]))
+ slices = slices_from_chunks(chunks)
+ getter = getarray if fancy else getarray_nofancy
- values = [(getarray, arr, x) for x in slices_from_chunks(chunks)]
+ if lock:
+ values = [(getter, arr, x, lock) for x in slices]
+ else:
+ values = [(getter, arr, x) for x in slices]
return dict(zip(keys, values))
@@ -1504,17 +1518,28 @@ def normalize_chunks(chunks, shape=None):
return tuple(map(tuple, chunks))
-def from_array(x, chunks, name=None, lock=False):
+def from_array(x, chunks, name=None, lock=False, fancy=True):
""" Create dask array from something that looks like an array
Input must have a ``.shape`` and support numpy-style slicing.
- The ``chunks`` argument must be one of the following forms:
-
- - a blocksize like 1000
- - a blockshape like (1000, 1000)
- - explicit sizes of all blocks along all dimensions
- like ((1000, 1000, 500), (400, 400)).
+ Parameters
+ ----------
+ x : array_like
+ chunks : int, tuple
+ How to chunk the array. Must be one of the following forms:
+ - A blocksize like 1000.
+ - A blockshape like (1000, 1000).
+ - Explicit sizes of all blocks along all dimensions
+ like ((1000, 1000, 500), (400, 400)).
+ name : str, optional
+ The key name to use for the array. Defaults to a hash of ``x``.
+ lock : bool or Lock, optional
+ If ``x`` doesn't support concurrent reads then provide a lock here, or
+ pass in True to have dask.array create one for you.
+ fancy : bool, optional
+ If ``x`` doesn't support fancy indexing (e.g. indexing with lists or
+ arrays) then set to False. Default is True.
Examples
--------
@@ -1538,11 +1563,9 @@ def from_array(x, chunks, name=None, lock=False):
token = tokenize(x, chunks)
original_name = (name or 'array-') + 'original-' + token
name = name or 'array-' + token
- dsk = getem(original_name, chunks, out_name=name)
if lock is True:
lock = Lock()
- if lock:
- dsk = dict((k, v + (lock,)) for k, v in dsk.items())
+ dsk = getem(original_name, chunks, out_name=name, fancy=fancy, lock=lock)
return Array(merge({original_name: x}, dsk), name, chunks, dtype=x.dtype)
diff --git a/dask/array/optimization.py b/dask/array/optimization.py
index 14e4e8bb8..5c5141989 100644
--- a/dask/array/optimization.py
+++ b/dask/array/optimization.py
@@ -4,7 +4,7 @@ from operator import getitem
import numpy as np
-from .core import getarray
+from .core import getarray, getarray_nofancy
from ..core import flatten
from ..optimize import cull, fuse, inline_functions
@@ -18,7 +18,7 @@ def optimize(dsk, keys, **kwargs):
"""
keys = list(flatten(keys))
fast_functions = kwargs.get('fast_functions',
- set([getarray, np.transpose]))
+ set([getarray, getarray_nofancy, np.transpose]))
dsk2, dependencies = cull(dsk, keys)
dsk4, dependencies = fuse(dsk2, keys, dependencies)
dsk5 = optimize_slices(dsk4)
@@ -36,48 +36,53 @@ def optimize_slices(dsk):
See also:
fuse_slice_dict
"""
- getters = (getarray, getitem)
+ fancy_ind_types = (list, np.ndarray)
+ getters = (getarray_nofancy, getarray, getitem)
dsk = dsk.copy()
for k, v in dsk.items():
- if type(v) is tuple:
- if v[0] in getters:
- try:
- func, a, a_index = v
- use_getarray = func is getarray
- except ValueError: # has four elements, includes a lock
- continue
- while type(a) is tuple and a[0] in getters:
- try:
- f2, b, b_index = a
- use_getarray |= f2 is getarray
- except ValueError: # has four elements, includes a lock
- break
- if (type(a_index) is tuple) != (type(b_index) is tuple):
- break
- if ((type(a_index) is tuple) and
- (len(a_index) != len(b_index)) and
- any(i is None for i in b_index + a_index)):
+ if type(v) is tuple and v[0] in getters and len(v) == 3:
+ f, a, a_index = v
+ getter = f
+ while type(a) is tuple and a[0] in getters and len(a) == 3:
+ f2, b, b_index = a
+ if (type(a_index) is tuple) != (type(b_index) is tuple):
+ break
+ if type(a_index) is tuple:
+ indices = b_index + a_index
+ if (len(a_index) != len(b_index) and
+ any(i is None for i in indices)):
break
- try:
- c_index = fuse_slice(b_index, a_index)
- except NotImplementedError:
+ if (f2 is getarray_nofancy and
+ any(isinstance(i, fancy_ind_types) for i in indices)):
break
- (a, a_index) = (b, c_index)
- if use_getarray:
- dsk[k] = (getarray, a, a_index)
- elif (type(a_index) is slice and
- not a_index.start and
- a_index.stop is None and
- a_index.step is None):
- dsk[k] = a
- elif type(a_index) is tuple and all(type(s) is slice and
- not s.start and
- s.stop is None and
- s.step is None
- for s in a_index):
- dsk[k] = a
- else:
- dsk[k] = (getitem, a, a_index)
+ elif (f2 is getarray_nofancy and
+ (type(a_index) in fancy_ind_types or
+ type(b_index) in fancy_ind_types)):
+ break
+ try:
+ c_index = fuse_slice(b_index, a_index)
+ # rely on fact that nested gets never decrease in
+ # strictness e.g. `(getarray, (getitem, ...))` never
+ # happens
+ getter = f2
+ except NotImplementedError:
+ break
+ a, a_index = b, c_index
+ if getter is not getitem:
+ dsk[k] = (getter, a, a_index)
+ elif (type(a_index) is slice and
+ not a_index.start and
+ a_index.stop is None and
+ a_index.step is None):
+ dsk[k] = a
+ elif type(a_index) is tuple and all(type(s) is slice and
+ not s.start and
+ s.stop is None and
+ s.step is None
+ for s in a_index):
+ dsk[k] = a
+ else:
+ dsk[k] = (getitem, a, a_index)
return dsk
diff --git a/dask/optimize.py b/dask/optimize.py
index fce01624c..f17409bad 100644
--- a/dask/optimize.py
+++ b/dask/optimize.py
@@ -176,6 +176,9 @@ def inline(dsk, keys=None, inline_constants=True, dependencies=None):
>>> inline(d, keys='y', inline_constants=False) # doctest: +SKIP
{'x': 1, 'y': (inc, 1), 'z': (add, 'x', (inc, 'x'))}
"""
+ if dependencies and isinstance(next(iter(dependencies.values())), list):
+ dependencies = {k: set(v) for k, v in dependencies.items()}
+
keys = _flat_set(keys)
if dependencies is None:
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 0b7e72326..f2c8cc73d 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -167,30 +167,6 @@ then you should start here.
dataframe.rst
delayed.rst
-**Graphs**
-
-Dask graphs encode algorithms in a simple format involving Python dicts,
-tuples, and functions. This graph format can be used in isolation from the
-dask collections. Working directly with dask graphs is an excellent way to
-implement and test new algorithms in fields such as linear algebra,
-optimization, and machine learning. If you are a *developer*, you should start
-here.
-
-* :doc:`graphs`
-* :doc:`spec`
-* :doc:`custom-graphs`
-* :doc:`optimize`
-
-.. toctree::
- :maxdepth: 1
- :hidden:
- :caption: Graphs
-
- graphs.rst
- spec.rst
- custom-graphs.rst
- optimize.rst
-
**Scheduling**
Schedulers execute task graphs. Dask currently has two main schedulers, one
@@ -229,6 +205,29 @@ help make debugging and profiling graph execution easier.
inspect.rst
diagnostics.rst
+**Graphs**
+
+Internally Dask encodes algorithms in a simple format involving Python dicts,
+tuples, and functions. This graph format can be used in isolation from the
+dask collections. Working directly with dask graphs is rare unless you intend
+to develop new modules with Dask. Even then, :doc:`dask.delayed <delayed>` is
+often a better choice. If you are a *core developer*, then you should start here.
+
+* :doc:`graphs`
+* :doc:`spec`
+* :doc:`custom-graphs`
+* :doc:`optimize`
+
+.. toctree::
+ :maxdepth: 1
+ :hidden:
+ :caption: Graphs
+
+ graphs.rst
+ spec.rst
+ custom-graphs.rst
+ optimize.rst
+
**Help & reference**
* :doc:`changelog`
| da.compress() error with zarr array
I'm running into a problem using ``da.compress()`` with a 2D zarr array. Minimal example is below. I think what happens is that dask implements compress by trying to index (``__getitem__``) the input array with a list of integers. Zarr arrays don't support this, only contiguous slices, and so an error gets raised by zarr. Would it be possible to implement ``da.compress()`` without requiring the underlying storage to support this?
I sort of naively expected that dask would just load each full chunk required from the input array, via a contiguous slice, into a numpy array, then call numpy.compress on this chunk with the appropriate subset of the condition array, if that makes any sense. And so the input array only needs to support contiguous slices in ``__getitem__``.
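A rough sketch of that per-chunk approach (an illustration of the idea only, not dask's implementation; `compress_chunked` is a made-up helper):
```python
import numpy as np

def compress_chunked(arr, condition, chunk_len):
    # `arr` only needs to support contiguous slicing in __getitem__;
    # `condition` is a boolean array over axis 0.
    pieces = []
    for start in range(0, arr.shape[0], chunk_len):
        stop = min(start + chunk_len, arr.shape[0])
        chunk = np.asarray(arr[start:stop])  # one contiguous read
        pieces.append(np.compress(condition[start:stop], chunk, axis=0))
    return np.concatenate(pieces, axis=0)
```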
```
>>> import numpy as np
>>> import zarr
>>> zarr.__version__
'2.0.1'
>>> import dask
>>> dask.__version__
'0.11.0'
>>> x = zarr.array(np.random.randint(0, 4, size=(10000, 100)))
>>> f = np.random.randint(0, 2, size=10000, dtype=bool)
>>> import dask.array as da
>>> d = da.from_array(x, chunks=x.chunks)
>>> d
dask.array<array-6..., shape=(10000, 100), dtype=int64, chunksize=(1250, 13)>
>>> y = da.compress(f, d, axis=0)
>>> y
dask.array<getitem..., shape=(4991, 100), dtype=int64, chunksize=(598, 13)>
>>> y.compute()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/base.py", line 86, in compute
return compute(self, **kwargs)[0]
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/base.py", line 179, in compute
results = get(dsk, keys, **kwargs)
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/threaded.py", line 58, in get
**kwargs)
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/async.py", line 481, in get_async
raise(remote_exception(res, tb))
dask.async.TypeError: expected integer or slice, found: [3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 23, 25, 27, 30, 32, 33, 36, 37, 38, 39, 40, 41, 45, 46, 47, 48, 50, 52, 55, 56, 57, 61, 63, 65, 68, 70, 71, 75, 76, 78, 79, 80, 81, 84, 86, 89, 90, 92, 93, 94, 96, 97, 98, 99, 102, 105, 106, 108, 109, 110, 113, 115, 117, 118, 119, 120, 121, 123, 125, 126, 128, 129, 131, 132, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 149, 150, 151, 152, 154, 155, 157, 158, 164, 165, 166, 167, 168, 169, 171, 172, 174, 176, 178, 182, 183, 184, 187, 188, 190, 194, 195, 198, 202, 203, 204, 208, 209, 210, 211, 212, 213, 216, 217, 218, 219, 220, 222, 223, 224, 228, 229, 232, 234, 235, 238, 240, 242, 247, 252, 253, 254, 258, 263, 264, 267, 268, 269, 271, 272, 273, 282, 286, 287, 289, 290, 291, 292, 296, 297, 299, 300, 304, 305, 311, 312, 313, 315, 319, 321, 325, 328, 329, 331, 334, 338, 339, 341, 343, 344, 345, 348, 352, 354, 355, 364, 365, 370, 372, 373, 377, 378, 381, 383, 384, 389, 396, 398, 403, 404, 405, 409, 411, 412, 416, 420, 421, 422, 424, 427, 428, 430, 433, 434, 437, 439, 441, 448, 450, 451, 452, 458, 459, 461, 462, 463, 465, 472, 474, 475, 477, 479, 480, 482, 483, 484, 485, 486, 487, 488, 489, 490, 493, 496, 498, 501, 502, 505, 506, 507, 510, 512, 514, 515, 520, 521, 525, 528, 535, 536, 537, 538, 540, 544, 548, 549, 550, 559, 560, 562, 564, 565, 566, 568, 570, 571, 577, 578, 579, 580, 581, 584, 585, 591, 592, 593, 594, 595, 599, 600, 602, 604, 607, 608, 609, 613, 615, 616, 618, 624, 626, 629, 631, 632, 634, 636, 637, 642, 644, 645, 646, 648, 650, 651, 652, 653, 657, 660, 661, 662, 663, 666, 668, 670, 672, 675, 679, 681, 682, 687, 688, 692, 694, 696, 697, 699, 701, 702, 704, 706, 708, 710, 711, 713, 715, 717, 718, 720, 724, 726, 727, 730, 731, 733, 735, 737, 738, 739, 740, 741, 742, 747, 748, 750, 753, 758, 761, 762, 768, 769, 771, 776, 778, 779, 780, 781, 783, 785, 788, 791, 792, 793, 794, 795, 797, 803, 807, 810, 814, 818, 821, 826, 827, 830, 832, 839, 841, 843, 844, 845, 846, 848, 850, 851, 852, 855, 857, 858, 861, 862, 864, 866, 868, 869, 873, 877, 878, 880, 882, 884, 887, 889, 892, 893, 894, 895, 899, 900, 902, 903, 904, 905, 909, 911, 913, 914, 917, 920, 921, 926, 932, 935, 936, 938, 940, 942, 946, 947, 948, 949, 952, 953, 956, 957, 959, 962, 963, 964, 966, 969, 971, 972, 973, 974, 976, 979, 980, 990, 991, 996, 998, 999, 1001, 1002, 1003, 1004, 1006, 1007, 1009, 1010, 1011, 1013, 1014, 1015, 1016, 1017, 1018, 1020, 1023, 1024, 1025, 1030, 1031, 1033, 1034, 1035, 1036, 1037, 1043, 1048, 1050, 1054, 1056, 1059, 1061, 1062, 1069, 1070, 1075, 1076, 1077, 1080, 1082, 1084, 1087, 1089, 1092, 1093, 1094, 1099, 1103, 1107, 1108, 1111, 1112, 1114, 1115, 1116, 1117, 1118, 1120, 1125, 1130, 1131, 1132, 1133, 1135, 1139, 1141, 1142, 1143, 1145, 1146, 1147, 1148, 1149, 1152, 1153, 1154, 1156, 1157, 1160, 1161, 1162, 1163, 1165, 1167, 1168, 1170, 1173, 1176, 1177, 1178, 1181, 1186, 1188, 1189, 1192, 1194, 1195, 1196, 1197, 1198, 1200, 1203, 1208, 1209, 1214, 1215, 1217, 1219, 1221, 1222, 1223, 1224, 1227, 1229, 1231, 1232, 1234, 1235, 1236, 1240, 1241, 1242, 1246, 1248]
Traceback
---------
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/async.py", line 263, in execute_task
result = _execute_task(task, data)
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/async.py", line 245, in _execute_task
return func(*args2)
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/dask/array/core.py", line 49, in getarray
c = a[b]
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/zarr/core.py", line 370, in __getitem__
selection = normalize_array_selection(item, self._shape)
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/zarr/util.py", line 183, in normalize_array_selection
for i, l in zip(item, shape))
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/zarr/util.py", line 183, in <genexpr>
for i, l in zip(item, shape))
File "/home/aliman/miniconda3/envs/biipy/lib/python3.5/site-packages/zarr/util.py", line 162, in normalize_axis_selection
raise TypeError('expected integer or slice, found: %r' % item)
``` | dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index e76afa4f1..beb282d9b 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -21,10 +21,10 @@ from dask.utils import raises, ignoring, tmpfile, tmpdir
from dask.utils_test import inc
from dask.array import chunk
-from dask.array.core import (getem, getarray, top, dotmany, concatenate3,
- broadcast_dimensions, Array, stack, concatenate,
- from_array, take, elemwise, isnull, notnull,
- broadcast_shapes, partial_by_order, exp,
+from dask.array.core import (getem, getarray, getarray_nofancy, top, dotmany,
+ concatenate3, broadcast_dimensions, Array, stack,
+ concatenate, from_array, take, elemwise, isnull,
+ notnull, broadcast_shapes, partial_by_order, exp,
tensordot, choose, where, coarsen, insert,
broadcast_to, reshape, fromfunction,
blockdims_from_blockshape, store, optimize,
@@ -51,11 +51,12 @@ def same_keys(a, b):
def test_getem():
- assert getem('X', (2, 3), shape=(4, 6)) == \
- {('X', 0, 0): (getarray, 'X', (slice(0, 2), slice(0, 3))),
- ('X', 1, 0): (getarray, 'X', (slice(2, 4), slice(0, 3))),
- ('X', 1, 1): (getarray, 'X', (slice(2, 4), slice(3, 6))),
- ('X', 0, 1): (getarray, 'X', (slice(0, 2), slice(3, 6)))}
+ for fancy, getter in [(True, getarray), (False, getarray_nofancy)]:
+ sol = {('X', 0, 0): (getter, 'X', (slice(0, 2), slice(0, 3))),
+ ('X', 1, 0): (getter, 'X', (slice(2, 4), slice(0, 3))),
+ ('X', 1, 1): (getter, 'X', (slice(2, 4), slice(3, 6))),
+ ('X', 0, 1): (getter, 'X', (slice(0, 2), slice(3, 6)))}
+ assert getem('X', (2, 3), shape=(4, 6), fancy=fancy) == sol
def test_top():
diff --git a/dask/array/tests/test_optimization.py b/dask/array/tests/test_optimization.py
index 867f048a2..81ddc9ce8 100644
--- a/dask/array/tests/test_optimization.py
+++ b/dask/array/tests/test_optimization.py
@@ -7,7 +7,7 @@ from dask.array.optimization import (getitem, optimize, optimize_slices,
fuse_slice)
from dask.utils import raises
-from dask.array.core import getarray
+from dask.array.core import getarray, getarray_nofancy
def test_fuse_getitem():
@@ -18,6 +18,10 @@ def test_fuse_getitem():
(slice(15, 20), slice(50, 60))),
(getarray, 'x', (slice(1015, 1020), slice(150, 160)))),
+ ((getitem, (getarray_nofancy, 'x', (slice(1000, 2000), slice(100, 200))),
+ (slice(15, 20), slice(50, 60))),
+ (getarray_nofancy, 'x', (slice(1015, 1020), slice(150, 160)))),
+
((getarray, (getarray, 'x', slice(1000, 2000)), 10),
(getarray, 'x', 1010)),
@@ -25,6 +29,10 @@ def test_fuse_getitem():
(slice(15, 20),)),
(getarray, 'x', (slice(1015, 1020), 10))),
+ ((getitem, (getarray_nofancy, 'x', (slice(1000, 2000), 10)),
+ (slice(15, 20),)),
+ (getarray_nofancy, 'x', (slice(1015, 1020), 10))),
+
((getarray, (getarray, 'x', (10, slice(1000, 2000))),
(slice(15, 20),)),
(getarray, 'x', (10, slice(1015, 1020)))),
@@ -117,3 +125,12 @@ def test_dont_fuse_different_slices():
y = x.rechunk((1, 10))
dsk = optimize(y.dask, y._keys())
assert len(dsk) > 100
+
+
+def test_dont_fuse_fancy_indexing_in_getarray_nofancy():
+ dsk = {'a': (getitem, (getarray_nofancy, 'x', (slice(10, 20, None), slice(100, 200, None))),
+ ([1, 3], slice(50, 60, None)))}
+ assert optimize_slices(dsk) == dsk
+
+ dsk = {'a': (getitem, (getarray_nofancy, 'x', [1, 2, 3]), 0)}
+ assert optimize_slices(dsk) == dsk
diff --git a/dask/tests/test_optimize.py b/dask/tests/test_optimize.py
index 8c0059b18..d0835f9e7 100644
--- a/dask/tests/test_optimize.py
+++ b/dask/tests/test_optimize.py
@@ -386,3 +386,14 @@ def test_fuse_selections():
dsk2 = fuse_selections(dsk, getitem, load, merge)
dsk2, dependencies = cull(dsk2, 'y')
assert dsk2 == {'y': (load, 'store', 'part', 'a')}
+
+
+def test_inline_cull_dependencies():
+ d = {'a': 1,
+ 'b': 'a',
+ 'c': 'b',
+ 'd': ['a', 'b', 'c'],
+ 'e': (add, (len, 'd'), 'a')}
+
+ d2, dependencies = cull(d, ['d', 'e'])
+ inline(d2, {'b'}, dependencies=dependencies)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 4
} | 1.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.1.2
aiohttp==3.8.6
aioitertools==0.11.0
aiosignal==1.2.0
async-timeout==4.0.2
asynctest==0.13.0
attrs==22.2.0
botocore==1.23.24
certifi==2021.5.30
charset-normalizer==3.0.1
click==8.0.4
cloudpickle==2.2.1
-e git+https://github.com/dask/dask.git@abde2826b9f3e591dd5a8b0f34286f5cbcfea6fe#egg=dask
distributed==1.13.3
frozenlist==1.2.0
fsspec==2022.1.0
HeapDict==1.0.1
idna==3.10
idna-ssl==1.1.0
importlib-metadata==4.8.3
iniconfig==1.1.1
jmespath==0.10.0
locket==1.0.0
msgpack-python==0.5.6
multidict==5.2.0
numpy==1.19.5
packaging==21.3
pandas==1.1.5
partd==1.2.0
pluggy==1.0.0
psutil==7.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
s3fs==2022.1.0
six==1.17.0
tblib==1.7.0
tomli==1.2.3
toolz==0.12.0
tornado==6.1
typing_extensions==4.1.1
urllib3==1.26.20
wrapt==1.16.0
yarl==1.7.2
zict==2.1.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.1.2
- aiohttp==3.8.6
- aioitertools==0.11.0
- aiosignal==1.2.0
- async-timeout==4.0.2
- asynctest==0.13.0
- attrs==22.2.0
- botocore==1.23.24
- charset-normalizer==3.0.1
- click==8.0.4
- cloudpickle==2.2.1
- distributed==1.13.3
- frozenlist==1.2.0
- fsspec==2022.1.0
- heapdict==1.0.1
- idna==3.10
- idna-ssl==1.1.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jmespath==0.10.0
- locket==1.0.0
- msgpack-python==0.5.6
- multidict==5.2.0
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- partd==1.2.0
- pluggy==1.0.0
- psutil==7.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- s3fs==2022.1.0
- six==1.17.0
- tblib==1.7.0
- tomli==1.2.3
- toolz==0.12.0
- tornado==6.1
- typing-extensions==4.1.1
- urllib3==1.26.20
- wrapt==1.16.0
- yarl==1.7.2
- zict==2.1.0
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_reshape",
"dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_array_slicing_results_in_ndarray",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_digitize",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_atop_kwargs",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_array_core.py::test_array_picklable",
"dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks",
"dask/array/tests/test_optimization.py::test_fuse_getitem",
"dask/array/tests/test_optimization.py::test_optimize_with_getitem_fusion",
"dask/array/tests/test_optimization.py::test_optimize_slicing",
"dask/array/tests/test_optimization.py::test_fuse_slice",
"dask/array/tests/test_optimization.py::test_fuse_slice_with_lists",
"dask/array/tests/test_optimization.py::test_hard_fuse_slice_cases",
"dask/array/tests/test_optimization.py::test_dont_fuse_different_slices",
"dask/array/tests/test_optimization.py::test_dont_fuse_fancy_indexing_in_getarray_nofancy",
"dask/tests/test_optimize.py::test_cull",
"dask/tests/test_optimize.py::test_fuse",
"dask/tests/test_optimize.py::test_fuse_keys",
"dask/tests/test_optimize.py::test_inline",
"dask/tests/test_optimize.py::test_inline_functions",
"dask/tests/test_optimize.py::test_inline_ignores_curries_and_partials",
"dask/tests/test_optimize.py::test_inline_doesnt_shrink_fast_functions_at_top",
"dask/tests/test_optimize.py::test_inline_traverses_lists",
"dask/tests/test_optimize.py::test_inline_functions_protects_output_keys",
"dask/tests/test_optimize.py::test_functions_of",
"dask/tests/test_optimize.py::test_dealias",
"dask/tests/test_optimize.py::test_equivalent",
"dask/tests/test_optimize.py::test_equivalence_uncomparable",
"dask/tests/test_optimize.py::test_sync_keys",
"dask/tests/test_optimize.py::test_sync_uncomparable",
"dask/tests/test_optimize.py::test_merge_sync",
"dask/tests/test_optimize.py::test_fuse_getitem",
"dask/tests/test_optimize.py::test_fuse_selections",
"dask/tests/test_optimize.py::test_inline_cull_dependencies"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_field_access_with_shape",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess"
]
| []
| []
| BSD 3-Clause "New" or "Revised" License | 747 | [
"dask/array/optimization.py",
"dask/array/core.py",
"docs/source/index.rst",
"dask/optimize.py"
]
| [
"dask/array/optimization.py",
"dask/array/core.py",
"docs/source/index.rst",
"dask/optimize.py"
]
|
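> Annotation (not part of the record): the traceback in the dask record above shows a zarr store rejecting a list index, and the test_patch introduces `getarray_nofancy`, a getter for such backends whose presence also tells the slice optimizer not to fuse fancy-index selections into it. Below is a minimal sketch of that contract, assuming nothing beyond NumPy — illustrative only, not dask's actual implementation:

```python
import numpy as np

def getarray_nofancy(a, index):
    # Sketch of a "no fancy indexing" getter: it accepts only integers
    # and slices, mirroring backends (like the zarr store in the
    # traceback) that raise TypeError on list indices. An optimizer that
    # sees this getter in a task graph must keep any later fancy-index
    # getitem as a separate task instead of fusing it in.
    if not isinstance(index, tuple):
        index = (index,)
    for i in index:
        if not isinstance(i, (int, slice)):
            raise TypeError('expected integer or slice, found: %r' % (i,))
    return a[index]

x = np.arange(24).reshape(4, 6)
print(getarray_nofancy(x, (slice(0, 2), slice(0, 3))))  # basic slicing works
# getarray_nofancy(x, ([1, 3], slice(0, 3)))  # would raise TypeError
```

> This matches the new test in the record: `optimize_slices` leaves `(getitem, (getarray_nofancy, 'x', ...), ([1, 3], ...))` untouched rather than producing a single fused fancy-index task.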
zhmcclient__python-zhmcclient-56 | 01674398fed64b49ebbf94ac49236cee4a02b3d3 | 2016-09-08 08:00:52 | 5b90883d9934257d21ead1cbc548423b5fb682f8 | coveralls:
[Coverage Status](https://coveralls.io/builds/7794079)
Coverage increased (+0.4%) to 88.352% when pulling **447c885d4954aacf55f533d18e209b331d2edee9 on andy/create-return-object** into **01674398fed64b49ebbf94ac49236cee4a02b3d3 on master**.
| diff --git a/zhmcclient/_adapter.py b/zhmcclient/_adapter.py
index cf76ac0..615ddc9 100644
--- a/zhmcclient/_adapter.py
+++ b/zhmcclient/_adapter.py
@@ -145,7 +145,9 @@ class AdapterManager(BaseManager):
Returns:
- string: The resource URI of the new adapter.
+ Adapter: The resource object for the new HiperSockets adapter.
+ The object will have its 'object-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -156,7 +158,11 @@ class AdapterManager(BaseManager):
"""
cpc_uri = self.cpc.get_property('object-uri')
result = self.session.post(cpc_uri + '/adapters', body=properties)
- return result['object-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Adapter(self, props['object-uri'], props)
class Adapter(BaseResource):
diff --git a/zhmcclient/_hba.py b/zhmcclient/_hba.py
index 880d3a5..1042c23 100644
--- a/zhmcclient/_hba.py
+++ b/zhmcclient/_hba.py
@@ -79,9 +79,6 @@ class HbaManager(BaseManager):
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
- if not self.partition.full_properties:
- self.partition.pull_full_properties()
-
hbas_res = self.partition.get_property('hba-uris')
hba_list = []
if hbas_res:
@@ -105,7 +102,9 @@ class HbaManager(BaseManager):
Returns:
- string: The resource URI of the new HBA.
+ Hba: The resource object for the new HBA.
+ The object will have its 'element-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -116,7 +115,11 @@ class HbaManager(BaseManager):
"""
partition_uri = self.partition.get_property('object-uri')
result = self.session.post(partition_uri + '/hbas', body=properties)
- return result['element-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Hba(self, props['element-uri'], props)
class Hba(BaseResource):
diff --git a/zhmcclient/_nic.py b/zhmcclient/_nic.py
index 4cc8779..720ee14 100644
--- a/zhmcclient/_nic.py
+++ b/zhmcclient/_nic.py
@@ -79,9 +79,6 @@ class NicManager(BaseManager):
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
- if not self.partition.full_properties:
- self.partition.pull_full_properties()
-
nics_res = self.partition.get_property('nic-uris')
nic_list = []
if nics_res:
@@ -105,7 +102,9 @@ class NicManager(BaseManager):
Returns:
- string: The resource URI of the new NIC.
+ Nic: The resource object for the new NIC.
+ The object will have its 'element-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -116,7 +115,11 @@ class NicManager(BaseManager):
"""
partition_uri = self.partition.get_property('object-uri')
result = self.session.post(partition_uri + '/nics', body=properties)
- return result['element-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Nic(self, props['element-uri'], props)
class Nic(BaseResource):
diff --git a/zhmcclient/_partition.py b/zhmcclient/_partition.py
index 697883e..7c21dd7 100644
--- a/zhmcclient/_partition.py
+++ b/zhmcclient/_partition.py
@@ -110,7 +110,9 @@ class PartitionManager(BaseManager):
Returns:
- string: The resource URI of the new partition.
+ Partition: The resource object for the new partition.
+ The object will have its 'object-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -121,7 +123,11 @@ class PartitionManager(BaseManager):
"""
cpc_uri = self.cpc.get_property('object-uri')
result = self.session.post(cpc_uri + '/partitions', body=properties)
- return result['object-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return Partition(self, props['object-uri'], props)
class Partition(BaseResource):
diff --git a/zhmcclient/_resource.py b/zhmcclient/_resource.py
index 169da83..4554132 100644
--- a/zhmcclient/_resource.py
+++ b/zhmcclient/_resource.py
@@ -110,9 +110,11 @@ class BaseResource(object):
@property
def full_properties(self):
"""
- A boolean indicating whether the resource properties in this object
- are the full set of resource properties, vs. just the short set of
- resource properties as obtained by list functions.
+ A boolean indicating whether or not the resource properties in this
+ object are the full set of resource properties.
+
+ Note that listing resources and creating new resources produces objects
+ that have less than the full set of properties.
"""
return self._full_properties
@@ -170,7 +172,7 @@ class BaseResource(object):
try:
return self._properties[name]
except KeyError:
- if self.full_properties:
+ if self._full_properties:
raise
self.pull_full_properties()
return self._properties[name]
diff --git a/zhmcclient/_virtual_function.py b/zhmcclient/_virtual_function.py
index 02e3127..f4b75d3 100644
--- a/zhmcclient/_virtual_function.py
+++ b/zhmcclient/_virtual_function.py
@@ -78,9 +78,6 @@ class VirtualFunctionManager(BaseManager):
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
- if not self.partition.full_properties:
- self.partition.pull_full_properties()
-
vfs_res = self.partition.get_property('virtual-function-uris')
vf_list = []
if vfs_res:
@@ -105,7 +102,9 @@ class VirtualFunctionManager(BaseManager):
Returns:
- string: The resource URI of the new Virtual Function.
+ VirtualFunction: The resource object for the new virtual function.
+ The object will have its 'element-uri' property set as returned by
+ the HMC, and will also have the input properties set.
Raises:
@@ -117,7 +116,11 @@ class VirtualFunctionManager(BaseManager):
partition_uri = self.partition.get_property('object-uri')
result = self.session.post(partition_uri + '/virtual-functions',
body=properties)
- return result['element-uri']
+ # There should not be overlaps, but just in case there are, the
+ # returned props should overwrite the input props:
+ props = properties.copy()
+ props.update(result)
+ return VirtualFunction(self, props['element-uri'], props)
class VirtualFunction(BaseResource):
} | Return newly created resources as resource objects, not as URIs
In the current design, the `create()` methods return the new resource as its URI string.
This is inconsistent with the strategy of the zhmcclient project to encapsulate resources as Python objects.
**Proposal:**
Return a new Python object for the resource, in which only the URI property is set. That creates 3 different degrees of "property presence":
* only the URI property (only for resources that can be created by users)
* the short set of properties (as returned by list operations)
* the full set of properties (as returned by get properties operations)
The current design already hides the degree of property presence: properties are cached after being pulled from the HMC, and `get_property()` pulls them as needed, so returning such a resource object is consistent with that strategy.
We need to discuss what the `__str__()` and `__repr__()` methods should show (they will probably show the current state, i.e. make the three degrees of property presence visible to users).
We could drop the external availability of the `full_properties` flag, and make that internal only. | zhmcclient/python-zhmcclient | diff --git a/tests/test_adapter.py b/tests/test_adapter.py
index 7a1a4a3..9cfb62b 100644
--- a/tests/test_adapter.py
+++ b/tests/test_adapter.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Adapter
class AdapterTests(unittest.TestCase):
@@ -184,8 +184,11 @@ class AdapterTests(unittest.TestCase):
}
m.post('/api/cpcs/adapter-cpc-id-1/adapters', json=result)
- status = adapter_mgr.create_hipersocket(properties={})
- self.assertEqual(status, result['object-uri'])
+ adapter = adapter_mgr.create_hipersocket(properties={})
+
+ self.assertTrue(isinstance(adapter, Adapter))
+ self.assertEqual(adapter.properties, result)
+ self.assertEqual(adapter.uri, result['object-uri'])
def test_delete(self):
"""
diff --git a/tests/test_hba.py b/tests/test_hba.py
index c9549fe..e4edf34 100644
--- a/tests/test_hba.py
+++ b/tests/test_hba.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Hba
class HbaTests(unittest.TestCase):
@@ -194,8 +194,11 @@ class HbaTests(unittest.TestCase):
}
m.post('/api/partitions/fake-part-id-1/hbas', json=result)
- status = hba_mgr.create(properties={})
- self.assertEqual(status, result['element-uri'])
+ hba = hba_mgr.create(properties={})
+
+ self.assertTrue(isinstance(hba, Hba))
+ self.assertEqual(hba.properties, result)
+ self.assertEqual(hba.uri, result['element-uri'])
def test_delete(self):
"""
diff --git a/tests/test_nic.py b/tests/test_nic.py
index b0a77fe..a5d01e7 100644
--- a/tests/test_nic.py
+++ b/tests/test_nic.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Nic
class NicTests(unittest.TestCase):
@@ -194,8 +194,11 @@ class NicTests(unittest.TestCase):
}
m.post('/api/partitions/fake-part-id-1/nics', json=result)
- status = nic_mgr.create(properties={})
- self.assertEqual(status, result['element-uri'])
+ nic = nic_mgr.create(properties={})
+
+ self.assertTrue(isinstance(nic, Nic))
+ self.assertEqual(nic.properties, result)
+ self.assertEqual(nic.uri, result['element-uri'])
def test_delete(self):
"""
diff --git a/tests/test_partition.py b/tests/test_partition.py
index 501fc0a..e7e468e 100644
--- a/tests/test_partition.py
+++ b/tests/test_partition.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Partition
class PartitionTests(unittest.TestCase):
@@ -165,8 +165,11 @@ class PartitionTests(unittest.TestCase):
}
m.post('/api/cpcs/fake-cpc-id-1/partitions', json=result)
- status = partition_mgr.create(properties={})
- self.assertEqual(status, result['object-uri'])
+ partition = partition_mgr.create(properties={})
+
+ self.assertTrue(isinstance(partition, Partition))
+ self.assertEqual(partition.properties, result)
+ self.assertEqual(partition.uri, result['object-uri'])
def test_start(self):
"""
diff --git a/tests/test_virtual_function.py b/tests/test_virtual_function.py
index dbe8ef2..27e8140 100644
--- a/tests/test_virtual_function.py
+++ b/tests/test_virtual_function.py
@@ -22,7 +22,7 @@ from __future__ import absolute_import
import unittest
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, VirtualFunction
class VirtualFunctionTests(unittest.TestCase):
@@ -206,8 +206,11 @@ class VirtualFunctionTests(unittest.TestCase):
m.post('/api/partitions/fake-part-id-1/virtual-functions',
json=result)
- status = vf_mgr.create(properties={})
- self.assertEqual(status, result['element-uri'])
+ vf = vf_mgr.create(properties={})
+
+ self.assertTrue(isinstance(vf, VirtualFunction))
+ self.assertEqual(vf.properties, result)
+ self.assertEqual(vf.uri, result['element-uri'])
def test_delete(self):
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 6
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"requests-mock",
"testfixtures"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
decorator==5.2.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
requests-mock==1.12.1
six==1.17.0
testfixtures==8.3.0
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/zhmcclient/python-zhmcclient.git@01674398fed64b49ebbf94ac49236cee4a02b3d3#egg=zhmcclient
| name: python-zhmcclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- decorator==5.2.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- requests-mock==1.12.1
- six==1.17.0
- testfixtures==8.3.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/python-zhmcclient
| [
"tests/test_adapter.py::AdapterTests::test_create_hipersocket",
"tests/test_hba.py::HbaTests::test_create",
"tests/test_nic.py::NicTests::test_create",
"tests/test_partition.py::PartitionTests::test_create",
"tests/test_virtual_function.py::VirtualFunctionTests::test_create"
]
| []
| [
"tests/test_adapter.py::AdapterTests::test_delete",
"tests/test_adapter.py::AdapterTests::test_init",
"tests/test_adapter.py::AdapterTests::test_list_full_ok",
"tests/test_adapter.py::AdapterTests::test_list_short_ok",
"tests/test_adapter.py::AdapterTests::test_update_properties",
"tests/test_hba.py::HbaTests::test_delete",
"tests/test_hba.py::HbaTests::test_init",
"tests/test_hba.py::HbaTests::test_list_full_ok",
"tests/test_hba.py::HbaTests::test_list_short_ok",
"tests/test_hba.py::HbaTests::test_update_properties",
"tests/test_nic.py::NicTests::test_delete",
"tests/test_nic.py::NicTests::test_init",
"tests/test_nic.py::NicTests::test_list_full_ok",
"tests/test_nic.py::NicTests::test_list_short_ok",
"tests/test_nic.py::NicTests::test_update_properties",
"tests/test_partition.py::PartitionTests::test_delete",
"tests/test_partition.py::PartitionTests::test_dump_partition",
"tests/test_partition.py::PartitionTests::test_init",
"tests/test_partition.py::PartitionTests::test_list_full_ok",
"tests/test_partition.py::PartitionTests::test_list_short_ok",
"tests/test_partition.py::PartitionTests::test_mount_iso_image",
"tests/test_partition.py::PartitionTests::test_psw_restart",
"tests/test_partition.py::PartitionTests::test_start",
"tests/test_partition.py::PartitionTests::test_stop",
"tests/test_partition.py::PartitionTests::test_unmount_iso_image",
"tests/test_partition.py::PartitionTests::test_update_properties",
"tests/test_virtual_function.py::VirtualFunctionTests::test_delete",
"tests/test_virtual_function.py::VirtualFunctionTests::test_init",
"tests/test_virtual_function.py::VirtualFunctionTests::test_list_full_ok",
"tests/test_virtual_function.py::VirtualFunctionTests::test_list_short_ok",
"tests/test_virtual_function.py::VirtualFunctionTests::test_update_properties"
]
| []
| Apache License 2.0 | 748 | [
"zhmcclient/_nic.py",
"zhmcclient/_virtual_function.py",
"zhmcclient/_partition.py",
"zhmcclient/_adapter.py",
"zhmcclient/_hba.py",
"zhmcclient/_resource.py"
]
| [
"zhmcclient/_nic.py",
"zhmcclient/_virtual_function.py",
"zhmcclient/_partition.py",
"zhmcclient/_adapter.py",
"zhmcclient/_hba.py",
"zhmcclient/_resource.py"
]
|
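> Annotation (not part of the record): to make the design in the zhmcclient record above concrete, here is a minimal sketch of the "degrees of property presence" idea — a freshly created resource object may carry only its URI plus the input properties, and `get_property()` pulls the full set on demand. The GET-by-URI session call is an assumption for illustration; this is not the real zhmcclient code:

```python
class Resource(object):
    # Sketch only. A real manager's create() would POST, merge the
    # returned properties over the input properties, and construct one
    # of these objects with just that partial property set.

    def __init__(self, session, uri, properties=None):
        self._session = session
        self.uri = uri
        self._properties = dict(properties or {})
        self._full_properties = False  # partial set until pulled

    def pull_full_properties(self):
        # Hypothetical: assume the session can GET a resource by URI
        # and return its full property dict.
        self._properties = self._session.get(self.uri)
        self._full_properties = True

    def get_property(self, name):
        try:
            return self._properties[name]
        except KeyError:
            if self._full_properties:
                raise
            self.pull_full_properties()
            return self._properties[name]
```

> This mirrors `BaseResource.get_property()` in the patch: callers never observe which of the three degrees of property presence an object is in, which is why `create()` can safely return an object instead of a bare URI.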
clld__clldutils-10 | 9abc525e90db488ca3bcfa1ccc92ec3b64780447 | 2016-09-08 13:18:11 | 9abc525e90db488ca3bcfa1ccc92ec3b64780447 | diff --git a/clldutils/iso_639_3.py b/clldutils/iso_639_3.py
index 5b2a1a5..92e48a1 100644
--- a/clldutils/iso_639_3.py
+++ b/clldutils/iso_639_3.py
@@ -10,7 +10,6 @@ import re
from datetime import date
from collections import defaultdict, OrderedDict
import functools
-from string import ascii_lowercase
from six.moves.urllib.request import urlretrieve, urlopen
@@ -92,10 +91,10 @@ class Code(UnicodeMixin):
def __init__(self, item, tablename, registry):
code = item['Id']
self._change_to = []
- self.retired = False
if tablename == 'Codes':
self._scope = self._scope_map[item['Scope']]
self._type = self._type_map[item['Language_Type']]
+ self.retired = False
elif tablename == 'Retirements':
self._scope = 'Retirement'
self._type = self._rtype_map[item['Ret_Reason']]
@@ -110,12 +109,8 @@ class Code(UnicodeMixin):
self._change_to = [
c for c in self._code_pattern.findall(item['Ret_Remedy'])
if c != code]
- elif tablename == 'Local':
- self._scope = 'Local'
- self._type = 'Special'
else:
raise ValueError(tablename) # pragma: no cover
-
self.code = code
self.name = item['Ref_Name']
self._registry = registry
@@ -139,10 +134,6 @@ class Code(UnicodeMixin):
res.extend(code.change_to)
return res
- @property
- def is_local(self):
- return self._scope == 'Local'
-
@property
def is_macrolanguage(self):
return self._scope == 'Macrolanguage'
@@ -183,10 +174,6 @@ class ISO(OrderedDict):
# been in effect for some time. E.g. lcq has been changed to ppr
# from 2012-02-03 until 2013-01-23 when it was changed back to lcq
self[item['Id']] = Code(item, tablename, self)
- for code in ['q' + x + y
- for x in ascii_lowercase[:ascii_lowercase.index('t') + 1]
- for y in ascii_lowercase]:
- self[code] = Code(dict(Id=code, Ref_Name=None), 'Local', self)
def by_type(self, type_):
return [c for c in self.values() if c._type == type_]
@@ -225,5 +212,4 @@ class ISO(OrderedDict):
@property
def languages(self):
- return [c for c in self.values()
- if not c.is_macrolanguage and not c.is_retired and not c.is_local]
+ return [c for c in self.values() if not c.is_macrolanguage and not c.is_retired]
diff --git a/clldutils/licenses.py b/clldutils/licenses.py
new file mode 100644
index 0000000..530ab62
--- /dev/null
+++ b/clldutils/licenses.py
@@ -0,0 +1,1101 @@
+# coding: utf8
+from __future__ import unicode_literals, print_function, division
+from collections import namedtuple
+
+
+License = namedtuple('License', 'id name url')
+
+_LICENSES = {
+ "Glide": {
+ "name": "3dfx Glide License",
+ "url": "http://www.users.on.net/~triforce/glidexp/COPYING.txt",
+ },
+ "Abstyles": {
+ "name": "Abstyles License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Abstyles",
+ },
+ "AFL-1.1": {
+ "name": "Academic Free License v1.1",
+ "url": "http://opensource.linux-mirror.org/licenses/afl-1.1.txt",
+ },
+ "AFL-1.2": {
+ "name": "Academic Free License v1.2",
+ "url": "http://opensource.linux-mirror.org/licenses/afl-1.2.txt",
+ },
+ "AFL-2.0": {
+ "name": "Academic Free License v2.0",
+ "url": "http://opensource.linux-mirror.org/licenses/afl-2.0.txt",
+ },
+ "AFL-2.1": {
+ "name": "Academic Free License v2.1",
+ "url": "http://opensource.linux-mirror.org/licenses/afl-2.1.txt",
+ },
+ "AFL-3.0": {
+ "name": "Academic Free License v3.0",
+ "url": "http://www.opensource.org/licenses/afl-3.0",
+ },
+ "AMPAS": {
+ "name": "Academy of Motion Picture Arts and Sciences BSD",
+ "url": "https://fedoraproject.org/wiki/Licensing/BSD#AMPASBSD",
+ },
+ "APL-1.0": {
+ "name": "Adaptive Public License 1.0",
+ "url": "http://www.opensource.org/licenses/APL-1.0",
+ },
+ "Adobe-Glyph": {
+ "name": "Adobe Glyph List License",
+ "url": "https://fedoraproject.org/wiki/Licensing/MIT#AdobeGlyph",
+ },
+ "APAFML": {
+ "name": "Adobe Postscript AFM License",
+ "url": "https://fedoraproject.org/wiki/Licensing/AdobePostscriptAFM",
+ },
+ "Adobe-2006": {
+ "name": "Adobe Systems Incorporated Source Code License Agreement",
+ "url": "https://fedoraproject.org/wiki/Licensing/AdobeLicense",
+ },
+ "AGPL-1.0": {
+ "name": "Affero General Public License v1.0",
+ "url": "http://www.affero.org/oagpl.html",
+ },
+ "Afmparse": {
+ "name": "Afmparse License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Afmparse",
+ },
+ "Aladdin": {
+ "name": "Aladdin Free Public License",
+ "url": "http://pages.cs.wisc.edu/~ghost/doc/AFPL/6.01/Public.htm",
+ },
+ "ADSL": {
+ "name": "Amazon Digital Services License",
+ "url": "https://fedoraproject.org/wiki/Licensing/AmazonDigitalServicesLicense",
+ },
+ "AMDPLPA": {
+ "name": "AMD's plpa_map.c License",
+ "url": "https://fedoraproject.org/wiki/Licensing/AMD_plpa_map_License",
+ },
+ "ANTLR-PD": {
+ "name": "ANTLR Software Rights Notice",
+ "url": "http://www.antlr2.org/license.html",
+ },
+ "Apache-1.0": {
+ "name": "Apache License 1.0",
+ "url": "http://www.apache.org/licenses/LICENSE-1.0",
+ },
+ "Apache-1.1": {
+ "name": "Apache License 1.1",
+ "url": "http://apache.org/licenses/LICENSE-1.1",
+ },
+ "Apache-2.0": {
+ "name": "Apache License 2.0",
+ "url": "http://www.apache.org/licenses/LICENSE-2.0",
+ },
+ "AML": {
+ "name": "Apple MIT License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Apple_MIT_License",
+ },
+ "APSL-1.0": {
+ "name": "Apple Public Source License 1.0",
+ "url": "https://fedoraproject.org/wiki/Licensing/Apple_Public_Source_License_1.0",
+ },
+ "APSL-1.2": {
+ "name": "Apple Public Source License 1.2",
+ "url": "http://www.samurajdata.se/opensource/mirror/licenses/apsl.php",
+ },
+ "APSL-2.0": {
+ "name": "Apple Public Source License 2.0",
+ "url": "http://www.opensource.apple.com/license/apsl/",
+ },
+ "Artistic-1.0": {
+ "name": "Artistic License 1.0",
+ "url": "http://opensource.org/licenses/Artistic-1.0",
+ },
+ "Artistic-1.0-Perl": {
+ "name": "Artistic License 1.0 (Perl)",
+ "url": "http://dev.perl.org/licenses/artistic.html",
+ },
+ "Artistic-1.0-cl8": {
+ "name": "Artistic License 1.0 w/clause 8",
+ "url": "http://opensource.org/licenses/Artistic-1.0",
+ },
+ "Artistic-2.0": {
+ "name": "Artistic License 2.0",
+ "url": "http://www.opensource.org/licenses/artistic-license-2.0",
+ },
+ "AAL": {
+ "name": "Attribution Assurance License",
+ "url": "http://www.opensource.org/licenses/attribution",
+ },
+ "Bahyph": {
+ "name": "Bahyph License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Bahyph",
+ },
+ "Barr": {
+ "name": "Barr License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Barr",
+ },
+ "Beerware": {
+ "name": "Beerware License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Beerware",
+ },
+ "BitTorrent-1.1": {
+ "name": "BitTorrent Open Source License v1.1",
+ "url": "http://directory.fsf.org/wiki/License:BitTorrentOSL1.1",
+ },
+ "BSL-1.0": {
+ "name": "Boost Software License 1.0",
+ "url": "http://www.boost.org/LICENSE_1_0.txt",
+ },
+ "Borceux": {
+ "name": "Borceux license",
+ "url": "https://fedoraproject.org/wiki/Licensing/Borceux",
+ },
+ "BSD-2-Clause": {
+ "name": "BSD 2-clause \"Simplified\" License",
+ "url": "http://www.opensource.org/licenses/BSD-2-Clause",
+ },
+ "BSD-2-Clause-FreeBSD": {
+ "name": "BSD 2-clause FreeBSD License",
+ "url": "http://www.freebsd.org/copyright/freebsd-license.html",
+ },
+ "BSD-2-Clause-NetBSD": {
+ "name": "BSD 2-clause NetBSD License",
+ "url": "http://www.netbsd.org/about/redistribution.html#default",
+ },
+ "BSD-3-Clause": {
+ "name": "BSD 3-clause \"New\" or \"Revised\" License",
+ "url": "http://www.opensource.org/licenses/BSD-3-Clause",
+ },
+ "BSD-3-Clause-Clear": {
+ "name": "BSD 3-clause Clear License",
+ "url": "http://labs.metacarta.com/license-explanation.html#license",
+ },
+ "BSD-4-Clause": {
+ "name": "BSD 4-clause \"Original\" or \"Old\" License",
+ "url": "http://directory.fsf.org/wiki/License:BSD_4Clause",
+ },
+ "BSD-Protection": {
+ "name": "BSD Protection License",
+ "url": "https://fedoraproject.org/wiki/Licensing/BSD_Protection_License",
+ },
+ "BSD-3-Clause-Attribution": {
+ "name": "BSD with attribution",
+ "url": "https://fedoraproject.org/wiki/Licensing/BSD_with_Attribution",
+ },
+ "0BSD": {
+ "name": "BSD Zero Clause License",
+ "url": "http://landley.net/toybox/license.html ",
+ },
+ "BSD-4-Clause-UC": {
+ "name": "BSD-4-Clause (University of California-Specific)",
+ "url": "http://www.freebsd.org/copyright/license.html",
+ },
+ "bzip2-1.0.5": {
+ "name": "bzip2 and libbzip2 License v1.0.5",
+ "url": "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html",
+ },
+ "bzip2-1.0.6": {
+ "name": "bzip2 and libbzip2 License v1.0.6",
+ "url": "https://github.com/asimonov-im/bzip2/blob/master/LICENSE",
+ },
+ "Caldera": {
+ "name": "Caldera License",
+ "url": "http://www.lemis.com/grog/UNIX/ancient-source-all.pdf",
+ },
+ "CECILL-1.0": {
+ "name": "CeCILL Free Software License Agreement v1.0",
+ "url": "http://www.cecill.info/licences/Licence_CeCILL_V1-fr.html",
+ },
+ "CECILL-1.1": {
+ "name": "CeCILL Free Software License Agreement v1.1",
+ "url": "http://www.cecill.info/licences/Licence_CeCILL_V1.1-US.html",
+ },
+ "CECILL-2.0": {
+ "name": "CeCILL Free Software License Agreement v2.0",
+ "url": "http://www.cecill.info/licences/Licence_CeCILL_V2-fr.html",
+ },
+ "CECILL-2.1": {
+ "name": "CeCILL Free Software License Agreement v2.1",
+ "url": "http://opensource.org/licenses/CECILL-2.1",
+ },
+ "CECILL-B": {
+ "name": "CeCILL-B Free Software License Agreement",
+ "url": "http://www.cecill.info/licences/Licence_CeCILL-B_V1-fr.html",
+ },
+ "CECILL-C": {
+ "name": "CeCILL-C Free Software License Agreement",
+ "url": "http://www.cecill.info/licences/Licence_CeCILL-C_V1-fr.html",
+ },
+ "ClArtistic": {
+ "name": "Clarified Artistic License",
+ "url": "http://www.ncftp.com/ncftp/doc/LICENSE.txt",
+ },
+ "MIT-CMU": {
+ "name": "CMU License",
+ "url": "https://fedoraproject.org/wiki/Licensing:MIT?rd=Licensing/MIT#CMU_Style",
+ },
+ "CNRI-Jython": {
+ "name": "CNRI Jython License",
+ "url": "http://www.jython.org/license.html",
+ },
+ "CNRI-Python": {
+ "name": "CNRI Python License",
+ "url": "http://www.opensource.org/licenses/CNRI-Python",
+ },
+ "CNRI-Python-GPL-Compatible": {
+ "name": "CNRI Python Open Source GPL Compatible License Agreement",
+ "url": "http://www.python.org/download/releases/1.6.1/download_win/",
+ },
+ "CPOL-1.02": {
+ "name": "Code Project Open License 1.02",
+ "url": "http://www.codeproject.com/info/cpol10.aspx",
+ },
+ "CDDL-1.0": {
+ "name": "Common Development and Distribution License 1.0",
+ "url": "http://www.opensource.org/licenses/cddl1",
+ },
+ "CDDL-1.1": {
+ "name": "Common Development and Distribution License 1.1",
+ "url": "http://glassfish.java.net/public/CDDL+GPL_1_1.html",
+ },
+ "CPAL-1.0": {
+ "name": "Common Public Attribution License 1.0",
+ "url": "http://www.opensource.org/licenses/CPAL-1.0",
+ },
+ "CPL-1.0": {
+ "name": "Common Public License 1.0",
+ "url": "http://opensource.org/licenses/CPL-1.0",
+ },
+ "CATOSL-1.1": {
+ "name": "Computer Associates Trusted Open Source License 1.1",
+ "url": "http://opensource.org/licenses/CATOSL-1.1",
+ },
+ "Condor-1.1": {
+ "name": "Condor Public License v1.1",
+ "url": "http://research.cs.wisc.edu/condor/license.html#condor",
+ },
+ "CC-BY-1.0": {
+ "name": "Creative Commons Attribution 1.0",
+ "url": "https://creativecommons.org/licenses/by/1.0/",
+ },
+ "CC-BY-2.0": {
+ "name": "Creative Commons Attribution 2.0",
+ "url": "https://creativecommons.org/licenses/by/2.0/",
+ },
+ "CC-BY-2.5": {
+ "name": "Creative Commons Attribution 2.5",
+ "url": "https://creativecommons.org/licenses/by/2.5/",
+ },
+ "CC-BY-3.0": {
+ "name": "Creative Commons Attribution 3.0",
+ "url": "https://creativecommons.org/licenses/by/3.0/",
+ },
+ "CC-BY-4.0": {
+ "name": "Creative Commons Attribution 4.0",
+ "url": "https://creativecommons.org/licenses/by/4.0/",
+ },
+ "CC-BY-ND-1.0": {
+ "name": "Creative Commons Attribution No Derivatives 1.0",
+ "url": "https://creativecommons.org/licenses/by-nd/1.0/",
+ },
+ "CC-BY-ND-2.0": {
+ "name": "Creative Commons Attribution No Derivatives 2.0",
+ "url": "https://creativecommons.org/licenses/by-nd/2.0/",
+ },
+ "CC-BY-ND-2.5": {
+ "name": "Creative Commons Attribution No Derivatives 2.5",
+ "url": "https://creativecommons.org/licenses/by-nd/2.5/",
+ },
+ "CC-BY-ND-3.0": {
+ "name": "Creative Commons Attribution No Derivatives 3.0",
+ "url": "https://creativecommons.org/licenses/by-nd/3.0/",
+ },
+ "CC-BY-ND-4.0": {
+ "name": "Creative Commons Attribution No Derivatives 4.0",
+ "url": "https://creativecommons.org/licenses/by-nd/4.0/",
+ },
+ "CC-BY-NC-1.0": {
+ "name": "Creative Commons Attribution Non Commercial 1.0",
+ "url": "https://creativecommons.org/licenses/by-nc/1.0/",
+ },
+ "CC-BY-NC-2.0": {
+ "name": "Creative Commons Attribution Non Commercial 2.0",
+ "url": "https://creativecommons.org/licenses/by-nc/2.0/",
+ },
+ "CC-BY-NC-2.5": {
+ "name": "Creative Commons Attribution Non Commercial 2.5",
+ "url": "https://creativecommons.org/licenses/by-nc/2.5/",
+ },
+ "CC-BY-NC-3.0": {
+ "name": "Creative Commons Attribution Non Commercial 3.0",
+ "url": "https://creativecommons.org/licenses/by-nc/3.0/",
+ },
+ "CC-BY-NC-4.0": {
+ "name": "Creative Commons Attribution Non Commercial 4.0",
+ "url": "https://creativecommons.org/licenses/by-nc/4.0/",
+ },
+ "CC-BY-NC-ND-1.0": {
+ "name": "Creative Commons Attribution Non Commercial No Derivatives 1.0",
+ "url": "https://creativecommons.org/licenses/by-nd-nc/1.0/",
+ },
+ "CC-BY-NC-ND-2.0": {
+ "name": "Creative Commons Attribution Non Commercial No Derivatives 2.0",
+ "url": "https://creativecommons.org/licenses/by-nc-nd/2.0/",
+ },
+ "CC-BY-NC-ND-2.5": {
+ "name": "Creative Commons Attribution Non Commercial No Derivatives 2.5",
+ "url": "https://creativecommons.org/licenses/by-nc-nd/2.5/",
+ },
+ "CC-BY-NC-ND-3.0": {
+ "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0",
+ "url": "https://creativecommons.org/licenses/by-nc-nd/3.0/",
+ },
+ "CC-BY-NC-ND-4.0": {
+ "name": "Creative Commons Attribution Non Commercial No Derivatives 4.0",
+ "url": "https://creativecommons.org/licenses/by-nc-nd/4.0/",
+ },
+ "CC-BY-NC-SA-1.0": {
+ "name": "Creative Commons Attribution Non Commercial Share Alike 1.0",
+ "url": "https://creativecommons.org/licenses/by-nc-sa/1.0/",
+ },
+ "CC-BY-NC-SA-2.0": {
+ "name": "Creative Commons Attribution Non Commercial Share Alike 2.0",
+ "url": "https://creativecommons.org/licenses/by-nc-sa/2.0/",
+ },
+ "CC-BY-NC-SA-2.5": {
+ "name": "Creative Commons Attribution Non Commercial Share Alike 2.5",
+ "url": "https://creativecommons.org/licenses/by-nc-sa/2.5/",
+ },
+ "CC-BY-NC-SA-3.0": {
+ "name": "Creative Commons Attribution Non Commercial Share Alike 3.0",
+ "url": "https://creativecommons.org/licenses/by-nc-sa/3.0/",
+ },
+ "CC-BY-NC-SA-4.0": {
+ "name": "Creative Commons Attribution Non Commercial Share Alike 4.0",
+ "url": "https://creativecommons.org/licenses/by-nc-sa/4.0/",
+ },
+ "CC-BY-SA-1.0": {
+ "name": "Creative Commons Attribution Share Alike 1.0",
+ "url": "https://creativecommons.org/licenses/by-sa/1.0/",
+ },
+ "CC-BY-SA-2.0": {
+ "name": "Creative Commons Attribution Share Alike 2.0",
+ "url": "https://creativecommons.org/licenses/by-sa/2.0/",
+ },
+ "CC-BY-SA-2.5": {
+ "name": "Creative Commons Attribution Share Alike 2.5",
+ "url": "https://creativecommons.org/licenses/by-sa/2.5/",
+ },
+ "CC-BY-SA-3.0": {
+ "name": "Creative Commons Attribution Share Alike 3.0",
+ "url": "https://creativecommons.org/licenses/by-sa/3.0/",
+ },
+ "CC-BY-SA-4.0": {
+ "name": "Creative Commons Attribution Share Alike 4.0",
+ "url": "https://creativecommons.org/licenses/by-sa/4.0/",
+ },
+ "CC0-1.0": {
+ "name": "Creative Commons Zero v1.0 Universal",
+ "url": "https://creativecommons.org/publicdomain/zero/1.0/",
+ },
+ "Crossword": {
+ "name": "Crossword License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Crossword",
+ },
+ "CUA-OPL-1.0": {
+ "name": "CUA Office Public License v1.0",
+ "url": "http://opensource.org/licenses/CUA-OPL-1.0",
+ },
+ "Cube": {
+ "name": "Cube License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Cube",
+ },
+ "D-FSL-1.0": {
+ "name": "Deutsche Freie Software Lizenz",
+ "url": "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/de/D-FSL-1_0_de.txt",
+ },
+ "diffmark": {
+ "name": "diffmark license",
+ "url": "https://fedoraproject.org/wiki/Licensing/diffmark",
+ },
+ "WTFPL": {
+ "name": "Do What The F*ck You Want To Public License",
+ "url": "http://sam.zoy.org/wtfpl/COPYING",
+ },
+ "DOC": {
+ "name": "DOC License",
+ "url": "http://www.cs.wustl.edu/~schmidt/ACE-copying.html",
+ },
+ "Dotseqn": {
+ "name": "Dotseqn License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Dotseqn",
+ },
+ "DSDP": {
+ "name": "DSDP License",
+ "url": "https://fedoraproject.org/wiki/Licensing/DSDP",
+ },
+ "dvipdfm": {
+ "name": "dvipdfm License",
+ "url": "https://fedoraproject.org/wiki/Licensing/dvipdfm",
+ },
+ "EPL-1.0": {
+ "name": "Eclipse Public License 1.0",
+ "url": "http://www.opensource.org/licenses/EPL-1.0",
+ },
+ "ECL-1.0": {
+ "name": "Educational Community License v1.0",
+ "url": "http://opensource.org/licenses/ECL-1.0",
+ },
+ "ECL-2.0": {
+ "name": "Educational Community License v2.0",
+ "url": "http://opensource.org/licenses/ECL-2.0",
+ },
+ "EFL-1.0": {
+ "name": "Eiffel Forum License v1.0",
+ "url": "http://opensource.org/licenses/EFL-1.0",
+ },
+ "EFL-2.0": {
+ "name": "Eiffel Forum License v2.0",
+ "url": "http://opensource.org/licenses/EFL-2.0",
+ },
+ "MIT-advertising": {
+ "name": "Enlightenment License (e16)",
+ "url": "https://fedoraproject.org/wiki/Licensing/MIT_With_Advertising",
+ },
+ "MIT-enna": {
+ "name": "enna License",
+ "url": "https://fedoraproject.org/wiki/Licensing/MIT#enna",
+ },
+ "Entessa": {
+ "name": "Entessa Public License v1.0",
+ "url": "http://opensource.org/licenses/Entessa",
+ },
+ "ErlPL-1.1": {
+ "name": "Erlang Public License v1.1",
+ "url": "http://www.erlang.org/EPLICENSE",
+ },
+ "EUDatagrid": {
+ "name": "EU DataGrid Software License",
+ "url": "http://www.opensource.org/licenses/EUDatagrid",
+ },
+ "EUPL-1.0": {
+ "name": "European Union Public License 1.0",
+ "url": "http://ec.europa.eu/idabc/en/document/7330.html",
+ },
+ "EUPL-1.1": {
+ "name": "European Union Public License 1.1",
+ "url": "http://www.opensource.org/licenses/EUPL-1.1",
+ },
+ "Eurosym": {
+ "name": "Eurosym License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Eurosym",
+ },
+ "Fair": {
+ "name": "Fair License",
+ "url": "http://www.opensource.org/licenses/Fair",
+ },
+ "MIT-feh": {
+ "name": "feh License",
+ "url": "https://fedoraproject.org/wiki/Licensing/MIT#feh",
+ },
+ "Frameworx-1.0": {
+ "name": "Frameworx Open License 1.0",
+ "url": "http://www.opensource.org/licenses/Frameworx-1.0",
+ },
+ "FreeImage": {
+ "name": "FreeImage Public License v1.0",
+ "url": "http://freeimage.sourceforge.net/freeimage-license.txt",
+ },
+ "FTL": {
+ "name": "Freetype Project License",
+ "url": "http://freetype.fis.uniroma2.it/FTL.TXT",
+ },
+ "FSFUL": {
+ "name": "FSF Unlimited License",
+ "url": "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License",
+ },
+ "FSFULLR": {
+ "name": "FSF Unlimited License (with License Retention)",
+ "url": "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License",
+ },
+ "Giftware": {
+ "name": "Giftware License",
+ "url": "http://alleg.sourceforge.net//license.html",
+ },
+ "GL2PS": {
+ "name": "GL2PS License",
+ "url": "http://www.geuz.org/gl2ps/COPYING.GL2PS",
+ },
+ "Glulxe": {
+ "name": "Glulxe License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Glulxe",
+ },
+ "AGPL-3.0": {
+ "name": "GNU Affero General Public License v3.0",
+ "url": "http://www.gnu.org/licenses/agpl.txt",
+ },
+ "GFDL-1.1": {
+ "name": "GNU Free Documentation License v1.1",
+ "url": "http://www.gnu.org/licenses/old-licenses/fdl-1.1.txt",
+ },
+ "GFDL-1.2": {
+ "name": "GNU Free Documentation License v1.2",
+ "url": "http://www.gnu.org/licenses/old-licenses/fdl-1.2.txt",
+ },
+ "GFDL-1.3": {
+ "name": "GNU Free Documentation License v1.3",
+ "url": "http://www.gnu.org/licenses/fdl-1.3.txt",
+ },
+ "GPL-1.0": {
+ "name": "GNU General Public License v1.0 only",
+ "url": "http://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html",
+ },
+ "GPL-2.0": {
+ "name": "GNU General Public License v2.0 only",
+ "url": "http://www.opensource.org/licenses/GPL-2.0",
+ },
+ "GPL-3.0": {
+ "name": "GNU General Public License v3.0 only",
+ "url": "http://www.opensource.org/licenses/GPL-3.0",
+ },
+ "LGPL-2.1": {
+ "name": "GNU Lesser General Public License v2.1 only",
+ "url": "http://www.opensource.org/licenses/LGPL-2.1",
+ },
+ "LGPL-3.0": {
+ "name": "GNU Lesser General Public License v3.0 only",
+ "url": "http://www.opensource.org/licenses/LGPL-3.0",
+ },
+ "LGPL-2.0": {
+ "name": "GNU Library General Public License v2 only",
+ "url": "http://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html",
+ },
+ "gnuplot": {
+ "name": "gnuplot License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Gnuplot",
+ },
+ "gSOAP-1.3b": {
+ "name": "gSOAP Public License v1.3b",
+ "url": "http://www.cs.fsu.edu/~engelen/license.html",
+ },
+ "HaskellReport": {
+ "name": "Haskell Language Report License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Haskell_Language_Report_License",
+ },
+ "HPND": {
+ "name": "Historic Permission Notice and Disclaimer",
+ "url": "http://www.opensource.org/licenses/HPND",
+ },
+ "IPL-1.0": {
+ "name": "IBM Public License v1.0",
+ "url": "http://www.opensource.org/licenses/IPL-1.0",
+ },
+ "ICU": {
+ "name": "ICU License",
+ "url": "http://source.icu-project.org/repos/icu/icu/trunk/license.html",
+ },
+ "ImageMagick": {
+ "name": "ImageMagick License",
+ "url": "http://www.imagemagick.org/script/license.php",
+ },
+ "iMatix": {
+ "name": "iMatix Standard Function Library Agreement",
+ "url": "http://legacy.imatix.com/html/sfl/sfl4.htm#license",
+ },
+ "Imlib2": {
+ "name": "Imlib2 License",
+ "url": "http://trac.enlightenment.org/e/browser/trunk/imlib2/COPYING",
+ },
+ "IJG": {
+ "name": "Independent JPEG Group License",
+ "url": "http://dev.w3.org/cvsweb/Amaya/libjpeg/Attic/README?rev=1.2",
+ },
+ "Intel": {
+ "name": "Intel Open Source License",
+ "url": "http://opensource.org/licenses/Intel",
+ },
+ "IPA": {
+ "name": "IPA Font License",
+ "url": "http://www.opensource.org/licenses/IPA",
+ },
+ "JasPer-2.0": {
+ "name": "JasPer License",
+ "url": "http://www.ece.uvic.ca/~mdadams/jasper/LICENSE",
+ },
+ "JSON": {
+ "name": "JSON License",
+ "url": "http://www.json.org/license.html",
+ },
+ "LPPL-1.3a": {
+ "name": "LaTeX Project Public License 1.3a",
+ "url": "http://www.latex-project.org/lppl/lppl-1-3a.txt",
+ },
+ "LPPL-1.0": {
+ "name": "LaTeX Project Public License v1.0",
+ "url": "http://www.latex-project.org/lppl/lppl-1-0.txt",
+ },
+ "LPPL-1.1": {
+ "name": "LaTeX Project Public License v1.1",
+ "url": "http://www.latex-project.org/lppl/lppl-1-1.txt",
+ },
+ "LPPL-1.2": {
+ "name": "LaTeX Project Public License v1.2",
+ "url": "http://www.latex-project.org/lppl/lppl-1-2.txt",
+ },
+ "LPPL-1.3c": {
+ "name": "LaTeX Project Public License v1.3c",
+ "url": "http://www.opensource.org/licenses/LPPL-1.3c",
+ },
+ "Latex2e": {
+ "name": "Latex2e License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Latex2e",
+ },
+ "BSD-3-Clause-LBNL": {
+ "name": "Lawrence Berkeley National Labs BSD variant license",
+ "url": "https://fedoraproject.org/wiki/Licensing/LBNLBSD",
+ },
+ "Leptonica": {
+ "name": "Leptonica License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Leptonica",
+ },
+ "LGPLLR": {
+ "name": "Lesser General Public License For Linguistic Resources",
+ "url": "http://www-igm.univ-mlv.fr/~unitex/lgpllr.html",
+ },
+ "Libpng": {
+ "name": "libpng License",
+ "url": "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt",
+ },
+ "libtiff": {
+ "name": "libtiff License",
+ "url": "https://fedoraproject.org/wiki/Licensing/libtiff",
+ },
+ "LPL-1.02": {
+ "name": "Lucent Public License v1.02",
+ "url": "http://www.opensource.org/licenses/LPL-1.02",
+ },
+ "LPL-1.0": {
+ "name": "Lucent Public License Version 1.0",
+ "url": "http://opensource.org/licenses/LPL-1.0",
+ },
+ "MakeIndex": {
+ "name": "MakeIndex License",
+ "url": "https://fedoraproject.org/wiki/Licensing/MakeIndex",
+ },
+ "MTLL": {
+ "name": "Matrix Template Library License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Matrix_Template_Library_License",
+ },
+ "MS-PL": {
+ "name": "Microsoft Public License",
+ "url": "http://www.opensource.org/licenses/MS-PL",
+ },
+ "MS-RL": {
+ "name": "Microsoft Reciprocal License",
+ "url": "http://www.opensource.org/licenses/MS-RL",
+ },
+ "MirOS": {
+ "name": "MirOS Licence",
+ "url": "http://www.opensource.org/licenses/MirOS",
+ },
+ "MITNFA": {
+ "name": "MIT +no-false-attribs license",
+ "url": "https://fedoraproject.org/wiki/Licensing/MITNFA",
+ },
+ "MIT": {
+ "name": "MIT License",
+ "url": "http://www.opensource.org/licenses/MIT",
+ },
+ "Motosoto": {
+ "name": "Motosoto License",
+ "url": "http://www.opensource.org/licenses/Motosoto",
+ },
+ "MPL-1.0": {
+ "name": "Mozilla Public License 1.0",
+ "url": "http://www.mozilla.org/MPL/MPL-1.0.html",
+ },
+ "MPL-1.1": {
+ "name": "Mozilla Public License 1.1",
+ "url": "http://www.mozilla.org/MPL/MPL-1.1.html",
+ },
+ "MPL-2.0": {
+ "name": "Mozilla Public License 2.0",
+ "url": "http://www.mozilla.org/MPL/2.0/\nhttp://opensource.org/licenses/MPL-2.0",
+ },
+ "MPL-2.0-no-copyleft-exception": {
+ "name": "Mozilla Public License 2.0 (no copyleft exception)",
+ "url": "http://www.mozilla.org/MPL/2.0/\nhttp://opensource.org/licenses/MPL-2.0",
+ },
+ "mpich2": {
+ "name": "mpich2 License",
+ "url": "https://fedoraproject.org/wiki/Licensing/MIT",
+ },
+ "Multics": {
+ "name": "Multics License",
+ "url": "http://www.opensource.org/licenses/Multics",
+ },
+ "Mup": {
+ "name": "Mup License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Mup",
+ },
+ "NASA-1.3": {
+ "name": "NASA Open Source Agreement 1.3",
+ "url": "http://www.opensource.org/licenses/NASA-1.3",
+ },
+ "Naumen": {
+ "name": "Naumen Public License",
+ "url": "http://www.opensource.org/licenses/Naumen",
+ },
+ "NetCDF": {
+ "name": "NetCDF license",
+ "url": "http://www.unidata.ucar.edu/software/netcdf/copyright.html",
+ },
+ "NGPL": {
+ "name": "Nethack General Public License",
+ "url": "http://www.opensource.org/licenses/NGPL",
+ },
+ "NOSL": {
+ "name": "Netizen Open Source License",
+ "url": "http://bits.netizen.com.au/licenses/NOSL/nosl.txt",
+ },
+ "NPL-1.0": {
+ "name": "Netscape Public License v1.0",
+ "url": "http://www.mozilla.org/MPL/NPL/1.0/",
+ },
+ "NPL-1.1": {
+ "name": "Netscape Public License v1.1",
+ "url": "http://www.mozilla.org/MPL/NPL/1.1/",
+ },
+ "Newsletr": {
+ "name": "Newsletr License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Newsletr",
+ },
+ "NLPL": {
+ "name": "No Limit Public License",
+ "url": "https://fedoraproject.org/wiki/Licensing/NLPL",
+ },
+ "Nokia": {
+ "name": "Nokia Open Source License",
+ "url": "http://www.opensource.org/licenses/nokia",
+ },
+ "NPOSL-3.0": {
+ "name": "Non-Profit Open Software License 3.0",
+ "url": "http://www.opensource.org/licenses/NOSL3.0",
+ },
+ "Noweb": {
+ "name": "Noweb License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Noweb",
+ },
+ "NRL": {
+ "name": "NRL License",
+ "url": "http://web.mit.edu/network/isakmp/nrllicense.html",
+ },
+ "NTP": {
+ "name": "NTP License",
+ "url": "http://www.opensource.org/licenses/NTP",
+ },
+ "Nunit": {
+ "name": "Nunit License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Nunit",
+ },
+ "OCLC-2.0": {
+ "name": "OCLC Research Public License 2.0",
+ "url": "http://www.opensource.org/licenses/OCLC-2.0",
+ },
+ "ODbL-1.0": {
+ "name": "ODC Open Database License v1.0",
+ "url": "http://www.opendatacommons.org/licenses/odbl/1.0/",
+ },
+ "PDDL-1.0": {
+ "name": "ODC Public Domain Dedication & License 1.0",
+ "url": "http://opendatacommons.org/licenses/pddl/1.0/",
+ },
+ "OGTSL": {
+ "name": "Open Group Test Suite License",
+ "url": "http://www.opensource.org/licenses/OGTSL",
+ },
+ "OML": {
+ "name": "Open Market License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Open_Market_License",
+ },
+ "OPL-1.0": {
+ "name": "Open Public License v1.0",
+ "url": "https://fedoraproject.org/wiki/Licensing/Open_Public_License",
+ },
+ "OSL-1.0": {
+ "name": "Open Software License 1.0",
+ "url": "http://opensource.org/licenses/OSL-1.0",
+ },
+ "OSL-1.1": {
+ "name": "Open Software License 1.1",
+ "url": "https://fedoraproject.org/wiki/Licensing/OSL1.1",
+ },
+ "PHP-3.01": {
+ "name": "PHP License v3.01",
+ "url": "http://www.php.net/license/3_01.txt",
+ },
+ "Plexus": {
+ "name": "Plexus Classworlds License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Plexus_Classworlds_License",
+ },
+ "PostgreSQL": {
+ "name": "PostgreSQL License",
+ "url": "http://www.opensource.org/licenses/PostgreSQL",
+ },
+ "psfrag": {
+ "name": "psfrag License",
+ "url": "https://fedoraproject.org/wiki/Licensing/psfrag",
+ },
+ "psutils": {
+ "name": "psutils License",
+ "url": "https://fedoraproject.org/wiki/Licensing/psutils",
+ },
+ "Python-2.0": {
+ "name": "Python License 2.0",
+ "url": "http://www.opensource.org/licenses/Python-2.0",
+ },
+ "QPL-1.0": {
+ "name": "Q Public License 1.0",
+ "url": "http://www.opensource.org/licenses/QPL-1.0",
+ },
+ "Qhull": {
+ "name": "Qhull License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Qhull",
+ },
+ "Rdisc": {
+ "name": "Rdisc License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Rdisc_License",
+ },
+ "RPSL-1.0": {
+ "name": "RealNetworks Public Source License v1.0",
+ "url": "http://www.opensource.org/licenses/RPSL-1.0",
+ },
+ "RPL-1.1": {
+ "name": "Reciprocal Public License 1.1",
+ "url": "http://opensource.org/licenses/RPL-1.1",
+ },
+ "RPL-1.5": {
+ "name": "Reciprocal Public License 1.5",
+ "url": "http://www.opensource.org/licenses/RPL-1.5",
+ },
+ "RHeCos-1.1": {
+ "name": "Red Hat eCos Public License v1.1",
+ "url": "http://ecos.sourceware.org/old-license.html",
+ },
+ "RSCPL": {
+ "name": "Ricoh Source Code Public License",
+ "url": "http://www.opensource.org/licenses/RSCPL",
+ },
+ "RSA-MD": {
+ "name": "RSA Message-Digest License",
+ "url": "http://www.faqs.org/rfcs/rfc1321.html",
+ },
+ "Ruby": {
+ "name": "Ruby License",
+ "url": "http://www.ruby-lang.org/en/LICENSE.txt",
+ },
+ "SAX-PD": {
+ "name": "Sax Public Domain Notice",
+ "url": "http://www.saxproject.org/copying.html",
+ },
+ "Saxpath": {
+ "name": "Saxpath License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Saxpath_License",
+ },
+ "SCEA": {
+ "name": "SCEA Shared Source License",
+ "url": "http://research.scea.com/scea_shared_source_license.html",
+ },
+ "SWL": {
+ "name": "Scheme Widget Library (SWL) Software License Agreement",
+ "url": "https://fedoraproject.org/wiki/Licensing/SWL",
+ },
+ "Sendmail": {
+ "name": "Sendmail License",
+ "url": "http://www.sendmail.com/pdfs/open_source/sendmail_license.pdf",
+ },
+ "SGI-B-1.0": {
+ "name": "SGI Free Software License B v1.0",
+ "url": "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.1.0.html",
+ },
+ "SGI-B-1.1": {
+ "name": "SGI Free Software License B v1.1",
+ "url": "http://oss.sgi.com/projects/FreeB/",
+ },
+ "SGI-B-2.0": {
+ "name": "SGI Free Software License B v2.0",
+ "url": "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.2.0.pdf",
+ },
+ "OFL-1.0": {
+ "name": "SIL Open Font License 1.0",
+ "url": "http://scripts.sil.org/cms/scripts/page.php?item_id=OFL10_web",
+ },
+ "OFL-1.1": {
+ "name": "SIL Open Font License 1.1",
+ "url": "http://www.opensource.org/licenses/OFL-1.1",
+ },
+ "SimPL-2.0": {
+ "name": "Simple Public License 2.0",
+ "url": "http://www.opensource.org/licenses/SimPL-2.0",
+ },
+ "Sleepycat": {
+ "name": "Sleepycat License",
+ "url": "http://www.opensource.org/licenses/Sleepycat",
+ },
+ "SNIA": {
+ "name": "SNIA Public License 1.1",
+ "url": "https://fedoraproject.org/wiki/Licensing/SNIA_Public_License",
+ },
+ "SMLNJ": {
+ "name": "Standard ML of New Jersey License",
+ "url": "http://www.smlnj.org//license.html",
+ },
+ "SugarCRM-1.1.3": {
+ "name": "SugarCRM Public License v1.1.3",
+ "url": "http://www.sugarcrm.com/crm/SPL",
+ },
+ "SISSL": {
+ "name": "Sun Industry Standards Source License v1.1",
+ "url": "http://opensource.org/licenses/SISSL",
+ },
+ "SISSL-1.2": {
+ "name": "Sun Industry Standards Source License v1.2",
+ "url": "http://gridscheduler.sourceforge.net/Gridengine_SISSL_license.html",
+ },
+ "SPL-1.0": {
+ "name": "Sun Public License v1.0",
+ "url": "http://www.opensource.org/licenses/SPL-1.0",
+ },
+ "Watcom-1.0": {
+ "name": "Sybase Open Watcom Public License 1.0",
+ "url": "http://www.opensource.org/licenses/Watcom-1.0",
+ },
+ "TCL": {
+ "name": "TCL/TK License",
+ "url": "https://fedoraproject.org/wiki/Licensing/TCL",
+ },
+ "Unlicense": {
+ "name": "The Unlicense",
+ "url": "http://unlicense.org/",
+ },
+ "TMate": {
+ "name": "TMate Open Source License",
+ "url": "http://svnkit.com/license.html",
+ },
+ "TORQUE-1.1": {
+ "name": "TORQUE v2.5+ Software License v1.1",
+ "url": "https://fedoraproject.org/wiki/Licensing/TORQUEv1.1",
+ },
+ "TOSL": {
+ "name": "Trusster Open Source License",
+ "url": "https://fedoraproject.org/wiki/Licensing/TOSL",
+ },
+ "Unicode-TOU": {
+ "name": "Unicode Terms of Use",
+ "url": "http://www.unicode.org/copyright.html",
+ },
+ "UPL-1.0": {
+ "name": "Universal Permissive License v1.0",
+ "url": "http://opensource.org/licenses/UPL",
+ },
+ "NCSA": {
+ "name": "University of Illinois/NCSA Open Source License",
+ "url": "http://www.opensource.org/licenses/NCSA",
+ },
+ "Vim": {
+ "name": "Vim License",
+ "url": "http://vimdoc.sourceforge.net/htmldoc/uganda.html",
+ },
+ "VOSTROM": {
+ "name": "VOSTROM Public License for Open Source",
+ "url": "https://fedoraproject.org/wiki/Licensing/VOSTROM",
+ },
+ "VSL-1.0": {
+ "name": "Vovida Software License v1.0",
+ "url": "http://www.opensource.org/licenses/VSL-1.0",
+ },
+ "W3C-19980720": {
+ "name": "W3C Software Notice and License (1998-07-20)",
+ "url": "http://www.w3.org/Consortium/Legal/copyright-software-19980720.html",
+ },
+ "W3C": {
+ "name": "W3C Software Notice and License (2002-12-31)",
+ "url": "http://www.opensource.org/licenses/W3C",
+ },
+ "Wsuipa": {
+ "name": "Wsuipa License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Wsuipa",
+ },
+ "Xnet": {
+ "name": "X.Net License",
+ "url": "http://opensource.org/licenses/Xnet",
+ },
+ "X11": {
+ "name": "X11 License",
+ "url": "http://www.xfree86.org/3.3.6/COPYRIGHT2.html#3",
+ },
+ "Xerox": {
+ "name": "Xerox License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Xerox",
+ },
+ "XFree86-1.1": {
+ "name": "XFree86 License 1.1",
+ "url": "http://www.xfree86.org/current/LICENSE4.html",
+ },
+ "xinetd": {
+ "name": "xinetd License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Xinetd_License",
+ },
+ "xpp": {
+ "name": "XPP License",
+ "url": "https://fedoraproject.org/wiki/Licensing/xpp",
+ },
+ "XSkat": {
+ "name": "XSkat License",
+ "url": "https://fedoraproject.org/wiki/Licensing/XSkat_License",
+ },
+ "YPL-1.0": {
+ "name": "Yahoo! Public License v1.0",
+ "url": "http://www.zimbra.com/license/yahoo_public_license_1.0.html",
+ },
+ "YPL-1.1": {
+ "name": "Yahoo! Public License v1.1",
+ "url": "http://www.zimbra.com/license/yahoo_public_license_1.1.html",
+ },
+ "Zed": {
+ "name": "Zed License",
+ "url": "https://fedoraproject.org/wiki/Licensing/Zed",
+ },
+ "Zlib": {
+ "name": "zlib License",
+ "url": "http://www.opensource.org/licenses/Zlib",
+ },
+ "zlib-acknowledgement": {
+ "name": "zlib/libpng License with Acknowledgement",
+ "url": "https://fedoraproject.org/wiki/Licensing/ZlibWithAcknowledgement",
+ },
+ "ZPL-1.1": {
+ "name": "Zope Public License 1.1",
+ "url": "http://old.zope.org/Resources/License/ZPL-1.1",
+ },
+ "ZPL-2.0": {
+ "name": "Zope Public License 2.0",
+ "url": "http://opensource.org/licenses/ZPL-2.0",
+ },
+ "ZPL-2.1": {
+ "name": "Zope Public License 2.1",
+ "url": "http://old.zope.org/Resources/ZPL/",
+ }
+}
+_LICENSES = [License(id_, l['name'], l['url']) for id_, l in _LICENSES.items()]
+
+
+def find(q):
+ for license_ in _LICENSES:
+ if q == license_.id or q == license_.name or q == license_.url:
+ return license_
+ if '://' in q:
+ u1 = license_.url.split('://')[1]
+ u2 = q.split('://')[1]
+ if u1.startswith(u2) or u2.startswith(u1):
+ return license_
| Support license identification following SPDX
There should be support for identifying licenses using [SPDX](http://spdx.org/licenses/) identifiers.
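For illustration, a self-contained sketch of the lookup the `find()` helper in the patch performs (the two-entry table below is an assumption for the example, and `License` is modeled as a namedtuple here only for self-containment; the shipped module carries the full scraped SPDX list):

```python
# Minimal sketch of clldutils.licenses.find; the License records here are
# illustrative stand-ins for the full scraped SPDX table.
from collections import namedtuple

License = namedtuple('License', ['id', 'name', 'url'])

_LICENSES = [
    License('CC-BY-4.0', 'Creative Commons Attribution 4.0',
            'https://creativecommons.org/licenses/by/4.0/'),
    License('MIT', 'MIT License', 'http://www.opensource.org/licenses/MIT'),
]


def find(q):
    for license_ in _LICENSES:
        if q == license_.id or q == license_.name or q == license_.url:
            return license_  # exact match on id, name, or URL
        if '://' in q:
            # scheme-insensitive prefix match, so http/https and
            # trailing-slash variants of the same URL still resolve
            u1 = license_.url.split('://')[1]
            u2 = q.split('://')[1]
            if u1.startswith(u2) or u2.startswith(u1):
                return license_


assert find('http://creativecommons.org/licenses/by/4.0').id == 'CC-BY-4.0'
assert find('CC-BY-4.0').url == 'https://creativecommons.org/licenses/by/4.0/'
```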
Maybe a scraped extract of the SPDX list should be included with clldutils. | clld/clldutils | diff --git a/clldutils/tests/test_licenses.py b/clldutils/tests/test_licenses.py
new file mode 100644
index 0000000..a668fd6
--- /dev/null
+++ b/clldutils/tests/test_licenses.py
@@ -0,0 +1,13 @@
+# coding: utf8
+from __future__ import unicode_literals, print_function, division
+from unittest import TestCase
+
+
+class Tests(TestCase):
+ def test_find(self):
+ from clldutils.licenses import find
+
+ self.assertEqual(
+ find('http://creativecommons.org/licenses/by/4.0').id, 'CC-BY-4.0')
+ self.assertEqual(
+ find('CC-BY-4.0').url, 'https://creativecommons.org/licenses/by/4.0/')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_added_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/clld/clldutils.git@9abc525e90db488ca3bcfa1ccc92ec3b64780447#egg=clldutils
configparser==7.2.0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
tomli==2.2.1
| name: clldutils
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- configparser==7.2.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/clldutils
| [
"clldutils/tests/test_licenses.py::Tests::test_find"
]
| []
| []
| []
| Apache License 2.0 | 749 | [
"clldutils/iso_639_3.py",
"clldutils/licenses.py"
]
| [
"clldutils/iso_639_3.py",
"clldutils/licenses.py"
]
|
|
zhmcclient__python-zhmcclient-65 | 2ae2e52fa2f8120e85afb119508c818f653f72cd | 2016-09-09 08:02:39 | d274d8a31aead94ad3f87b555ee07fca61946c79 | coveralls:
[Coverage increased (+0.2%) to 88.541%](https://coveralls.io/builds/7812489) when pulling **f318fb7821b780b3fb20401d803f5f6bebaf9507 on andy/connected-nics-as-objects** into **5b90883d9934257d21ead1cbc548423b5fb682f8 on master**.
andy-maier: Rebased.
coveralls:
[Coverage increased (+0.1%) to 90.496%](https://coveralls.io/builds/7857812) when pulling **609404d34172fa9aea9e971b49967a1f4ae12c83 on andy/connected-nics-as-objects** into **6fffd1db48f3cf0463bbd95a16838c843125f60c on master**.
coveralls:
[Coverage increased (+0.2%) to 90.564%](https://coveralls.io/builds/7862977) when pulling **54d6c473093a419b9307520f057cfd6990aad2fe on andy/connected-nics-as-objects** into **6fffd1db48f3cf0463bbd95a16838c843125f60c on master**.
andy-maier: +1 | diff --git a/zhmcclient/_nic.py b/zhmcclient/_nic.py
index 10b65a7..c59be21 100644
--- a/zhmcclient/_nic.py
+++ b/zhmcclient/_nic.py
@@ -124,6 +124,38 @@ class NicManager(BaseManager):
props.update(result)
return Nic(self, props['element-uri'], props)
+ def nic_object(self, nic_id):
+ """
+ Return a minimalistic :class:`~zhmcclient.Nic` object for a Nic in this
+ Partition.
+
+ This method is an internal helper function and is not normally called
+ by users.
+
+ This object will be connected in the Python object tree representing
+ the resources (i.e. it has this Partition as a parent), and will have
+ the following properties set:
+
+ * `element-uri`
+ * `element-id`
+ * `parent`
+ * `class`
+
+ Parameters:
+
+ nic_id (string): `element-id` of the Nic
+
+ Returns:
+
+ :class:`~zhmcclient.Nic`: A Python object representing the Nic.
+ """
+ part_uri = self.parent.uri
+ nic_uri = part_uri + "/nics/" + nic_id
+ return Nic(self, nic_uri, {'element-uri': nic_uri,
+ 'element-id': nic_id,
+ 'parent': part_uri,
+ 'class': 'nic'})
+
class Nic(BaseResource):
"""
diff --git a/zhmcclient/_partition.py b/zhmcclient/_partition.py
index e8a337d..1610aff 100644
--- a/zhmcclient/_partition.py
+++ b/zhmcclient/_partition.py
@@ -136,6 +136,38 @@ class PartitionManager(BaseManager):
props.update(result)
return Partition(self, props['object-uri'], props)
+ def partition_object(self, part_id):
+ """
+ Return a minimalistic :class:`~zhmcclient.Partition` object for a
+ Partition in this CPC.
+
+ This method is an internal helper function and is not normally called
+ by users.
+
+ This object will be connected in the Python object tree representing
+ the resources (i.e. it has this CPC as a parent), and will have the
+ following properties set:
+
+ * `object-uri`
+ * `object-id`
+ * `parent`
+ * `class`
+
+ Parameters:
+
+ part_id (string): `object-id` of the Partition
+
+ Returns:
+
+ :class:`~zhmcclient.Partition`: A Python object representing the
+ Partition.
+ """
+ part_uri = "/api/partitions/" + part_id
+ return Partition(self, part_uri, {'object-uri': part_uri,
+ 'object-id': part_id,
+ 'parent': self.parent.uri,
+ 'class': 'partition'})
+
class Partition(BaseResource):
"""
diff --git a/zhmcclient/_virtual_switch.py b/zhmcclient/_virtual_switch.py
index 3a8a066..1add1b7 100644
--- a/zhmcclient/_virtual_switch.py
+++ b/zhmcclient/_virtual_switch.py
@@ -26,6 +26,8 @@ Virtual Switches only exist in CPCs that are in DPM mode.
from __future__ import absolute_import
+import re
+
from ._manager import BaseManager
from ._resource import BaseResource
@@ -127,11 +129,18 @@ class VirtualSwitch(BaseResource):
def get_connected_vnics(self):
"""
- List the NICs connected to this Virtual Switch.
+ List the :term:`NICs <NIC>` connected to this Virtual Switch.
Returns:
- : A list of NIC URIs.
+ : A list of :term:`Nic` objects. These objects will be connected in
+ the resource tree (i.e. have a parent :term:`Partition` object,
+ etc.) and will have the following properties set:
+
+ * `element-uri`
+ * `element-id`
+ * `parent`
+ * `class`
Raises:
@@ -141,10 +150,24 @@ class VirtualSwitch(BaseResource):
:exc:`~zhmcclient.ConnectionError`
"""
vswitch_uri = self.get_property('object-uri')
- status = self.manager.session.get(
- vswitch_uri +
- '/operations/get-connected-vnics')
- return status['connected-vnic-uris']
+ result = self.manager.session.get(
+ vswitch_uri + '/operations/get-connected-vnics')
+ nic_uris = result['connected-vnic-uris']
+ nic_list = []
+ parts = {} # Key: Partition ID; Value: Partition object
+ for nic_uri in nic_uris:
+ m = re.match(r"^/api/partitions/([^/]+)/nics/([^/]+)/?$", nic_uri)
+ part_id = m.group(1)
+ nic_id = m.group(2)
+ # We remember created Partition objects and reuse them.
+ try:
+ part = parts[part_id]
+ except KeyError:
+ part = self.manager.cpc.partitions.partition_object(part_id)
+ parts[part_id] = part
+ nic = part.nics.nic_object(nic_id)
+ nic_list.append(nic)
+ return nic_list
def update_properties(self, properties):
"""
| get_connected_vnics() returns NICs as URIs
`VirtualSwitch.get_connected_vnics()` returns the NICs that are connected to the virtual switch.
The current design returns these NICs as their URIs. This is inconsistent with other list functions, which return resources as their corresponding Python objects (`Nic` in this case).
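For illustration, a sketch of how those URIs map onto `(partition, nic)` IDs before being wrapped in minimalistic objects — the regex is copied from the merged patch; the sample URIs are made up:

```python
# Sketch of the URI parsing behind the new get_connected_vnics():
# each NIC URI is split into a partition id and a nic id, from which the
# patch builds connected Partition and Nic objects.
import re

NIC_URI_RE = re.compile(r"^/api/partitions/([^/]+)/nics/([^/]+)/?$")

nic_uris = [  # illustrative URIs; real ones come from the HMC response
    "/api/partitions/part-1/nics/nic-a",
    "/api/partitions/part-1/nics/nic-b",
    "/api/partitions/part-2/nics/nic-c",
]

for nic_uri in nic_uris:
    m = NIC_URI_RE.match(nic_uri)
    part_id, nic_id = m.group(1), m.group(2)
    # The patch caches one Partition object per part_id and calls
    # partition.nics.nic_object(nic_id) for each NIC.
    print(part_id, nic_id)
```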
Proposal: Return the connected NICs as a list of `Nic` objects that have just their `object-uri` property set. | zhmcclient/python-zhmcclient | diff --git a/tests/test_nic.py b/tests/test_nic.py
index a5d01e7..a1ba6f5 100644
--- a/tests/test_nic.py
+++ b/tests/test_nic.py
@@ -230,5 +230,24 @@ class NicTests(unittest.TestCase):
status = nic.update_properties(properties={})
self.assertEqual(status, None)
+ def test_nic_object(self):
+ """
+ This tests the `nic_object()` method.
+ """
+ nic_mgr = self.partition.nics
+ nic_id = 'fake-nic-id0711'
+
+ nic = nic_mgr.nic_object(nic_id)
+
+ nic_uri = self.partition.uri + "/nics/" + nic_id
+
+ self.assertTrue(isinstance(nic, Nic))
+ self.assertEqual(nic.uri, nic_uri)
+ self.assertEqual(nic.properties['element-uri'], nic_uri)
+ self.assertEqual(nic.properties['element-id'], nic_id)
+ self.assertEqual(nic.properties['class'], 'nic')
+ self.assertEqual(nic.properties['parent'], self.partition.uri)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/test_partition.py b/tests/test_partition.py
index 0d12b2d..5e488dd 100644
--- a/tests/test_partition.py
+++ b/tests/test_partition.py
@@ -434,5 +434,24 @@ class PartitionTests(unittest.TestCase):
status = partition.unmount_iso_image()
self.assertEqual(status, None)
+ def test_partition_object(self):
+ """
+ This tests the `partition_object()` method.
+ """
+ partition_mgr = self.cpc.partitions
+ partition_id = 'fake-partition-id42'
+
+ partition = partition_mgr.partition_object(partition_id)
+
+ partition_uri = "/api/partitions/" + partition_id
+
+ self.assertTrue(isinstance(partition, Partition))
+ self.assertEqual(partition.uri, partition_uri)
+ self.assertEqual(partition.properties['object-uri'], partition_uri)
+ self.assertEqual(partition.properties['object-id'], partition_id)
+ self.assertEqual(partition.properties['class'], 'partition')
+ self.assertEqual(partition.properties['parent'], self.cpc.uri)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/test_virtual_switch.py b/tests/test_virtual_switch.py
index 5f7a81c..5109b8d 100644
--- a/tests/test_virtual_switch.py
+++ b/tests/test_virtual_switch.py
@@ -20,9 +20,10 @@ Unit tests for _virtual_switch module.
from __future__ import absolute_import
import unittest
+import re
import requests_mock
-from zhmcclient import Session, Client
+from zhmcclient import Session, Client, Nic
class VirtualSwitchTests(unittest.TestCase):
@@ -224,8 +225,20 @@ class VirtualSwitchTests(unittest.TestCase):
"/api/virtual-switches/fake-vswitch-id1/"
"operations/get-connected-vnics",
json=result)
- status = vswitch.get_connected_vnics()
- self.assertEqual(status, result['connected-vnic-uris'])
+
+ nics = vswitch.get_connected_vnics()
+
+ self.assertTrue(isinstance(nics, list))
+ for i, nic in enumerate(nics):
+ self.assertTrue(isinstance(nic, Nic))
+ nic_uri = result['connected-vnic-uris'][i]
+ self.assertEqual(nic.uri, nic_uri)
+ self.assertEqual(nic.properties['element-uri'], nic_uri)
+ m = re.match(r"^/api/partitions/([^/]+)/nics/([^/]+)/?$",
+ nic_uri)
+ nic_id = m.group(2)
+ self.assertEqual(nic.properties['element-id'], nic_id)
+
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 3
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"requests-mock",
"testfixtures"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
decorator==5.2.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
requests-mock==1.12.1
six==1.17.0
testfixtures==8.3.0
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/zhmcclient/python-zhmcclient.git@2ae2e52fa2f8120e85afb119508c818f653f72cd#egg=zhmcclient
| name: python-zhmcclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- decorator==5.2.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- requests-mock==1.12.1
- six==1.17.0
- testfixtures==8.3.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/python-zhmcclient
| [
"tests/test_nic.py::NicTests::test_nic_object",
"tests/test_partition.py::PartitionTests::test_partition_object",
"tests/test_virtual_switch.py::VirtualSwitchTests::test_get_connected_vnics"
]
| []
| [
"tests/test_nic.py::NicTests::test_create",
"tests/test_nic.py::NicTests::test_delete",
"tests/test_nic.py::NicTests::test_init",
"tests/test_nic.py::NicTests::test_list_full_ok",
"tests/test_nic.py::NicTests::test_list_short_ok",
"tests/test_nic.py::NicTests::test_update_properties",
"tests/test_partition.py::PartitionTests::test_create",
"tests/test_partition.py::PartitionTests::test_delete",
"tests/test_partition.py::PartitionTests::test_dump_partition",
"tests/test_partition.py::PartitionTests::test_init",
"tests/test_partition.py::PartitionTests::test_list_full_ok",
"tests/test_partition.py::PartitionTests::test_list_short_ok",
"tests/test_partition.py::PartitionTests::test_mount_iso_image",
"tests/test_partition.py::PartitionTests::test_psw_restart",
"tests/test_partition.py::PartitionTests::test_start",
"tests/test_partition.py::PartitionTests::test_stop",
"tests/test_partition.py::PartitionTests::test_unmount_iso_image",
"tests/test_partition.py::PartitionTests::test_update_properties",
"tests/test_virtual_switch.py::VirtualSwitchTests::test_init",
"tests/test_virtual_switch.py::VirtualSwitchTests::test_list_full_ok",
"tests/test_virtual_switch.py::VirtualSwitchTests::test_list_short_ok",
"tests/test_virtual_switch.py::VirtualSwitchTests::test_update_properties"
]
| []
| Apache License 2.0 | 750 | [
"zhmcclient/_nic.py",
"zhmcclient/_partition.py",
"zhmcclient/_virtual_switch.py"
]
| [
"zhmcclient/_nic.py",
"zhmcclient/_partition.py",
"zhmcclient/_virtual_switch.py"
]
|
biocommons__biocommons.seqrepo-7 | 6c89eeea058747267db608d50b088df136aecf86 | 2016-09-10 21:31:38 | 6c89eeea058747267db608d50b088df136aecf86 | diff --git a/README.rst b/README.rst
index 0efa0ad..9a20600 100644
--- a/README.rst
+++ b/README.rst
@@ -71,14 +71,15 @@ On Ubuntu 16.04::
$ sudo apt install -y python3-dev gcc zlib1g-dev tabix
$ pip install seqrepo
- $ rsync -HRavP rsync.biocommons.org::seqrepo/20160828 /usr/local/share/seqrepo/
- $ seqrepo -d /usr/local/share/seqrepo/20160828 start-shell
- seqrepo 0.2.3.dev2+neeca95d3ae6e.d20160830
- root directory: /opt/seqrepo/20160828, 7.9 GB
+ $ seqrepo pull -i 20160906
+ $ seqrepo -i 20160906 show-status
+ seqrepo 0.2.3.post3.dev8+nb8298bd62283
+ root directory: /usr/local/share/seqrepo/20160906, 7.9 GB
backends: fastadir (schema 1), seqaliasdb (schema 1)
- sequences: 773511 sequences, 93005806376 residues, 189 files
- aliases: 5572724 aliases, 5473237 current, 9 namespaces, 773511 sequences
+ sequences: 773587 sequences, 93051609959 residues, 192 files
+ aliases: 5579572 aliases, 5480085 current, 26 namespaces, 773587 sequences
+ $ seqrepo -i 20160906 start-shell
In [1]: sr["NC_000001.11"][780000:780020]
Out[1]: 'TGGTGGCACGCGCTTGTAGT'
diff --git a/biocommons/seqrepo/cli.py b/biocommons/seqrepo/cli.py
index a38eaac..02b606b 100644
--- a/biocommons/seqrepo/cli.py
+++ b/biocommons/seqrepo/cli.py
@@ -1,10 +1,13 @@
"""command line interface to a local SeqRepo repository
+SeqRepo is a non-redundant, compressed, journalled, file-based storage
+for biological sequences
+
https://github.com/biocommons/biocommons.seqrepo
-Typical usage is via the `seqrepo` script::
+Try::
- $ seqrepo --help
+ $ seqrepo --help
"""
@@ -20,6 +23,7 @@ import re
import shutil
import stat
import sys
+import subprocess
import tempfile
import bioutils.assemblies
@@ -33,17 +37,23 @@ from .py2compat import commonpath, gzip_open_encoded, makedirs
def parse_arguments():
top_p = argparse.ArgumentParser(
- description=__doc__.split("\n\n")[0], formatter_class=argparse.ArgumentDefaultsHelpFormatter,
- epilog="seqrepo " + __version__ +
- ". See https://github.com/biocommons/biocommons.seqrepo for more information")
- top_p.add_argument("--dir", "-d", help="seqrepo data directory; created by init", required=True)
- top_p.add_argument("--verbose", "-v", action="count", default=0, help="be verbose; multiple accepted")
+ description=__doc__.split("\n\n")[0],
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ epilog="seqrepo " + __version__ + ". See https://github.com/biocommons/biocommons.seqrepo for more information"
+ )
+ top_p.add_argument("--root-directory", "-r", default="/usr/local/share/seqrepo",
+ help="seqrepo root directory")
+ top_p.add_argument("--instance-name", "-i", default="master",
+ help="name of seqrepo instance within root-directory")
+ top_p.add_argument("--verbose", "-v", action="count", default=0,
+ help="be verbose; multiple accepted")
top_p.add_argument("--version", action="version", version=__version__)
subparsers = top_p.add_subparsers(title='subcommands')
# add-assembly-names
- ap = subparsers.add_parser("add-assembly-names", help="add assembly aliases (from bioutils.assemblies) to existing sequences")
+ ap = subparsers.add_parser("add-assembly-names",
+ help="add assembly aliases (from bioutils.assemblies) to existing sequences")
ap.set_defaults(func=add_assembly_names)
# export
@@ -51,7 +61,7 @@ def parse_arguments():
ap.set_defaults(func=export)
# init
- ap = subparsers.add_parser("init", help="initialize bsa directory")
+ ap = subparsers.add_parser("init", help="initialize seqrepo directory")
ap.set_defaults(func=init)
# load
@@ -67,6 +77,24 @@ def parse_arguments():
required=True,
help="namespace name (e.g., ncbi, ensembl, lrg)", )
+ # pull
+ ap = subparsers.add_parser("pull", help="pull incremental update from seqrepo mirror")
+ ap.set_defaults(func=pull)
+ ap.add_argument("--instance-name", "-i", required=False,
+ help="instance name to pull; latest if not specified")
+ ap.add_argument(
+ "--rsync-exe",
+ default="/usr/bin/rsync",
+ help="path to rsync executable")
+ ap.add_argument(
+ "--remote-host",
+ default="dl.biocommons.org",
+ help="rsync server host")
+ ap.add_argument(
+ "--dry-run", "-n",
+ default=False,
+ action="store_true")
+
# show-status
ap = subparsers.add_parser("show-status", help="show seqrepo status")
ap.set_defaults(func=show_status)
@@ -84,7 +112,7 @@ def parse_arguments():
ap.set_defaults(func=start_shell)
# upgrade
- ap = subparsers.add_parser("upgrade", help="upgrade bsa database and directory")
+ ap = subparsers.add_parser("upgrade", help="upgrade seqrepo database and directory")
ap.set_defaults(func=upgrade)
opts = top_p.parse_args()
@@ -94,14 +122,14 @@ def parse_arguments():
def add_assembly_names(opts):
"""add assembly names as aliases to existing sequences
- Specifically, associate aliases like GRCh37:HSCHRUN_RANDOM_CTG42 with (existing) ncbi:NT_167243.1
+ Specifically, associate aliases like GRCh37.p9:1 with (existing)
"""
logger = logging.getLogger(__name__)
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
sr = SeqRepo(opts.dir, writeable=True)
ncbi_alias_map = {r["alias"]: r["seq_id"] for r in sr.aliases.find_aliases(namespace="ncbi", current_only=False)}
namespaces = [r["namespace"] for r in sr.aliases._db.execute("select distinct namespace from seqalias")]
assemblies = bioutils.assemblies.get_assemblies()
- import IPython; IPython.embed() ### TODO: Remove IPython.embed()
assemblies_to_load = sorted([k for k in assemblies if k not in namespaces])
logger.info("{} assemblies to load".format(len(assemblies_to_load)))
for assy_name in tqdm.tqdm(assemblies_to_load, unit="assembly"):
@@ -120,7 +148,7 @@ def add_assembly_names(opts):
namespace=assy_name,
alias=s["name"])
sr.commit()
-
+
def export(opts):
def convert_alias_records_to_ns_dict(records):
@@ -135,6 +163,7 @@ def export(opts):
for i in range(0, len(seq), line_width):
yield seq[i:i + line_width]
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
sr = SeqRepo(opts.dir)
for srec,arecs in sr:
nsad = convert_alias_records_to_ns_dict(arecs)
@@ -145,6 +174,7 @@ def export(opts):
def init(opts):
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
if os.path.exists(opts.dir) and len(os.listdir(opts.dir)) > 0:
raise IOError("{opts.dir} exists and is not empty".format(opts=opts))
sr = SeqRepo(opts.dir, writeable=True) # flake8: noqa
@@ -155,6 +185,7 @@ def load(opts):
disable_bar = logger.getEffectiveLevel() < logging.WARNING
defline_re = re.compile("(?P<namespace>gi|ref)\|(?P<alias>[^|]+)")
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
sr = SeqRepo(opts.dir, writeable=True)
n_seqs_seen = n_seqs_added = n_aliases_added = 0
@@ -193,7 +224,53 @@ def load(opts):
n_aliases_added += n_aa
+def pull(opts):
+ instance_name_re = re.compile('^201\d{5}$') # smells like a datestamp
+ #instance_name_re = re.compile('^[89]\d+$') # debugging
+ def _get_remote_instances(opts):
+ line_re = re.compile(r'd[-rwx]{9}\s+[\d,]+ \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (.+)')
+ lines = subprocess.check_output([opts.rsync_exe, "--no-motd", opts.remote_host + "::seqrepo"]).decode().splitlines()[1:]
+ dirs = (m.group(1) for m in (line_re.match(l) for l in lines) if m)
+ return list(filter(instance_name_re.match, dirs))
+ def _get_local_instances(opts):
+ return list(filter(instance_name_re.match, os.listdir(opts.root_directory)))
+
+ logger = logging.getLogger(__name__)
+
+ remote_instances = _get_remote_instances(opts)
+ if opts.instance_name:
+ instance_name = opts.instance_name
+ if instance_name not in remote_instances:
+ raise KeyError("{}: not in list of remote instance names".format(instance_name))
+ else:
+ instance_name = sorted(remote_instances)[-1]
+ logger.info("most recent seqrepo instance is " + instance_name)
+
+ local_instances = _get_local_instances(opts)
+ if instance_name in local_instances:
+ logger.warn("{}: instance already exists; skipping".format(instance_name))
+ return
+
+ tmp_dir = tempfile.mkdtemp(dir=opts.root_directory, prefix=instance_name + ".")
+ os.rmdir(tmp_dir) # let rsync create it the directory
+
+ cmd = [opts.rsync_exe, "-aHP", "--no-motd"]
+ if local_instances:
+ latest_local_instance = sorted(local_instances)[-1]
+ cmd += ["--link-dest=" + os.path.join(opts.root_directory, latest_local_instance) + "/"]
+ cmd += ["{h}::seqrepo/{i}/".format(h=opts.remote_host, i=instance_name),
+ tmp_dir]
+
+ logger.debug("Running: " + " ".join(cmd))
+ if not opts.dry_run:
+ subprocess.check_call(cmd)
+ dst_dir = os.path.join(opts.root_directory, instance_name)
+ os.rename(tmp_dir, dst_dir)
+ logger.info("{}: successfully updated ({})".format(instance_name, dst_dir))
+
+
def show_status(opts):
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
tot_size = sum(os.path.getsize(os.path.join(dirpath,filename))
for dirpath, dirnames, filenames in os.walk(opts.dir)
for filename in filenames)
@@ -216,17 +293,14 @@ def snapshot(opts):
"""
logger = logging.getLogger(__name__)
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
- src_dir = os.path.realpath(opts.dir)
dst_dir = opts.destination_directory
-
- if dst_dir.startswith("/"):
- # use as-is
- pass
- else:
+ if not dst_dir.startswith("/"):
# interpret dst_dir as relative to parent dir of opts.dir
- dst_dir = os.path.join(src_dir, '..', dst_dir)
+ dst_dir = os.path.join(opts.root_directory, dst_dir)
+ src_dir = os.path.realpath(opts.dir)
dst_dir = os.path.realpath(dst_dir)
if commonpath([src_dir, dst_dir]).startswith(src_dir):
@@ -285,12 +359,13 @@ def snapshot(opts):
def start_shell(opts):
- sr = show_status(opts)
+ #sr = show_status(opts)
import IPython
- IPython.embed(display_banner=False)
-
+ IPython.embed(header="seqrepo " + __version__ +
+ "\nhttps://github.com/biocommons/biocommons.seqrepo/")
def upgrade(opts):
+ opts.dir = os.path.join(opts.root_directory, opts.instance_name)
sr = SeqRepo(opts.dir, writeable=True)
print("upgraded to schema version {}".format(sr.seqinfo.schema_version()))
diff --git a/doc/cli.rst b/doc/cli.rst
index 18b49d7..a65494f 100644
--- a/doc/cli.rst
+++ b/doc/cli.rst
@@ -5,10 +5,10 @@ seqrepo includes a command line interface for loading, fetching, and exporting s
This documentation assumes that the seqrepo base directory is::
- SEQREPO=/usr/local/share/seqrepo
+ SEQREPO_ROOT=/usr/local/share/seqrepo
-Current convention is to add sequences to `$SEQREPO/master`, then
-snapshot this to a dated directory like `$SEQREPO/20160828`. (This
+Current convention is to add sequences to `$SEQREPO_ROOT/master`, then
+snapshot this to a dated directory like `$SEQREPO_ROOT/20160828`. (This
convention is conceptually similar to source code development on a
master branch with tags.)
diff --git a/doc/mirror.rst b/doc/mirror.rst
index d74f50b..b666407 100644
--- a/doc/mirror.rst
+++ b/doc/mirror.rst
@@ -2,11 +2,11 @@ Fetching existing sequence repositories
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
A public instance of seqrepo with dated snapshots is available at on
-seqrepo.biocommons.org.
+dl.biocommons.org.
You can list available snapshots like so::
- $ rsync rsync.biocommons.org::seqrepo
+ $ rsync dl.biocommons.org::seqrepo
This is the rsync service for tools and data from biocommons.org
This service is hosted by Invitae (https://invitae.com/).
@@ -17,7 +17,7 @@ You can list available snapshots like so::
You may mirror the entire seqrepo archive or a specific snapshot, as
shown below::
- $ rsync -HavP rsync.biocommons.org::seqrepo/20160828/ /tmp/seqrepo/20160828/
+ $ rsync -HavP dl.biocommons.org::seqrepo/20160828/ /tmp/seqrepo/20160828/
This is the rsync service for tools and data from biocommons.org
This service is hosted by Invitae (https://invitae.com/).
@@ -31,7 +31,7 @@ shown below::
If you have a previous snapshot, you should invoke rsync like this in
order to hard link unchanged files::
- $ rsync -HavP --link-dest=/tmp/seqrepo/20160827/ rsync.biocommons.org::seqrepo/20160828/ /tmp/seqrepo/20160828/
+ $ rsync -HavP --link-dest=/tmp/seqrepo/20160827/ dl.biocommons.org::seqrepo/20160828/ /tmp/seqrepo/20160828/
If seqrepo is already installed, you may check the repo status with::
diff --git a/sbin/seqrepo-push b/sbin/seqrepo-push
new file mode 100755
index 0000000..7e86ac2
--- /dev/null
+++ b/sbin/seqrepo-push
@@ -0,0 +1,28 @@
+#!/bin/bash
+# push a seqrepo instance to an rsync server
+# https://github.com/biocommons/biocommons.seqrepo/
+# Used by Reece to push new instances -- likely not relevant for anyone else
+
+# minion is an ssh alias
+host=minion
+
+# seqrepo root directory
+seqrepo_root=/usr/local/share/seqrepo
+
+if [ "$#" != "2" ]; then
+ echo "Usage: $0 <previous_instance> <push_instance>" 1>&2
+ exit 1
+fi
+
+prev_instance="$1"; shift
+push_instance="$1"; shift
+
+echo "prev_instance=${prev_instance}"
+echo "push_instance=${push_instance}"
+
+(
+ set -ex
+ cd $seqrepo_root
+ rsync -avHP "$push_instance/" --link-dest="../$prev_instance/" minion:"dl.biocommons.org/seqrepo/$push_instance.tmp/"
+ ssh minion /bin/mv "dl.biocommons.org/seqrepo/$push_instance.tmp/" "dl.biocommons.org/seqrepo/$push_instance/"
+)
| simplify pulling new releases
Mirroring seqrepo requires something like

    rsync -HRavP rsync.biocommons.org::seqrepo/2016082[78] /tmp/seqrepo/

or

    rsync -HavP --link-dest=/tmp/seqrepo/20160827/ rsync.biocommons.org::seqrepo/20160828/ /tmp/seqrepo/20160828/
Use the `seqrepo` CLI to simplify this process. This will likely force a rethink of the distinction between the seqrepo **root** (*e.g.,* `/usr/local/share/seqrepo`) and a seqrepo **dir** (`/usr/local/share/seqrepo/master`).
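For illustration, a minimal sketch of the rsync command the new `pull` subcommand assembles (paths and instance names are made up; the `--link-dest` handling mirrors the patch, which hard-links unchanged files against the newest local instance):

```python
# Sketch of the command construction in `seqrepo pull`; after a successful
# transfer the patch renames tmp_dir to <root>/<instance_name>.
import os

root = "/usr/local/share/seqrepo"      # illustrative root directory
local_instances = ["20160827"]         # instances already on disk
instance_name = "20160828"             # instance to pull
tmp_dir = os.path.join(root, instance_name + ".tmp")

cmd = ["/usr/bin/rsync", "-aHP", "--no-motd"]
if local_instances:
    latest = sorted(local_instances)[-1]
    cmd += ["--link-dest=" + os.path.join(root, latest) + "/"]
cmd += ["dl.biocommons.org::seqrepo/{i}/".format(i=instance_name), tmp_dir]

print(" ".join(cmd))
```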
| biocommons/biocommons.seqrepo | diff --git a/tests/test_cli.py b/tests/test_cli.py
index b94bb7e..87b09b3 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -15,9 +15,10 @@ def opts():
test_data_dir = os.path.join(test_dir, 'data')
opts = MockOpts()
- opts.dir = os.path.join(tempfile.mkdtemp(prefix="seqrepo_pytest_"), "seqrepo")
+ opts.root_directory = os.path.join(tempfile.mkdtemp(prefix="seqrepo_pytest_"), "seqrepo")
opts.fasta_files = [os.path.join(test_data_dir, "sequences.fa.gz")]
opts.namespace = "test"
+ opts.instance_name = "test"
opts.verbose = 0
return opts
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-dev gcc zlib1g-dev tabix"
],
"python": "3.9",
"reqs_path": [
"etc/install.reqs"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asttokens==3.0.0
-e git+https://github.com/biocommons/biocommons.seqrepo.git@6c89eeea058747267db608d50b088df136aecf86#egg=biocommons.seqrepo
biopython==1.85
bioutils==0.2.0a1
coverage==7.8.0
decorator==5.2.1
exceptiongroup==1.2.2
executing==2.2.0
importlib_metadata==8.6.1
iniconfig==2.1.0
ipython==8.18.1
jedi==0.19.2
matplotlib-inline==0.1.7
numpy==2.0.2
packaging==24.2
parso==0.8.4
pexpect==4.9.0
pluggy==1.5.0
prompt_toolkit==3.0.50
ptyprocess==0.7.0
pure_eval==0.2.3
Pygments==2.19.1
pysam==0.23.0
pytest==8.3.5
pytest-cov==6.0.0
six==1.17.0
sqlparse==0.5.3
stack-data==0.6.3
tabulate==0.9.0
tomli==2.2.1
tqdm==4.67.1
traitlets==5.14.3
typing_extensions==4.13.0
wcwidth==0.2.13
yoyo-migrations==9.0.0
zipp==3.21.0
| name: biocommons.seqrepo
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- asttokens==3.0.0
- biopython==1.85
- bioutils==0.2.0a1
- coverage==7.8.0
- decorator==5.2.1
- exceptiongroup==1.2.2
- executing==2.2.0
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipython==8.18.1
- jedi==0.19.2
- matplotlib-inline==0.1.7
- numpy==2.0.2
- packaging==24.2
- parso==0.8.4
- pexpect==4.9.0
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pygments==2.19.1
- pysam==0.23.0
- pytest==8.3.5
- pytest-cov==6.0.0
- six==1.17.0
- sqlparse==0.5.3
- stack-data==0.6.3
- tabulate==0.9.0
- tomli==2.2.1
- tqdm==4.67.1
- traitlets==5.14.3
- typing-extensions==4.13.0
- wcwidth==0.2.13
- yoyo-migrations==9.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/biocommons.seqrepo
| [
"tests/test_cli.py::test_00_init"
]
| [
"tests/test_cli.py::test_20_load"
]
| []
| []
| Apache License 2.0 | 751 | [
"README.rst",
"sbin/seqrepo-push",
"doc/cli.rst",
"doc/mirror.rst",
"biocommons/seqrepo/cli.py"
]
| [
"README.rst",
"sbin/seqrepo-push",
"doc/cli.rst",
"doc/mirror.rst",
"biocommons/seqrepo/cli.py"
]
|
|
zalando-stups__senza-349 | e0331771ea0cc64d3ba5896f31d954f832a82ba9 | 2016-09-12 07:22:12 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/ec2.py b/senza/manaus/ec2.py
index 9ea2600..6dee960 100644
--- a/senza/manaus/ec2.py
+++ b/senza/manaus/ec2.py
@@ -1,5 +1,5 @@
from collections import OrderedDict
-from typing import Dict, List, Iterator
+from typing import Dict, List, Iterator, Optional
import boto3
@@ -16,9 +16,10 @@ class EC2VPC:
def __init__(self,
vpc_id: str,
is_default: bool,
- tags: List[Dict[str, str]]):
+ tags: Optional[List[Dict[str, str]]]):
self.vpc_id = vpc_id
self.is_default = is_default
+ tags = tags or [] # type: List[Dict[str, str]]
self.tags = OrderedDict([(t['Key'], t['Value']) for t in tags]) # type: Dict[str, str]
self.name = self.tags.get('Name', self.vpc_id)
| Better error message for "create" and VPC tags
When trying to create a stack with a VPC that has no tags, the user gets the following message:
```
senza create deploy-definition.yaml 1 0.1
Generating Cloud Formation template.. EXCEPTION OCCURRED: 'NoneType' object is not iterable
Unknown Error: 'NoneType' object is not iterable.
Please create an issue with the content of /var/folders/yd/p61l98fn2g9fffwgjs819gr1sprr6d/T/senza-traceback-xgrqlxbj
```
In /var/folders/yd/p61l98fn2g9fffwgjs819gr1sprr6d/T/senza-traceback-xgrqlxbj:
```
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/senza/error_handling.py", line 76, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 663, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 746, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 242, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/usr/local/lib/python3.5/site-packages/senza/components/stups_auto_configuration.py", line 31, in component_stups_auto_configuration
vpc_id = configuration.get('VpcId', account_info.VpcID)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 329, in VpcID
vpc = ec2.get_default_vpc()
File "/usr/local/lib/python3.5/site-packages/senza/manaus/ec2.py", line 71, in get_default_vpc
return EC2VPC.from_boto_vpc(vpc)
File "/usr/local/lib/python3.5/site-packages/senza/manaus/ec2.py", line 41, in from_boto_vpc
return cls(vpc.vpc_id, vpc.is_default, vpc.tags)
File "/usr/local/lib/python3.5/site-packages/senza/manaus/ec2.py", line 22, in __init__
self.tags = OrderedDict([(t['Key'], t['Value']) for t in tags]) # type: Dict[str, str]
TypeError: 'NoneType' object is not iterable
```
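For illustration, a minimal reproduction of the failure and the one-line guard the patch applies (`FakeVPC` is a made-up stand-in for a boto3 `Vpc`, whose `tags` attribute is `None` rather than `[]` when the VPC is untagged):

```python
# Untagged VPCs yield tags=None; guarding with `tags or []` keeps the
# OrderedDict construction from blowing up.
from collections import OrderedDict


class FakeVPC:
    vpc_id = 'vpc-123'
    is_default = True
    tags = None  # what boto3 reports for a VPC without tags


vpc = FakeVPC()
tags = vpc.tags or []  # the guard from the patch
parsed = OrderedDict((t['Key'], t['Value']) for t in tags)
name = parsed.get('Name', vpc.vpc_id)  # falls back to the VPC id
assert name == 'vpc-123'
```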
The error message should be more descriptive. | zalando-stups/senza | diff --git a/tests/test_manaus/test_ec2.py b/tests/test_manaus/test_ec2.py
index 36f1588..4dd7ae6 100644
--- a/tests/test_manaus/test_ec2.py
+++ b/tests/test_manaus/test_ec2.py
@@ -37,6 +37,11 @@ def test_get_default_vpc(monkeypatch):
mock_vpc3.is_default = False
mock_vpc3.tags = []
+ mock_vpc4 = MagicMock()
+ mock_vpc4.vpc_id = 'vpc-id4'
+ mock_vpc4.is_default = True
+ mock_vpc4.tags = None
+
m_resource = MagicMock()
m_resource.return_value = m_resource
monkeypatch.setattr('boto3.resource', m_resource)
@@ -59,11 +64,16 @@ def test_get_default_vpc(monkeypatch):
ec2.get_default_vpc()
assert str(exc_info.value) == "Can't find any VPC!"
- # no vpcs
+ # multiple vpcs
m_resource.vpcs.all.return_value = [mock_vpc2, mock_vpc3]
with pytest.raises(VPCError) as exc_info:
ec2.get_default_vpc()
+ # no tags in vpc return default vpc
+ m_resource.vpcs.all.return_value = [mock_vpc4, mock_vpc2]
+ vpc3 = ec2.get_default_vpc()
+ assert vpc3.vpc_id == 'vpc-id4'
+
assert str(exc_info.value) == ("Multiple VPCs are only supported if one "
"VPC is the default VPC (IsDefault=true)!")
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@e0331771ea0cc64d3ba5896f31d954f832a82ba9#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_ec2.py::test_get_default_vpc"
]
| []
| [
"tests/test_manaus/test_ec2.py::test_from_boto_vpc",
"tests/test_manaus/test_ec2.py::test_get_all_vpc"
]
| []
| Apache License 2.0 | 752 | [
"senza/manaus/ec2.py"
]
| [
"senza/manaus/ec2.py"
]
|
|
simphony__simphony-remote-225 | 0b766c7d2243945b67e542f81a313dda12ee6d52 | 2016-09-13 17:12:59 | 849750876813d8394ccda043b854f494b19372e1 | diff --git a/remoteappmanager/restresources/application.py b/remoteappmanager/restresources/application.py
index 78ebec9..72de8fe 100644
--- a/remoteappmanager/restresources/application.py
+++ b/remoteappmanager/restresources/application.py
@@ -60,4 +60,11 @@ class Application(Resource):
container, if active, as values."""
apps = self.application.db.get_apps_for_user(self.current_user.account)
- return [mapping_id for mapping_id, _, _ in apps]
+ container_manager = self.application.container_manager
+
+ result = []
+ for mapping_id, app, policy in apps:
+ if (yield container_manager.image(app.image)) is not None:
+ result.append(mapping_id)
+
+ return result
| Investigate why the REST layer returns an application that is no longer present
Related to #221, the root issue is that the applications list is returning an entry that is not available.
While PR #223 fixes the issue client-side, the entry should not be present in the initial response to begin with.
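For illustration, a minimal sketch of the server-side filtering the fix introduces, assuming a Tornado-style container manager whose `image()` coroutine resolves to `None` for images that no longer exist (as in the patch); `FakeManager` is a made-up stand-in for the real `container_manager`:

```python
from tornado import gen, ioloop


class FakeManager:
    present = {"hello1"}

    @gen.coroutine
    def image(self, name):
        return name if name in self.present else None


@gen.coroutine
def visible_app_ids(apps, manager):
    result = []
    for mapping_id, image_name in apps:
        # drop entries whose docker image is gone
        if (yield manager.image(image_name)) is not None:
            result.append(mapping_id)
    return result


apps = [("one", "hello1"), ("two", "hello2")]
ids = ioloop.IOLoop.current().run_sync(
    lambda: visible_app_ids(apps, FakeManager()))
assert ids == ["one"]  # "two" is filtered: its image no longer exists
```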
| simphony/simphony-remote | diff --git a/remoteappmanager/restresources/tests/test_application.py b/remoteappmanager/restresources/tests/test_application.py
index 971074c..daabb5b 100644
--- a/remoteappmanager/restresources/tests/test_application.py
+++ b/remoteappmanager/restresources/tests/test_application.py
@@ -30,6 +30,7 @@ class TestApplication(AsyncHTTPTestCase):
application_mock_2 = Mock()
application_mock_2.image = "hello2"
+
app.db.get_apps_for_user = Mock(return_value=[
("one", application_mock_1, Mock()),
("two", application_mock_2, Mock()),
@@ -56,6 +57,16 @@ class TestApplication(AsyncHTTPTestCase):
self.assertEqual(escape.json_decode(res.body),
{"items": ["one", "two"]})
+ # Check if nothing is returned if no images are present
+ self._app.container_manager.image = mock_coro_factory(
+ return_value=None)
+
+ res = self.fetch("/api/v1/applications/")
+
+ self.assertEqual(res.code, httpstatus.OK)
+ self.assertEqual(escape.json_decode(res.body),
+ {"items": []})
+
def test_retrieve(self):
def prepare_side_effect(*args, **kwargs):
user = Mock()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"responses",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": [
"test_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.7.7
async-generator==1.10
attrs==22.2.0
certifi==2021.5.30
certipy==0.1.3
cffi==1.15.1
charset-normalizer==2.0.12
click==8.0.4
coverage==6.2
cryptography==40.0.2
decorator==5.1.1
docker-py==1.10.6
docker-pycreds==0.4.0
entrypoints==0.4
escapism==1.0.1
greenlet==2.0.2
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
ipython-genutils==0.2.0
Jinja2==3.0.3
jsonschema==3.2.0
jupyter-client==7.1.2
jupyter-core==4.9.2
jupyter-telemetry==0.1.0
jupyterhub==2.3.1
Mako==1.1.6
MarkupSafe==2.0.1
mock==5.2.0
nest-asyncio==1.6.0
oauthlib==3.2.2
packaging==21.3
pamela==1.2.0
pluggy==1.0.0
prometheus-client==0.17.1
py==1.11.0
pycparser==2.21
pyOpenSSL==23.2.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
pyzmq==25.1.2
-e git+https://github.com/simphony/simphony-remote.git@0b766c7d2243945b67e542f81a313dda12ee6d52#egg=remoteappmanager
requests==2.27.1
responses==0.17.0
ruamel.yaml==0.18.3
ruamel.yaml.clib==0.2.8
six==1.17.0
SQLAlchemy==1.4.54
tabulate==0.8.10
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
websocket-client==1.3.1
zipp==3.6.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.7.7
- async-generator==1.10
- attrs==22.2.0
- certipy==0.1.3
- cffi==1.15.1
- charset-normalizer==2.0.12
- click==8.0.4
- coverage==6.2
- cryptography==40.0.2
- decorator==5.1.1
- docker-py==1.10.6
- docker-pycreds==0.4.0
- entrypoints==0.4
- escapism==1.0.1
- greenlet==2.0.2
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipython-genutils==0.2.0
- jinja2==3.0.3
- jsonschema==3.2.0
- jupyter-client==7.1.2
- jupyter-core==4.9.2
- jupyter-telemetry==0.1.0
- jupyterhub==2.3.1
- mako==1.1.6
- markupsafe==2.0.1
- mock==5.2.0
- nest-asyncio==1.6.0
- oauthlib==3.2.2
- packaging==21.3
- pamela==1.2.0
- pluggy==1.0.0
- prometheus-client==0.17.1
- py==1.11.0
- pycparser==2.21
- pyopenssl==23.2.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- python-json-logger==2.0.7
- pyzmq==25.1.2
- requests==2.27.1
- responses==0.17.0
- ruamel-yaml==0.18.3
- ruamel-yaml-clib==0.2.8
- six==1.17.0
- sqlalchemy==1.4.54
- tabulate==0.8.10
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- websocket-client==1.3.1
- zipp==3.6.0
prefix: /opt/conda/envs/simphony-remote
| [
"remoteappmanager/restresources/tests/test_application.py::TestApplication::test_items"
]
| []
| [
"remoteappmanager/restresources/tests/test_application.py::TestApplication::test_retrieve"
]
| []
| BSD 3-Clause "New" or "Revised" License | 753 | [
"remoteappmanager/restresources/application.py"
]
| [
"remoteappmanager/restresources/application.py"
]
|
|
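The simphony-remote row above records a complete reproduction recipe in its install_config (pre_install commands, extra pip packages, requirements paths, the pytest invocation) together with the single fail-to-pass test id. Below is a minimal harness sketch, assuming the repository is already checked out at the recorded base commit and the script runs inside the target environment; the package-install step itself (the row's `install` field) is not shown here, every command string is copied from the row, and only the control flow is new.

import subprocess

PRE_INSTALL = ["apt-get update", "apt-get install -y gcc"]
PIP_PACKAGES = ["coverage", "mock", "responses", "pytest"]
# 'packages' plus 'reqs_path' from the row, read as two requirements files.
REQS_PATHS = ["requirements.txt", "test_requirements.txt"]
TEST_CMD = ("pytest --no-header -rA --tb=line --color=no "
            "-p no:cacheprovider -W ignore::DeprecationWarning")
FAIL_TO_PASS = ("remoteappmanager/restresources/tests/"
                "test_application.py::TestApplication::test_items")

def sh(cmd):
    # The recorded commands are plain shell strings, hence shell=True.
    subprocess.run(cmd, shell=True, check=True)

for cmd in PRE_INSTALL:
    sh(cmd)
for path in REQS_PATHS:
    sh("pip install -r {0}".format(path))
sh("pip install " + " ".join(PIP_PACKAGES))
# A candidate patch is validated by running only the fail-to-pass id:
sh("{0} {1}".format(TEST_CMD, FAIL_TO_PASS))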
F5Networks__f5-common-python-688 | 8c50cfada0d0101ee34c58af44ed38776818c0f9 | 2016-09-13 19:49:06 | 6a131615cfc5c17840f4f33c803365b4075646cd | diff --git a/f5/bigip/tm/ltm/__init__.py b/f5/bigip/tm/ltm/__init__.py
index 240a424..3307a7a 100644
--- a/f5/bigip/tm/ltm/__init__.py
+++ b/f5/bigip/tm/ltm/__init__.py
@@ -29,6 +29,7 @@ REST Kind
from f5.bigip.resource import OrganizingCollection
+from f5.bigip.tm.ltm.auth import Auth
from f5.bigip.tm.ltm.data_group import Data_Group
from f5.bigip.tm.ltm.ifile import Ifiles
from f5.bigip.tm.ltm.monitor import Monitor
@@ -52,6 +53,7 @@ class Ltm(OrganizingCollection):
def __init__(self, tm):
super(Ltm, self).__init__(tm)
self._meta_data['allowed_lazy_attributes'] = [
+ Auth,
Data_Group,
Ifiles,
Monitor,
diff --git a/f5/bigip/tm/ltm/auth.py b/f5/bigip/tm/ltm/auth.py
new file mode 100644
index 0000000..b764185
--- /dev/null
+++ b/f5/bigip/tm/ltm/auth.py
@@ -0,0 +1,259 @@
+# coding=utf-8
+#
+# Copyright 2014-2016 F5 Networks Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""BIG-IP® LTM auth submodule.
+
+REST URI
+ ``http://localhost/mgmt/tm/ltm/auth/``
+
+GUI Path
+ ``Local Traffic --> Profiles --> Authentication``
+
+REST Kind
+ ``tm:ltm:auth:*``
+"""
+from f5.bigip.mixins import UnsupportedMethod
+from f5.bigip.resource import Collection
+from f5.bigip.resource import OrganizingCollection
+from f5.bigip.resource import Resource
+
+
+class Auth(OrganizingCollection):
+ """BIG-IP® LTM Authentication organizing collection."""
+ def __init__(self, ltm):
+ super(Auth, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [
+ Crldp_Servers,
+ Kerberos_Delegations,
+ Ldaps,
+ Ocsp_Responders,
+ Profiles,
+ Radius_s,
+ Radius_Servers,
+ Ssl_Cc_Ldaps,
+ Ssl_Crldps,
+ Ssl_Ocsps,
+ Tacacs_s
+ ]
+
+
+class Crldp_Servers(Collection):
+ """BIG-IP® LTM Auth Crldp Server collection"""
+ def __init__(self, ltm):
+ super(Crldp_Servers, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Crldp_Server]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:crldp-server:crldp-serverstate': Crldp_Server}
+
+
+class Crldp_Server(Resource):
+ def __init__(self, crldp_servers):
+ """BIG-IP® LTM Auth Crldp Server resource"""
+ super(Crldp_Server, self).__init__(crldp_servers)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:crldp-server:crldp-serverstate'
+ self._meta_data['required_creation_parameters'].update(('host',))
+
+
+class Kerberos_Delegations(Collection):
+ """BIG-IP® LTM Auth Kerberos Delegation collection"""
+ def __init__(self, ltm):
+ super(Kerberos_Delegations, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Kerberos_Delegation]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:kerberos-delegation:kerberos-delegationstate':
+ Kerberos_Delegation}
+
+
+class Kerberos_Delegation(Resource):
+ """BIG-IP® LTM Auth Kerberos Delegation resource"""
+ def __init__(self, kerberos_delegations):
+ super(Kerberos_Delegation, self).__init__(kerberos_delegations)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:kerberos-delegation:kerberos-delegationstate'
+ self._meta_data['required_creation_parameters'].update(
+ ('serverPrincipal', 'clientPrincipal',))
+
+
+class Ldaps(Collection):
+ """BIG-IP® LTM Auth Ldap collection"""
+ def __init__(self, ltm):
+ super(Ldaps, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Ldap]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:ldap:ldapstate': Ldap}
+
+
+class Ldap(Resource):
+ """BIG-IP® LTM Auth Ldap resource"""
+ def __init__(self, ldaps):
+ super(Ldap, self).__init__(ldaps)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:ldap:ldapstate'
+ self._meta_data['required_creation_parameters'].update(('servers',))
+
+
+class Ocsp_Responders(Collection):
+ """BIG-IP® LTM Auth Ocsp Responder collection"""
+ def __init__(self, ltm):
+ super(Ocsp_Responders, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Ocsp_Responder]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:ocsp-responder:ocsp-responderstate': Ocsp_Responder}
+
+
+class Ocsp_Responder(Resource):
+ """BIG-IP® LTM Auth Ocsp Responder resource"""
+ def __init__(self, ocsp_responders):
+ super(Ocsp_Responder, self).__init__(ocsp_responders)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:ocsp-responder:ocsp-responderstate'
+
+
+class Profiles(Collection):
+ """BIG-IP® LTM Auth Profile collection"""
+ def __init__(self, ltm):
+ super(Profiles, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Profile]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:profile:profilestate': Profile}
+
+
+class Profile(Resource):
+ """BIG-IP® LTM Auth Profile resource"""
+ def __init__(self, profiles):
+ super(Profile, self).__init__(profiles)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:profile:profilestate'
+ self._meta_data['required_creation_parameters'].update(
+ ('defaultsFrom', 'configuration',))
+
+ def update(self, **kwargs):
+ '''Update is not supported for LTM Auth Profiles
+
+ :raises: UnsupportedOperation
+ '''
+ raise UnsupportedMethod(
+ "%s does not support the modify method" % self.__class__.__name__
+ )
+
+
+class Radius_s(Collection):
+ """BIG-IP® LTM Auth Radius collection"""
+ def __init__(self, ltm):
+ super(Radius_s, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Radius]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:radius:radiusstate': Radius}
+
+
+class Radius(Resource):
+ """BIG-IP® LTM Auth Radius resource"""
+ def __init__(self, radius_s):
+ super(Radius, self).__init__(radius_s)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:radius:radiusstate'
+
+
+class Radius_Servers(Collection):
+ """BIG-IP® LTM Auth Radius Server collection"""
+ def __init__(self, ltm):
+ super(Radius_Servers, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Radius_Server]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:radius-server:radius-serverstate': Radius_Server}
+
+
+class Radius_Server(Resource):
+ """BIG-IP® LTM Auth Radius Server resource"""
+ def __init__(self, radius_server_s):
+ super(Radius_Server, self).__init__(radius_server_s)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:radius-server:radius-serverstate'
+ self._meta_data['required_creation_parameters'].update(
+ ('secret', 'server',))
+
+
+class Ssl_Cc_Ldaps(Collection):
+ """BIG-IP® LTM Auth SSL CC LDAP collection"""
+ def __init__(self, ltm):
+ super(Ssl_Cc_Ldaps, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Ssl_Cc_Ldap]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:ssl-cc-ldap:ssl-cc-ldapstate': Ssl_Cc_Ldap}
+
+
+class Ssl_Cc_Ldap(Resource):
+ """BIG-IP® LTM Auth SSL CC LDAP resource"""
+ def __init__(self, ssl_cc_ldaps):
+ super(Ssl_Cc_Ldap, self).__init__(ssl_cc_ldaps)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:ssl-cc-ldap:ssl-cc-ldapstate'
+ self._meta_data['required_creation_parameters'].update(
+ ('servers', 'userKey',))
+
+
+class Ssl_Crldps(Collection):
+ """BIG-IP® LTM Auth SSL CLRDP collection"""
+ def __init__(self, ltm):
+ super(Ssl_Crldps, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Ssl_Crldp]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:ssl-crldp:ssl-crldpstate': Ssl_Crldp}
+
+
+class Ssl_Crldp(Resource):
+ """BIG-IP® LTM Auth SSL CLRDP resource"""
+ def __init__(self, ssl_crldps):
+ super(Ssl_Crldp, self).__init__(ssl_crldps)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:ssl-crldp:ssl-crldpstate'
+
+
+class Ssl_Ocsps(Collection):
+ """BIG-IP® LTM Auth SSL OCSP collection"""
+ def __init__(self, ltm):
+ super(Ssl_Ocsps, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Ssl_Ocsp]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:ssl-ocsp:ssl-ocspstate': Ssl_Ocsp}
+
+
+class Ssl_Ocsp(Resource):
+ """BIG-IP® LTM Auth SSL OCSP resource"""
+ def __init__(self, ssl_ocsps):
+ super(Ssl_Ocsp, self).__init__(ssl_ocsps)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:ssl-ocsp:ssl-ocspstate'
+
+
+class Tacacs_s(Collection):
+ """BIG-IP® LTM Auth TACACS+ Server collection"""
+ def __init__(self, ltm):
+ super(Tacacs_s, self).__init__(ltm)
+ self._meta_data['allowed_lazy_attributes'] = [Tacacs]
+ self._meta_data['attribute_registry'] =\
+ {'tm:ltm:auth:tacacs:tacacsstate': Tacacs}
+
+
+class Tacacs(Resource):
+ """BIG-IP® LTM Auth TACACS+ Server resource"""
+ def __init__(self, tacacs_s):
+ super(Tacacs, self).__init__(tacacs_s)
+ self._meta_data['required_json_kind'] =\
+ 'tm:ltm:auth:tacacs:tacacsstate'
+ self._meta_data['required_creation_parameters'].update(
+ ('secret', 'server',))
| Add LTM authentication API to the SDK
I would like to explore the feasibility of adding the LTM authentication API to the SDK, given the planned changes to that part of LTM. | F5Networks/f5-common-python | diff --git a/f5/bigip/tm/ltm/test/test_auth.py b/f5/bigip/tm/ltm/test/test_auth.py
new file mode 100644
index 0000000..068557a
--- /dev/null
+++ b/f5/bigip/tm/ltm/test/test_auth.py
@@ -0,0 +1,179 @@
+# coding=utf-8
+#
+# Copyright 2014-2016 F5 Networks Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from f5.bigip import ManagementRoot
+from f5.bigip.resource import MissingRequiredCreationParameter
+from f5.bigip.resource import UnsupportedMethod
+from f5.bigip.tm.ltm.auth import Crldp_Server
+from f5.bigip.tm.ltm.auth import Kerberos_Delegation
+from f5.bigip.tm.ltm.auth import Ldap
+from f5.bigip.tm.ltm.auth import Ocsp_Responder
+from f5.bigip.tm.ltm.auth import Profile
+from f5.bigip.tm.ltm.auth import Radius
+from f5.bigip.tm.ltm.auth import Radius_Server
+from f5.bigip.tm.ltm.auth import Ssl_Cc_Ldap
+from f5.bigip.tm.ltm.auth import Ssl_Crldp
+from f5.bigip.tm.ltm.auth import Ssl_Ocsp
+from f5.bigip.tm.ltm.auth import Tacacs
+import mock
+import pytest
+from six import iterkeys
+
+
+class HelperTest(object):
+ def __init__(self, collection_name):
+ self.partition = 'Common'
+ self.lowered = collection_name.lower()
+ self.test_name = 'fake_' + self.urielementname()
+ self.authkinds = {
+ 'crldp_server': 'tm:ltm:auth:crldp-server:crldp-serverstate',
+ 'kerberos_delegation':
+ 'tm:ltm:auth:kerberos-delegation:kerberos-delegationstate',
+ 'ldap': 'tm:ltm:auth:ldap:ldapstate',
+ 'ocsp_responder': 'tm:ltm:auth:ocsp-responder:ocsp-responderstate',
+ 'profile': 'tm:ltm:auth:profile:profilestate',
+ 'radius': 'tm:ltm:auth:radius:radiusstate',
+ 'radius_server': 'tm:ltm:auth:radius-server:radius-serverstate',
+ 'ssl_cc_ldap': 'tm:ltm:auth:ssl-cc-ldap:ssl-cc-ldapstate',
+ 'ssl_crldp': 'tm:ltm:auth:ssl-crldp:ssl-crldpstate',
+ 'ssl_ocsp': 'tm:ltm:auth:ssl-ocsp:ssl-ocspstate',
+ 'tacacs': 'tm:ltm:auth:tacacs:tacacsstate'
+ }
+
+ def urielementname(self):
+ if self.lowered[-2:] == '_s':
+ endind = 2
+ else:
+ endind = 1
+ return self.lowered[:-endind]
+
+ def set_resources(self, fakeicontrolsession_v12):
+ mr = ManagementRoot('192.168.1.1', 'admin', 'admin')
+ resourcecollection =\
+ getattr(getattr(getattr(mr.tm, 'ltm'), 'auth'),
+ self.lowered)
+ resource1 = getattr(resourcecollection, self.urielementname())
+ resource2 = getattr(resourcecollection, self.urielementname())
+ return resource1, resource2
+
+ def set_collections(self, fakeicontrolsession_v12):
+ mr = ManagementRoot('192.168.1.1', 'admin', 'admin')
+ resourcecollection =\
+ getattr(getattr(getattr(mr.tm, 'ltm'), 'auth'),
+ self.lowered)
+ return resourcecollection
+
+ def test_collections(self, fakeicontrolsession_v12, klass):
+ rc = self.set_collections(fakeicontrolsession_v12)
+ test_meta = rc._meta_data['attribute_registry']
+ test_meta2 = rc._meta_data['allowed_lazy_attributes']
+ kind = self.authkinds[self.urielementname()]
+ assert kind in list(iterkeys(test_meta))
+ assert klass in test_meta2
+
+ def test_create_two(self, fakeicontrolsession_v12):
+ r1, r2, = self.set_resources(fakeicontrolsession_v12)
+ assert r1 is not r2
+
+ def test_create_no_args(self, fakeicontrolsession_v12):
+ r1, r2, = self.set_resources(fakeicontrolsession_v12)
+ del r2
+ with pytest.raises(MissingRequiredCreationParameter):
+ r1.create()
+
+
+def test_crldp_server(fakeicontrolsession_v12):
+ a = HelperTest('Crldp_Servers')
+ a.test_collections(fakeicontrolsession_v12, Crldp_Server)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_kerberos_kelegation(fakeicontrolsession_v12):
+ a = HelperTest('Kerberos_Delegations')
+ a.test_collections(fakeicontrolsession_v12, Kerberos_Delegation)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_ldap(fakeicontrolsession_v12):
+ a = HelperTest('Ldaps')
+ a.test_collections(fakeicontrolsession_v12, Ldap)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_ocsp_responder(fakeicontrolsession_v12):
+ a = HelperTest('Ocsp_Responders')
+ a.test_collections(fakeicontrolsession_v12, Ocsp_Responder)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+class TestProfile(object):
+ def test_update_profile_raises(self):
+ profile_resource = Profile(mock.MagicMock())
+ with pytest.raises(UnsupportedMethod):
+ profile_resource.update()
+
+ def test_profile(self, fakeicontrolsession_v12):
+ a = HelperTest('Profiles')
+ a.test_collections(fakeicontrolsession_v12, Profile)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_radius(fakeicontrolsession_v12):
+ a = HelperTest('Radius_s')
+ a.test_collections(fakeicontrolsession_v12, Radius)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_radius_server(fakeicontrolsession_v12):
+ a = HelperTest('Radius_Servers')
+ a.test_collections(fakeicontrolsession_v12, Radius_Server)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_ssl_cc_ldap(fakeicontrolsession_v12):
+ a = HelperTest('Ssl_Cc_Ldaps')
+ a.test_collections(fakeicontrolsession_v12, Ssl_Cc_Ldap)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_ssl_clrdp(fakeicontrolsession_v12):
+ a = HelperTest('Ssl_Crldps')
+ a.test_collections(fakeicontrolsession_v12, Ssl_Crldp)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_ssl_ocsp(fakeicontrolsession_v12):
+ a = HelperTest('Ssl_Ocsps')
+ a.test_collections(fakeicontrolsession_v12, Ssl_Ocsp)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
+
+
+def test_tacacs(fakeicontrolsession_v12):
+ a = HelperTest('Tacacs_s')
+ a.test_collections(fakeicontrolsession_v12, Tacacs)
+ a.test_create_two(fakeicontrolsession_v12)
+ a.test_create_no_args(fakeicontrolsession_v12)
diff --git a/test/functional/tm/ltm/test_auth.py b/test/functional/tm/ltm/test_auth.py
new file mode 100644
index 0000000..73adce0
--- /dev/null
+++ b/test/functional/tm/ltm/test_auth.py
@@ -0,0 +1,346 @@
+# coding=utf-8
+#
+# Copyright 2014-2016 F5 Networks Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+import copy
+from f5.bigip.tm.ltm.auth import Crldp_Server
+from f5.bigip.tm.ltm.auth import Kerberos_Delegation
+from f5.bigip.tm.ltm.auth import Ldap
+from f5.bigip.tm.ltm.auth import Ocsp_Responder
+from f5.bigip.tm.ltm.auth import Profile
+from f5.bigip.tm.ltm.auth import Radius
+from f5.bigip.tm.ltm.auth import Radius_Server
+from f5.bigip.tm.ltm.auth import Ssl_Cc_Ldap
+from f5.bigip.tm.ltm.auth import Ssl_Crldp
+from f5.bigip.tm.ltm.auth import Ssl_Ocsp
+from f5.bigip.tm.ltm.auth import Tacacs
+from pprint import pprint as pp
+import pytest
+from requests.exceptions import HTTPError
+from six import iteritems
+TESTDESCRIPTION = "TESTDESCRIPTION"
+pp(__file__)
+
+
+def delete_dependency(mgmt_root, name):
+ try:
+ foo = mgmt_root.tm.ltm.auth.ssl_cc_ldaps.ssl_cc_ldap.load(name=name)
+ except HTTPError as err:
+ if err.response.status_code != 404:
+ raise
+ return
+ foo.delete()
+
+
+def setup_dependency(request, mgmt_root, name, **kwargs):
+ def teardown():
+ delete_dependency(mgmt_root, name)
+ delete_dependency(mgmt_root, name)
+ res = mgmt_root.tm.ltm.auth.ssl_cc_ldaps.ssl_cc_ldap.create(name=name,
+ **kwargs)
+ request.addfinalizer(teardown)
+ return res
+
+
+# Helper class to limit code repetition
+class HelperTest(object):
+ def __init__(self, collection_name):
+ self.partition = 'Common'
+ self.lowered = collection_name.lower()
+ self.test_name = 'fake_' + self.urielementname()
+ self.authkinds = {
+ 'crldp_server': 'tm:ltm:auth:crldp-server:crldp-serverstate',
+ 'kerberos_delegation':
+ 'tm:ltm:auth:kerberos-delegation:kerberos-delegationstate',
+ 'ldap': 'tm:ltm:auth:ldap:ldapstate',
+ 'ocsp_responder': 'tm:ltm:auth:ocsp-responder:ocsp-responderstate',
+ 'profile': 'tm:ltm:auth:profile:profilestate',
+ 'radius': 'tm:ltm:auth:radius:radiusstate',
+ 'radius_server': 'tm:ltm:auth:radius-server:radius-serverstate',
+ 'ssl_cc_ldap': 'tm:ltm:auth:ssl-cc-ldap:ssl-cc-ldapstate',
+ 'ssl_crldp': 'tm:ltm:auth:ssl-crldp:ssl-crldpstate',
+ 'ssl_ocsp': 'tm:ltm:auth:ssl-ocsp:ssl-ocspstate',
+ 'tacacs': 'tm:ltm:auth:tacacs:tacacsstate'
+ }
+
+ def urielementname(self):
+ if self.lowered[-2:] == '_s':
+ endind = 2
+ else:
+ endind = 1
+ return self.lowered[:-endind]
+
+ def delete_resource(self, resource):
+ try:
+ foo = resource.load(name=self.test_name, partition=self.partition)
+ except HTTPError as err:
+ if err.response.status_code != 404:
+ raise
+ return
+ foo.delete()
+
+ def setup_test(self, request, mgmt_root, **kwargs):
+ def teardown():
+ self.delete_resource(resource)
+
+ resourcecollection = \
+ getattr(getattr(getattr(mgmt_root.tm, 'ltm'), 'auth'),
+ self.lowered)
+ resource = getattr(resourcecollection, self.urielementname())
+ self.delete_resource(resource)
+ created = resource.create(name=self.test_name,
+ partition=self.partition,
+ **kwargs)
+ request.addfinalizer(teardown)
+ return created, resourcecollection
+
+ def test_MCURDL(self, request, mgmt_root, **kwargs):
+ # Testing create
+ authres, authcollection = self.setup_test(request, mgmt_root, **kwargs)
+ assert authres.name == self.test_name
+ assert authres.fullPath == '/Common/'+self.test_name
+ assert authres.generation and isinstance(authres.generation, int)
+ assert authres.kind == self.authkinds[self.urielementname()]
+
+ # Testing update
+ authres.description = TESTDESCRIPTION
+ pp(authres.raw)
+ authres.update()
+ assert hasattr(authres, 'description')
+ assert authres.description == TESTDESCRIPTION
+
+ # Testing refresh
+ authres.description = ''
+ authres.refresh()
+ assert hasattr(authres, 'description')
+ assert authres.description == TESTDESCRIPTION
+
+ # Testing modify
+ meta_data = authres.__dict__.pop('_meta_data')
+ start_dict = copy.deepcopy(authres.__dict__)
+ authres.__dict__['_meta_data'] = meta_data
+ authres.modify(description='MODIFIED')
+ desc = 'description'
+ for k, v in iteritems(authres.__dict__):
+ if k != desc:
+ start_dict[k] = authres.__dict__[k]
+ assert getattr(authres, k) == start_dict[k]
+ elif k == desc:
+ assert getattr(authres, desc) == 'MODIFIED'
+
+ # Testing load
+ a2 = getattr(authcollection, self.urielementname())
+ authres2 = a2.load(partition=self.partition, name=self.test_name)
+ assert authres.selfLink == authres2.selfLink
+
+ def test_collection(self, request, mgmt_root, **kwargs):
+ authres, authcollection = self.setup_test(request, mgmt_root, **kwargs)
+ assert authres.name == self.test_name
+ assert authres.fullPath == '/Common/' + self.test_name
+ assert authres.generation and isinstance(authres.generation, int)
+ assert authres.kind == self.authkinds[self.urielementname()]
+
+ coll = authcollection.get_collection()
+ assert isinstance(coll, list)
+ assert len(coll)
+
+ if self.lowered == 'crldp_servers':
+ assert isinstance(coll[0], Crldp_Server)
+ elif self.lowered == 'kerberos_delegations':
+ assert isinstance(coll[0], Kerberos_Delegation)
+ elif self.lowered == 'ldaps':
+ assert isinstance(coll[0], Ldap)
+ elif self.lowered == 'ocsp_responders':
+ assert isinstance(coll[0], Ocsp_Responder)
+ elif self.lowered == 'profiles':
+ assert isinstance(coll[0], Profile)
+ elif self.lowered == 'radius_s':
+ assert isinstance(coll[0], Radius)
+ elif self.lowered == 'radius_server_s':
+ assert isinstance(coll[0], Radius_Server)
+ elif self.lowered == 'ssl_cc_ldaps':
+ assert isinstance(coll[0], Ssl_Cc_Ldap)
+ elif self.lowered == 'ssl_crldps':
+ assert isinstance(coll[0], Ssl_Crldp)
+ elif self.lowered == 'ssl_ocsps':
+ assert isinstance(coll[0], Ssl_Ocsp)
+ elif self.lowered == 'tacacs':
+ assert isinstance(coll[0], Tacacs)
+
+ def test_profile_MCRDL(self, request, mgmt_root, **kwargs):
+ # Testing create
+ authres, authcollection = self.setup_test(request, mgmt_root, **kwargs)
+ assert authres.name == self.test_name
+ assert authres.fullPath == '/Common/' + self.test_name
+ assert authres.generation and isinstance(authres.generation, int)
+ assert authres.kind == self.authkinds[self.urielementname()]
+ assert authres.idleTimeout == 300
+
+ # Testing refresh
+ authres.idleTimeout = 0
+ authres.refresh()
+ assert hasattr(authres, 'idleTimeout')
+ assert authres.idleTimeout == 300
+
+ # Testing modify
+ meta_data = authres.__dict__.pop('_meta_data')
+ start_dict = copy.deepcopy(authres.__dict__)
+ authres.__dict__['_meta_data'] = meta_data
+ authres.modify(idleTimeout=100)
+ desc = 'idleTimeout'
+ for k, v in iteritems(authres.__dict__):
+ if k != desc:
+ start_dict[k] = authres.__dict__[k]
+ assert getattr(authres, k) == start_dict[k]
+ elif k == desc:
+ assert getattr(authres, desc) == 100
+
+ # Testing load
+ a2 = getattr(authcollection, self.urielementname())
+ authres2 = a2.load(partition=self.partition, name=self.test_name)
+ assert authres.selfLink == authres2.selfLink
+
+
+class TestCrldpServer(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Crldp_Servers')
+ auth.test_MCURDL(request, mgmt_root, host='10.10.10.10')
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Crldp_Servers')
+ auth.test_collection(request, mgmt_root, host='10.10.10.10')
+
+
[email protected](True, reason='this depends on an optional module')
+class TestKerberosDelegation(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Kerberos_Delegations')
+ auth.test_MCURDL(request, mgmt_root,
+ serverPrincipal='HTTP/fake.com',
+ clientPrincipal='HTTP/faketoo.com')
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Kerberos_Delegations')
+ auth.test_collection(request, mgmt_root,
+ serverPrincipal='HTTP/fake.com',
+ clientPrincipal='HTTP/faketoo.com')
+
+
[email protected](True, reason='this depends on an optional module')
+class TestLdap(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Ldaps')
+ auth.test_MCURDL(request, mgmt_root, servers=['10.10.10.10'])
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Ldaps')
+ auth.test_collection(request, mgmt_root, servers=['10.10.10.10'])
+
+
+class TestOcspResponder(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Ocsp_Responders')
+ auth.test_MCURDL(request, mgmt_root)
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Ocsp_Responders')
+ auth.test_collection(request, mgmt_root)
+
+
+class TestProfile(object):
+ def test_MCURDL(self, request, mgmt_root):
+ setup_dependency(request, mgmt_root, 'fakeldap', servers=[
+ '10.10.10.10'], userKey=12345)
+ auth = HelperTest('Profiles')
+ auth.test_profile_MCRDL(request, mgmt_root,
+ defaultsFrom='/Common/ssl_cc_ldap',
+ configuration='/Common/fakeldap')
+
+ def test_collection(self, request, mgmt_root):
+ setup_dependency(request, mgmt_root, 'fakeldap', servers=[
+ '10.10.10.10'], userKey=12345)
+ auth = HelperTest('Profiles')
+ auth.test_profile_MCRDL(request, mgmt_root,
+ defaultsFrom='/Common/ssl_cc_ldap',
+ configuration='/Common/fakeldap')
+
+
[email protected](True, reason='this depends on an optional module')
+class TestRadius(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Radius_s')
+ auth.test_MCURDL(request, mgmt_root)
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Radius_s')
+ auth.test_collection(request, mgmt_root)
+
+
+class TestRadiusServer(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Radius_Servers')
+ auth.test_MCURDL(request, mgmt_root, server='10.10.10.10',
+ secret='sekrit')
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Radius_Servers')
+ auth.test_collection(request, mgmt_root, server='10.10.10.10',
+ secret='sekrit')
+
+
+class TestSSLCcLdap(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Ssl_Cc_Ldaps')
+ auth.test_MCURDL(request, mgmt_root, servers=['10.10.10.10'],
+ userKey=12345)
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Ssl_Cc_Ldaps')
+ auth.test_collection(request, mgmt_root, servers=['10.10.10.10'],
+ userKey=12345)
+
+
+class TestSSLClrdp(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Ssl_Crldps')
+ auth.test_MCURDL(request, mgmt_root)
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Ssl_Crldps')
+ auth.test_collection(request, mgmt_root)
+
+
+class TestSSLOcsp(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Ssl_Ocsps')
+ auth.test_MCURDL(request, mgmt_root)
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Ssl_Ocsps')
+ auth.test_collection(request, mgmt_root)
+
+
+class TestTacacs(object):
+ def test_MCURDL(self, request, mgmt_root):
+ auth = HelperTest('Radius_Servers')
+ auth.test_MCURDL(request, mgmt_root, server='10.10.10.10',
+ secret='fortytwo')
+
+ def test_collection(self, request, mgmt_root):
+ auth = HelperTest('Radius_Servers')
+ auth.test_collection(request, mgmt_root, server='10.10.10.10',
+ secret='fortytwo')
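The patch and test patch above register eleven new collections under mgmt.tm.ltm.auth, and the tests drive them generically through getattr. Here is a usage sketch under the same conventions, assuming a reachable BIG-IP; the host, credentials, and resource name are placeholders, while the attribute path and the required 'server'/'secret' creation parameters come straight from the Tacacs classes in the patch.

from f5.bigip import ManagementRoot

mr = ManagementRoot('192.168.1.1', 'admin', 'admin')  # placeholder host/creds

# 'server' and 'secret' are the required creation parameters declared
# by the Tacacs resource in the patch above.
tacacs = mr.tm.ltm.auth.tacacs_s.tacacs.create(
    name='my_tacacs', partition='Common',
    server='10.10.10.10', secret='fortytwo')

# Round-trip: load the same resource back and walk the collection.
loaded = mr.tm.ltm.auth.tacacs_s.tacacs.load(
    name='my_tacacs', partition='Common')
assert loaded.selfLink == tacacs.selfLink
for item in mr.tm.ltm.auth.tacacs_s.get_collection():
    print(item.name, item.kind)  # kind is 'tm:ltm:auth:tacacs:tacacsstate'

tacacs.delete()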
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"pytest-bdd",
"pytest-benchmark",
"pytest-randomly",
"responses",
"mock",
"hypothesis",
"freezegun",
"trustme",
"requests-mock",
"requests",
"tomlkit"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
coverage==7.8.0
cryptography==44.0.2
exceptiongroup==1.2.2
execnet==2.1.1
f5-icontrol-rest==1.0.9
-e git+https://github.com/F5Networks/f5-common-python.git@8c50cfada0d0101ee34c58af44ed38776818c0f9#egg=f5_sdk
freezegun==1.5.1
gherkin-official==29.0.0
hypothesis==6.130.6
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
Mako==1.3.9
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
parse==1.20.2
parse_type==0.6.4
pluggy==1.5.0
py-cpuinfo==9.0.0
pycparser==2.22
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-bdd==8.1.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-randomly==3.16.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
responses==0.25.7
six==1.17.0
sortedcontainers==2.4.0
tomli==2.2.1
tomlkit==0.13.2
trustme==1.2.1
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: f5-common-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- coverage==7.8.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- f5-icontrol-rest==1.0.9
- freezegun==1.5.1
- gherkin-official==29.0.0
- hypothesis==6.130.6
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- mako==1.3.9
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- parse==1.20.2
- parse-type==0.6.4
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pycparser==2.22
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-bdd==8.1.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-randomly==3.16.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- responses==0.25.7
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==2.2.1
- tomlkit==0.13.2
- trustme==1.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/f5-common-python
| [
"f5/bigip/tm/ltm/test/test_auth.py::test_crldp_server",
"f5/bigip/tm/ltm/test/test_auth.py::test_kerberos_kelegation",
"f5/bigip/tm/ltm/test/test_auth.py::test_ldap",
"f5/bigip/tm/ltm/test/test_auth.py::test_ocsp_responder",
"f5/bigip/tm/ltm/test/test_auth.py::test_tacacs",
"f5/bigip/tm/ltm/test/test_auth.py::test_ssl_ocsp",
"f5/bigip/tm/ltm/test/test_auth.py::test_ssl_clrdp",
"f5/bigip/tm/ltm/test/test_auth.py::test_radius",
"f5/bigip/tm/ltm/test/test_auth.py::test_radius_server",
"f5/bigip/tm/ltm/test/test_auth.py::test_ssl_cc_ldap",
"f5/bigip/tm/ltm/test/test_auth.py::TestProfile::test_profile",
"f5/bigip/tm/ltm/test/test_auth.py::TestProfile::test_update_profile_raises"
]
| []
| []
| []
| Apache License 2.0 | 754 | [
"f5/bigip/tm/ltm/__init__.py",
"f5/bigip/tm/ltm/auth.py"
]
| [
"f5/bigip/tm/ltm/__init__.py",
"f5/bigip/tm/ltm/auth.py"
]
|
|
zalando-stups__senza-353 | 936b62cd98ad20892c30b8771b5db80a14c19aae | 2016-09-15 08:32:40 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/route53.py b/senza/manaus/route53.py
index 6aab215..38c177e 100644
--- a/senza/manaus/route53.py
+++ b/senza/manaus/route53.py
@@ -349,9 +349,22 @@ class Route53:
if name is not None and not name.endswith('.'):
name += '.'
for zone in cls.get_hosted_zones():
- # TODO use paginator
response = client.list_resource_record_sets(HostedZoneId=zone.id)
resources = response["ResourceRecordSets"] # type: List[Dict[str, Any]]
+
+ # If the response includes more than maxitems resource record sets,
+ # the value of the IsTruncated element in the response is true,
+ # and the values of the NextRecordName and NextRecordType elements
+ # in the response identify the first resource record set in the
+ # next group of maxitems resource record sets.
+ while response.get('IsTruncated', False):
+ next_name = response['NextRecordName']
+ next_type = response['NextRecordType']
+ response = client.list_resource_record_sets(HostedZoneId=zone.id,
+ StartRecordName=next_name,
+ StartRecordType=next_type)
+ resources.extend(response['ResourceRecordSets'])
+
for resource in resources:
record = Route53Record.from_boto_dict(resource,
hosted_zone=zone)
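The fix above drains Route 53's paginated list_resource_record_sets responses instead of reading only the first page, which is why weights on record sets beyond the first page went missing. Here is a standalone sketch of the same loop with plain boto3, assuming AWS credentials are configured; the hosted zone id is a placeholder. Like the patch, it resumes only on NextRecordName/NextRecordType and does not pass StartRecordIdentifier, which a fully general resume over weighted sets would also use.

import boto3

client = boto3.client('route53')
zone_id = '/hostedzone/EXAMPLE'  # placeholder

response = client.list_resource_record_sets(HostedZoneId=zone_id)
records = response['ResourceRecordSets']
# Route 53 returns at most maxitems record sets per call; a truncated
# response points at the next page via NextRecordName/NextRecordType.
while response.get('IsTruncated', False):
    response = client.list_resource_record_sets(
        HostedZoneId=zone_id,
        StartRecordName=response['NextRecordName'],
        StartRecordType=response['NextRecordType'])
    records.extend(response['ResourceRecordSets'])

print(len(records), 'record sets fetched')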
| senza traffic displays 0.0% weight for all stacks
When executing `senza traffic [name]` the result displays a weight of 0.0% for all stacks.
`senza domains` shows the weight correctly. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 651b318..e7207a3 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -28,7 +28,8 @@ from senza.components.weighted_dns_elastic_load_balancer import \
from senza.components.weighted_dns_elastic_load_balancer_v2 import \
component_weighted_dns_elastic_load_balancer_v2
-from fixtures import HOSTED_ZONE_ZO_NE_COM, HOSTED_ZONE_ZO_NE_DEV, boto_resource
+from fixtures import (HOSTED_ZONE_ZO_NE_COM, HOSTED_ZONE_ZO_NE_DEV, # noqa: F401
+ boto_resource, boto_client)
def test_invalid_component():
@@ -180,7 +181,7 @@ def test_component_load_balancer_http_only(monkeypatch):
"Name": "test_lb",
"SecurityGroups": "",
"HTTPPort": "9999",
- "SSLCertificateId": "arn:none", # should be ignored as we overwrite Listeners
+ "SSLCertificateId": "arn:none", # should be ignored as we overwrite Listeners
"Listeners": [{"Foo": "Bar"}]
}
info = {'StackName': 'foobar', 'StackVersion': '0.1'}
@@ -281,9 +282,11 @@ def test_component_stups_auto_configuration_vpc_id(monkeypatch):
sn3.tags = [{'Key': 'Name', 'Value': 'internal-3'}]
sn3.availability_zone = 'az-1'
ec2 = MagicMock()
+
def get_subnets(Filters):
assert Filters == [{'Name': 'vpc-id', 'Values': ['vpc-123']}]
return [sn1, sn2, sn3]
+
ec2.subnets.filter = get_subnets
image = MagicMock()
ec2.images.filter.return_value = [image]
@@ -350,7 +353,7 @@ def test_component_redis_cluster(monkeypatch):
assert 'RedisReplicationGroup' in result['Resources']
assert mock_string == result['Resources']['RedisReplicationGroup']['Properties']['SecurityGroupIds']
assert 2 == result['Resources']['RedisReplicationGroup']['Properties']['NumCacheClusters']
- assert True == result['Resources']['RedisReplicationGroup']['Properties']['AutomaticFailoverEnabled']
+ assert result['Resources']['RedisReplicationGroup']['Properties']['AutomaticFailoverEnabled']
assert 'Engine' in result['Resources']['RedisReplicationGroup']['Properties']
assert 'EngineVersion' in result['Resources']['RedisReplicationGroup']['Properties']
assert 'CacheNodeType' in result['Resources']['RedisReplicationGroup']['Properties']
@@ -361,26 +364,15 @@ def test_component_redis_cluster(monkeypatch):
assert 'SubnetIds' in result['Resources']['RedisSubnetGroup']['Properties']
-def test_weighted_dns_load_balancer(monkeypatch, boto_resource):
+def test_weighted_dns_load_balancer(monkeypatch, boto_client, boto_resource): # noqa: F811
senza.traffic.DNS_ZONE_CACHE = {}
- def my_client(rtype, *args):
- if rtype == 'route53':
- route53 = MagicMock()
- route53.list_hosted_zones.return_value = {'HostedZones': [HOSTED_ZONE_ZO_NE_COM],
- 'IsTruncated': False,
- 'MaxItems': '100'}
- return route53
- return MagicMock()
-
- monkeypatch.setattr('boto3.client', my_client)
-
configuration = {
"Name": "test_lb",
"SecurityGroups": "",
"HTTPPort": "9999",
- 'MainDomain': 'great.api.zo.ne.com',
- 'VersionDomain': 'version.api.zo.ne.com'
+ 'MainDomain': 'great.api.zo.ne',
+ 'VersionDomain': 'version.api.zo.ne'
}
info = {'StackName': 'foobar', 'StackVersion': '0.1'}
definition = {"Resources": {}}
@@ -408,20 +400,16 @@ def test_weighted_dns_load_balancer(monkeypatch, boto_resource):
assert 'MainDomain' not in result["Resources"]["test_lb"]["Properties"]
-def test_weighted_dns_load_balancer_with_different_domains(monkeypatch, boto_resource):
+def test_weighted_dns_load_balancer_with_different_domains(monkeypatch, # noqa: F811
+ boto_client,
+ boto_resource):
senza.traffic.DNS_ZONE_CACHE = {}
- def my_client(rtype, *args):
- if rtype == 'route53':
- route53 = MagicMock()
- route53.list_hosted_zones.return_value = {'HostedZones': [HOSTED_ZONE_ZO_NE_DEV,
- HOSTED_ZONE_ZO_NE_COM],
- 'IsTruncated': False,
- 'MaxItems': '100'}
- return route53
- return MagicMock()
-
- monkeypatch.setattr('boto3.client', my_client)
+ boto_client['route53'].list_hosted_zones.return_value = {
+ 'HostedZones': [HOSTED_ZONE_ZO_NE_DEV,
+ HOSTED_ZONE_ZO_NE_COM],
+ 'IsTruncated': False,
+ 'MaxItems': '100'}
configuration = {
"Name": "test_lb",
@@ -589,8 +577,8 @@ def test_component_auto_scaling_group_custom_tags():
'InstanceType': 't2.micro',
'Image': 'foo',
'Tags': [
- { 'Key': 'Tag1', 'Value': 'alpha' },
- { 'Key': 'Tag2', 'Value': 'beta' }
+ {'Key': 'Tag1', 'Value': 'alpha'},
+ {'Key': 'Tag2', 'Value': 'beta'}
]
}
@@ -619,6 +607,7 @@ def test_component_auto_scaling_group_custom_tags():
assert ts is not None
assert ts["Value"] == 'FooStack-FooVersion'
+
def test_component_auto_scaling_group_configurable_properties2():
definition = {"Resources": {}}
configuration = {
@@ -911,26 +900,15 @@ def test_get_load_balancer_name():
'1') == 'toolong12345678901234567890123-1'
-def test_weighted_dns_load_balancer_v2(monkeypatch, boto_resource):
+def test_weighted_dns_load_balancer_v2(monkeypatch, boto_client, boto_resource): # noqa: F811
senza.traffic.DNS_ZONE_CACHE = {}
- def my_client(rtype, *args):
- if rtype == 'route53':
- route53 = MagicMock()
- route53.list_hosted_zones.return_value = {'HostedZones': [HOSTED_ZONE_ZO_NE_COM],
- 'IsTruncated': False,
- 'MaxItems': '100'}
- return route53
- return MagicMock()
-
- monkeypatch.setattr('boto3.client', my_client)
-
configuration = {
"Name": "MyLB",
"SecurityGroups": "",
"HTTPPort": "9999",
- 'MainDomain': 'great.api.zo.ne.com',
- 'VersionDomain': 'version.api.zo.ne.com',
+ 'MainDomain': 'great.api.zo.ne',
+ 'VersionDomain': 'version.api.zo.ne',
# test overwritting specific properties in one of the resources
'TargetGroupAttributes': [{'Key': 'deregistration_delay.timeout_seconds', 'Value': '123'}],
# test that Security Groups are resolved
@@ -961,10 +939,18 @@ def test_weighted_dns_load_balancer_v2(monkeypatch, boto_resource):
assert 'MyLBListener' in result["Resources"]
assert 'MyLBTargetGroup' in result["Resources"]
- assert result['Resources']['MyLBTargetGroup']['Properties']['HealthCheckPort'] == '9999'
- assert result['Resources']['MyLBListener']['Properties']['Certificates'] == [{'CertificateArn': 'arn:aws:42'}]
+ target_group = result['Resources']['MyLBTargetGroup']
+ lb_listener = result['Resources']['MyLBListener']
+
+ assert target_group['Properties']['HealthCheckPort'] == '9999'
+ assert lb_listener['Properties']['Certificates'] == [
+ {'CertificateArn': 'arn:aws:42'}
+ ]
# test that our custom drain setting works
- assert result['Resources']['MyLBTargetGroup']['Properties']['TargetGroupAttributes'] == [{'Key': 'deregistration_delay.timeout_seconds', 'Value': '123'}]
+ assert target_group['Properties']['TargetGroupAttributes'] == [
+ {'Key': 'deregistration_delay.timeout_seconds',
+ 'Value': '123'}
+ ]
assert result['Resources']['MyLB']['Properties']['SecurityGroups'] == ['sg-foo']
diff --git a/tests/test_manaus/test_route53.py b/tests/test_manaus/test_route53.py
index 24c5441..658195a 100644
--- a/tests/test_manaus/test_route53.py
+++ b/tests/test_manaus/test_route53.py
@@ -164,6 +164,46 @@ def test_get_records(monkeypatch):
assert records[0].name == 'domain.example.net.'
+def test_get_records_paginated(monkeypatch):
+ m_client = MagicMock()
+ m_client.return_value = m_client
+ hosted_zone1 = {'Config': {'PrivateZone': False},
+ 'CallerReference': '0000',
+ 'ResourceRecordSetCount': 42,
+ 'Id': '/hostedzone/random1',
+ 'Name': 'example.com.'}
+ mock_records = [{'Name': 'domain.example.com.',
+ 'ResourceRecords': [{'Value': '127.0.0.1'}],
+ 'TTL': 600,
+ 'Type': 'A'},
+ {'Name': 'domain.example.net.',
+ 'ResourceRecords': [{'Value': '127.0.0.1'}],
+ 'TTL': 600,
+ 'Type': 'A'}
+ ]
+ m_client.list_hosted_zones.return_value = {'MaxItems': '100',
+ 'ResponseMetadata': {
+ 'HTTPStatusCode': 200,
+ 'RequestId': 'FakeId'
+ },
+ 'HostedZones': [hosted_zone1],
+ 'IsTruncated': False}
+
+ m_client.list_resource_record_sets.side_effect = [
+ {'ResourceRecordSets': mock_records,
+ 'IsTruncated': True,
+ 'NextRecordName': 'doesnt.matter.example.com',
+ 'NextRecordType': 'A'},
+ {'ResourceRecordSets': mock_records,
+ 'IsTruncated': False},
+ ]
+ monkeypatch.setattr('boto3.client', m_client)
+
+ route53 = Route53()
+ records = list(route53.get_records())
+ assert len(records) == 4
+
+
def test_route53_record_boto_dict():
record1 = Route53Record(name='test1', type='A')
assert record1.boto_dict == {'Name': 'test1',
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@936b62cd98ad20892c30b8771b5db80a14c19aae#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_route53.py::test_get_records_paginated"
]
| [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_cert_arn",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2",
"tests/test_components.py::test_max_description_length",
"tests/test_components.py::test_component_load_balancer_default_internal_scheme",
"tests/test_components.py::test_component_load_balancer_v2_default_internal_scheme",
"tests/test_manaus/test_route53.py::test_hosted_zone_from_boto_dict",
"tests/test_manaus/test_route53.py::test_record_from_boto_dict",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones_paginated",
"tests/test_manaus/test_route53.py::test_get_records",
"tests/test_manaus/test_route53.py::test_route53_record_boto_dict",
"tests/test_manaus/test_route53.py::test_hosted_zone_upsert",
"tests/test_manaus/test_route53.py::test_hosted_zone_create",
"tests/test_manaus/test_route53.py::test_hosted_zone_delete",
"tests/test_manaus/test_route53.py::test_to_alias",
"tests/test_manaus/test_route53.py::test_convert_domain_records_to_alias",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_domain_name",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_id",
"tests/test_manaus/test_route53.py::test_get_by_domain_name"
]
| []
| Apache License 2.0 | 755 | [
"senza/manaus/route53.py"
]
| [
"senza/manaus/route53.py"
]
|
|
XD-embedded__xd-docker-60 | 2b82bcc8ca4ebdcfdc719865c4910de092052be1 | 2016-09-15 20:28:33 | 2b82bcc8ca4ebdcfdc719865c4910de092052be1 | diff --git a/CHANGELOG b/CHANGELOG
index 2d7da75..238dbfc 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,10 @@
-0.2.0 (2016-08-28)
+0.3.0 (in development)
----------------------
+* Add container_upload() method.
+* Add commit() method.
+
+0.2.0 (2016-08-28)
+------------------
* Add container_remove() method.
* Add container_start() method.
* Add container_wait() method.
diff --git a/xd/docker/client.py b/xd/docker/client.py
index 879dea2..f16bc78 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -643,3 +643,40 @@ class DockerClient(object):
raise PermissionDenied(
"Volume or container rootfs is marked as read-only") \
from exc
+
+ def commit(self,
+ container: Union[Container, ContainerName, str],
+ repo: Optional[Union[Repository, str]]=None,
+ comment: Optional[str]=None,
+ author: Optional[str]=None,
+ pause: Optional[bool]=None):
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+ if isinstance(repo, str):
+ repo = Repository(repo)
+
+ params = {'container': id_or_name}
+ if repo:
+ params['repo'] = repo.name
+ if repo.tag is not None:
+ params['tag'] = repo.tag
+ if comment is not None:
+ params['comment'] = comment
+ if author is not None:
+ params['author'] = author
+ if pause is not None:
+ params['pause'] = pause
+
+ # TODO: add support for 'config' JSON parameter
+ # The ContainerConfig class should be changed to allow image to be
+ # optional, so we can simply pass a instance of that
+
+ # TODO: add support for 'changes' query parameter
+ r = self._post('/commit', params=params)
+ return Image(self, id=r.json()['Id'])
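A usage sketch for the new method added above, assuming a reachable Docker daemon on the client's default socket; the container and repository names are placeholders. The keyword set mirrors the signature in the patch, and commit() returns an Image whose id comes from the daemon's JSON reply.

from xd.docker.client import DockerClient

client = DockerClient()  # no args: the client's default local daemon

# The container argument also accepts Container/ContainerName instances;
# a plain str is the simplest form. repo takes 'name' or 'name:tag'.
image = client.commit('xd-docker-test',
                      repo='foo:bar',
                      comment='snapshot after setup',
                      author='Jane Doe <jane@example.com>',
                      pause=True)
print(image.id)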
| client.commit()
Client API method to create an image from a container. | XD-embedded/xd-docker | diff --git a/tests/integration/commit_test.py b/tests/integration/commit_test.py
new file mode 100644
index 0000000..b2009b1
--- /dev/null
+++ b/tests/integration/commit_test.py
@@ -0,0 +1,57 @@
+import pytest
+import os
+import re
+import json
+
+
+from xd.docker.client import *
+
+
+def test_basic(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test')
+ assert foo.id
+ assert os.system('docker inspect {}'.format(foo.id) +
+ '|grep \'"Comment": "foobar"\'') != 0
+ assert os.system('docker inspect {}'.format(foo.id) +
+ '|grep \'"Author": "foobar"\'') != 0
+
+
+def test_with_repo(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test', repo='foo')
+ assert foo.id
+
+
+def test_with_tag(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test', repo='foo:bar')
+ assert foo.id
+
+
+def test_comment(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test', comment='foobar')
+ assert foo.id
+ assert os.system('docker inspect {}'.format(foo.id) +
+ '|grep \'"Comment": "foobar"\'') == 0
+
+
+def test_author(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test', author='foobar')
+ assert foo.id
+ assert os.system('docker inspect {}'.format(foo.id) +
+ '|grep \'"Author": "foobar"\'') == 0
+
+
+def test_pause_true(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test', pause=True)
+ assert foo.id
+
+
+def test_pause_false(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ foo = docker.commit('xd-docker-test', pause=False)
+ assert foo.id
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 4e7fbb6..aef1579 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1450,3 +1450,86 @@ class container_upload_tests(ContextClientTestCase):
return_value=requests_mock.version_response("1.19", "1.7.1"))
with pytest.raises(IncompatibleRemoteAPI):
self.client.container_upload('foo', self.tar_file, 'bar')
+
+
+class commit_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_str(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foo')
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit(ContainerName('foo'))
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit(Container('foo'))
+
+ @mock.patch('requests.post')
+ def test_repo_str(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foobar', repo='foo')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'repo' in params
+ assert params['repo'] == 'foo'
+
+ @mock.patch('requests.post')
+ def test_repotag_str(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foobar', repo='foo:bar')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'repo' in params
+ assert params['repo'] == 'foo'
+ assert 'tag' in params
+ assert params['tag'] == 'bar'
+
+ @mock.patch('requests.post')
+ def test_comment(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foobar', comment='foo')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'comment' in params
+ assert params['comment'] == 'foo'
+
+ @mock.patch('requests.post')
+ def test_author(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foobar', author='foo')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'author' in params
+ assert params['author'] == 'foo'
+
+ @mock.patch('requests.post')
+ def test_pause_true(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foo', pause=True)
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'pause' in params
+ assert params['pause'] == True
+
+ @mock.patch('requests.post')
+ def test_pause_false(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ '{"Id": "596069db4bf5"}', 201)
+ self.client.commit('foo', pause=False)
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'pause' in params
+ assert params['pause'] == False
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@2b82bcc8ca4ebdcfdc719865c4910de092052be1#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::commit_tests::test_author",
"tests/unit/client_test.py::commit_tests::test_comment",
"tests/unit/client_test.py::commit_tests::test_container",
"tests/unit/client_test.py::commit_tests::test_containername",
"tests/unit/client_test.py::commit_tests::test_pause_false",
"tests/unit/client_test.py::commit_tests::test_pause_true",
"tests/unit/client_test.py::commit_tests::test_repo_str",
"tests/unit/client_test.py::commit_tests::test_repotag_str",
"tests/unit/client_test.py::commit_tests::test_str"
]
| [
"tests/integration/commit_test.py::test_basic",
"tests/integration/commit_test.py::test_with_repo",
"tests/integration/commit_test.py::test_with_tag",
"tests/integration/commit_test.py::test_comment",
"tests/integration/commit_test.py::test_author",
"tests/integration/commit_test.py::test_pause_true",
"tests/integration/commit_test.py::test_pause_false"
]
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str",
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_already_stopped",
"tests/unit/client_test.py::container_stop_tests::test_container",
"tests/unit/client_test.py::container_stop_tests::test_containername",
"tests/unit/client_test.py::container_stop_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_normal",
"tests/unit/client_test.py::container_stop_tests::test_timeout",
"tests/unit/client_test.py::container_restart_tests::test_container",
"tests/unit/client_test.py::container_restart_tests::test_containername",
"tests/unit/client_test.py::container_restart_tests::test_no_such_container",
"tests/unit/client_test.py::container_restart_tests::test_normal",
"tests/unit/client_test.py::container_restart_tests::test_timeout",
"tests/unit/client_test.py::container_kill_tests::test_container",
"tests/unit/client_test.py::container_kill_tests::test_containername",
"tests/unit/client_test.py::container_kill_tests::test_no_such_container",
"tests/unit/client_test.py::container_kill_tests::test_normal",
"tests/unit/client_test.py::container_kill_tests::test_sighup",
"tests/unit/client_test.py::container_kill_tests::test_sigint",
"tests/unit/client_test.py::container_upload_tests::test_container",
"tests/unit/client_test.py::container_upload_tests::test_containername",
"tests/unit/client_test.py::container_upload_tests::test_incompatible_remote_api",
"tests/unit/client_test.py::container_upload_tests::test_overwritedirnondir",
"tests/unit/client_test.py::container_upload_tests::test_readonly",
"tests/unit/client_test.py::container_upload_tests::test_str"
]
| []
| MIT License | 756 | [
"CHANGELOG",
"xd/docker/client.py"
]
| [
"CHANGELOG",
"xd/docker/client.py"
]
|
|
erikrose__parsimonious-98 | 538aac8454425e72f059fb2ca196a4187be4653f | 2016-09-16 08:33:34 | 30b94f1be71a7be640f3f4285a34cc495e18b87a | diff --git a/parsimonious/nodes.py b/parsimonious/nodes.py
index a60e7f4..26d873d 100644
--- a/parsimonious/nodes.py
+++ b/parsimonious/nodes.py
@@ -90,8 +90,10 @@ class Node(StrAndRepr):
def __eq__(self, other):
"""Support by-value deep comparison with other nodes for testing."""
- return (other is not None and
- self.expr_name == other.expr_name and
+ if not isinstance(other, Node):
+ return NotImplemented
+
+ return (self.expr_name == other.expr_name and
self.full_text == other.full_text and
self.start == other.start and
self.end == other.end and
| Node __eq__ method internal error
`Node`'s `__eq__` method doesn't check the type of the object it's being compared to, which leads to an exception if you try to compare Nodes to non-nodes. For example, I can do `5 == "hello"` in Python, but if I try `5 == grammar.parse(text)`, I get the following exception:
```
File "/Users/wiseman/Dropbox/Disney/src/cca_dialogengine/env/lib/python2.7/site-packages/parsimonious/nodes.py", line 90, in __eq__
self.expr_name == other.expr_name and
AttributeError: 'int' object has no attribute 'expr_name'
```
| erikrose/parsimonious | diff --git a/parsimonious/tests/test_nodes.py b/parsimonious/tests/test_nodes.py
index 404aa36..caf9f68 100644
--- a/parsimonious/tests/test_nodes.py
+++ b/parsimonious/tests/test_nodes.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
from nose import SkipTest
-from nose.tools import eq_, assert_raises
+from nose.tools import eq_, ok_, assert_raises
from parsimonious import Grammar, NodeVisitor, VisitationError, rule
from parsimonious.nodes import Node
@@ -142,3 +142,9 @@ def test_unwrapped_exceptions():
raise PrimalScream('This should percolate up!')
assert_raises(PrimalScream, Screamer().parse, 'howdy')
+
+
+def test_node_inequality():
+ node = Node('text', 'o hai', 0, 5)
+ ok_(node != 5)
+ ok_(node != None)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "six",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
-e git+https://github.com/erikrose/parsimonious.git@538aac8454425e72f059fb2ca196a4187be4653f#egg=parsimonious
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six @ file:///tmp/build/80754af9/six_1644875935023/work
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: parsimonious
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/parsimonious
| [
"parsimonious/tests/test_nodes.py::test_node_inequality"
]
| []
| [
"parsimonious/tests/test_nodes.py::test_visitor",
"parsimonious/tests/test_nodes.py::test_visitation_exception",
"parsimonious/tests/test_nodes.py::test_str",
"parsimonious/tests/test_nodes.py::test_repr",
"parsimonious/tests/test_nodes.py::test_parse_shortcut",
"parsimonious/tests/test_nodes.py::test_match_shortcut",
"parsimonious/tests/test_nodes.py::test_rule_decorator",
"parsimonious/tests/test_nodes.py::test_unwrapped_exceptions"
]
| []
| MIT License | 757 | [
"parsimonious/nodes.py"
]
| [
"parsimonious/nodes.py"
]
|
|
DataKind-SG__test-driven-data-cleaning-34 | f21e7b563c1e49803a098586930f2e4682eeb06f | 2016-09-17 10:17:14 | d1e67b345b5982b41560817ef859bd0157852913 | diff --git a/tddc/__main__.py b/tddc/__main__.py
index 0749707..208b1c5 100644
--- a/tddc/__main__.py
+++ b/tddc/__main__.py
@@ -1,6 +1,58 @@
+"""Test driven data cleaning
+Usage:
+ tddc summarize <input_file> [--output=<dir>] [--null=<NA>]
+ tddc build_trello <input_file> [--output=<dir>]
+ tddc build <input_file> [--output=<dir>]
+ tddc -h | --help
+ tddc --version
+
+Options:
+ -h --help Show this screen.
+ --version Show version.
+ --output=<dir> Output directory [default: output]
+ --null=<NA> Null string [default: NA]
+"""
+from docopt import docopt
import os
import sys
-import tddc.run_tddc
+
+import tddc
+from tddc import summarize, build_trello, build
+
+
+def get_input_root_dir():
+ return os.getcwd()
+
+
+def get_output_root_dir():
+ return os.getcwd()
+
+
+def execute(cli_args):
+ arguments = docopt(__doc__, cli_args, version=tddc.__version__)
+ if arguments['summarize']:
+ summarize.go(
+ input_root_dir=get_input_root_dir(),
+ input_file=arguments['<input_file>'],
+ output_root_dir=get_output_root_dir(),
+ output_dir=arguments['--output'],
+ null_string=arguments['--null'],
+ )
+ elif arguments['build_trello']:
+ build_trello.go(
+ summary_root_dir=get_input_root_dir(),
+ input_file=arguments['<input_file>'],
+ trello_summary_root_dir=get_output_root_dir(),
+ output_dir=arguments['--output']
+ )
+ elif arguments['build']:
+ build.go(
+ summaries_root_dir=get_input_root_dir(),
+ input_file=arguments['<input_file>'],
+ scripts_root_dir=get_output_root_dir(),
+ output_dir=arguments['--output']
+ )
+
if __name__ == '__main__':
- tddc.run_tddc.execute(sys.argv[1:])
+ execute(sys.argv[1:])
diff --git a/tddc/run_tddc.py b/tddc/run_tddc.py
index 208b1c5..e69de29 100644
--- a/tddc/run_tddc.py
+++ b/tddc/run_tddc.py
@@ -1,58 +0,0 @@
-"""Test driven data cleaning
-Usage:
- tddc summarize <input_file> [--output=<dir>] [--null=<NA>]
- tddc build_trello <input_file> [--output=<dir>]
- tddc build <input_file> [--output=<dir>]
- tddc -h | --help
- tddc --version
-
-Options:
- -h --help Show this screen.
- --version Show version.
- --output=<dir> Output directory [default: output]
- --null=<NA> Null string [default: NA]
-"""
-from docopt import docopt
-import os
-import sys
-
-import tddc
-from tddc import summarize, build_trello, build
-
-
-def get_input_root_dir():
- return os.getcwd()
-
-
-def get_output_root_dir():
- return os.getcwd()
-
-
-def execute(cli_args):
- arguments = docopt(__doc__, cli_args, version=tddc.__version__)
- if arguments['summarize']:
- summarize.go(
- input_root_dir=get_input_root_dir(),
- input_file=arguments['<input_file>'],
- output_root_dir=get_output_root_dir(),
- output_dir=arguments['--output'],
- null_string=arguments['--null'],
- )
- elif arguments['build_trello']:
- build_trello.go(
- summary_root_dir=get_input_root_dir(),
- input_file=arguments['<input_file>'],
- trello_summary_root_dir=get_output_root_dir(),
- output_dir=arguments['--output']
- )
- elif arguments['build']:
- build.go(
- summaries_root_dir=get_input_root_dir(),
- input_file=arguments['<input_file>'],
- scripts_root_dir=get_output_root_dir(),
- output_dir=arguments['--output']
- )
-
-
-if __name__ == '__main__':
- execute(sys.argv[1:])
| run_tddc and __main__ are not both needed
move everything in run_tddc to __main__ | DataKind-SG/test-driven-data-cleaning | diff --git a/tddc/tests/test_run_tddc.py b/tddc/tests/test_main.py
similarity index 83%
rename from tddc/tests/test_run_tddc.py
rename to tddc/tests/test_main.py
index 68ba0d2..a575056 100644
--- a/tddc/tests/test_run_tddc.py
+++ b/tddc/tests/test_main.py
@@ -2,7 +2,7 @@ import pytest
import os
from mock import patch
-from tddc import run_tddc, common
+from tddc import __main__, common
@pytest.fixture(scope='module')
@@ -11,15 +11,15 @@ def root_dir():
def test_get_input_root_dir(root_dir):
- assert run_tddc.get_input_root_dir() == root_dir
+ assert __main__.get_input_root_dir() == root_dir
def test_get_output_root_dir(root_dir):
- assert run_tddc.get_output_root_dir() == root_dir
+ assert __main__.get_output_root_dir() == root_dir
-@patch('tddc.run_tddc.get_input_root_dir')
-@patch('tddc.run_tddc.get_output_root_dir')
+@patch('tddc.__main__.get_input_root_dir')
+@patch('tddc.__main__.get_output_root_dir')
def test_cli_summarize(
mock_output_root_dir, mock_input_root_dir, fixtures_dir, input_filename, null_string, tmpdir
):
@@ -37,12 +37,12 @@ def test_cli_summarize(
# test_summarize.py already tests the content of the file. This just tests that the CLI works properly and
# generates a file at the expected location.
assert not os.path.isfile(expected_output_loc)
- run_tddc.execute(cli_args)
+ __main__.execute(cli_args)
assert os.path.isfile(expected_output_loc)
-@patch('tddc.run_tddc.get_input_root_dir')
-@patch('tddc.run_tddc.get_output_root_dir')
+@patch('tddc.__main__.get_input_root_dir')
+@patch('tddc.__main__.get_output_root_dir')
@patch('tddc.build_trello.Trello.client')
def test_cli_build_trello(
mock_client, mock_output_root_dir, mock_input_root_dir, fixtures_dir, input_filename, tmpdir
@@ -64,5 +64,5 @@ def test_cli_build_trello(
# test_build_trello.py already tests the content of the file. This just tests that the CLI works properly and
# generates a file at the expected location.
assert not os.path.isfile(expected_output_loc)
- run_tddc.execute(cli_args)
+ __main__.execute(cli_args)
assert os.path.isfile(expected_output_loc)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_removed_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backports.functools-lru-cache==1.2.1
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
docopt==0.6.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==5.2.0
oauthlib==3.2.2
packaging==21.3
pluggy==1.0.0
py==1.11.0
py-trello==0.6.1
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==3.12
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
-e git+https://github.com/DataKind-SG/test-driven-data-cleaning.git@f21e7b563c1e49803a098586930f2e4682eeb06f#egg=tddc
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: test-driven-data-cleaning
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backports-functools-lru-cache==1.2.1
- charset-normalizer==2.0.12
- coverage==6.2
- docopt==0.6.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==5.2.0
- oauthlib==3.2.2
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- py-trello==0.6.1
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==3.12
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/test-driven-data-cleaning
| [
"tddc/tests/test_main.py::test_get_input_root_dir",
"tddc/tests/test_main.py::test_get_output_root_dir",
"tddc/tests/test_main.py::test_cli_summarize",
"tddc/tests/test_main.py::test_cli_build_trello"
]
| []
| []
| []
| MIT License | 758 | [
"tddc/__main__.py",
"tddc/run_tddc.py"
]
| [
"tddc/__main__.py",
"tddc/run_tddc.py"
]
|
|
marshmallow-code__apispec-92 | 14be6d8be8608dec4af9e215a154e521f9d8ff78 | 2016-09-18 18:51:35 | 14be6d8be8608dec4af9e215a154e521f9d8ff78 | diff --git a/apispec/core.py b/apispec/core.py
index 66028b1..170ca0b 100644
--- a/apispec/core.py
+++ b/apispec/core.py
@@ -154,12 +154,19 @@ class APISpec(object):
:param dict|None operations: describes the http methods and options for `path`
:param dict kwargs: parameters used by any path helpers see :meth:`register_path_helper`
"""
+ def normalize_path(path):
+ if path and 'basePath' in self.options:
+ pattern = '^{0}'.format(re.escape(self.options['basePath']))
+ path = re.sub(pattern, '', path)
+
+ return path
+
p = path
if isinstance(path, Path):
p = path.path
- if p and 'basePath' in self.options:
- pattern = '^{0}'.format(re.escape(self.options['basePath']))
- p = re.sub(pattern, '', p)
+
+ p = normalize_path(p)
+
if isinstance(path, Path):
path.path = p
else:
@@ -173,6 +180,7 @@ class APISpec(object):
except TypeError:
continue
if isinstance(ret, Path):
+ ret.path = normalize_path(ret.path)
path.update(ret)
if not path.path:
| Core path does not support swagger basePath
Related to #69.
Flask allows configuring the `APPLICATION_ROOT`, which provides the base path for the routes. #70 fixed this issue so that the flask extension reports the fully qualified path.
When the specs are dumped as swagger specs using `to_dict()`, the full path is shown on every route, even though the swagger specification supports the [`basePath`](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#fixed-fields) field on the root object.
I think it is important to keep the meaningful portion of the route URL separate from the base path to maintain readable documentation. #70 was the right fix for the flask extension since the `apispec` core assumes paths are absolute, but the fact that both `flask` and `swagger` support a configurable base path indicates that the `apispec` core should probably internalize the concept of base paths and expose them to extensions.
Examples of base path in similar libraries:
- [Bottle: ResourceManager (default) path](http://bottlepy.org/docs/dev/api.html#bottle.ResourceManager)
- [RAML: Base URI](https://github.com/raml-org/raml-spec/blob/master/versions/raml-10/raml-10.md/#base-uri-and-base-uri-parameters)
> Edit: Updated to reflect that generating swagger specs is a core functionality of `apispec`, not an extension.
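A rough standalone sketch of the normalization the patch above applies (the real helper closes over `self.options['basePath']` rather than taking it as an argument):
```python
import re

def normalize_path(path, base_path):
    """Strip a configured swagger basePath prefix from an absolute path.

    With base_path '/v1', a flask-reported '/v1/app/root/pet' becomes
    '/app/root/pet', and '/v1' is then emitted once in the spec's
    root-level basePath field.
    """
    if path and base_path:
        pattern = '^{0}'.format(re.escape(base_path))
        path = re.sub(pattern, '', path)
    return path
```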
| marshmallow-code/apispec | diff --git a/tests/test_ext_flask.py b/tests/test_ext_flask.py
index eeb4a03..5bbfcfd 100644
--- a/tests/test_ext_flask.py
+++ b/tests/test_ext_flask.py
@@ -108,7 +108,8 @@ class TestPathHelpers:
def test_path_includes_app_root(self, app, spec):
- app.config['APPLICATION_ROOT'] = '/app/root'
+ spec.options['basePath'] = '/v1'
+ app.config['APPLICATION_ROOT'] = '/v1/app/root'
@app.route('/partial/path/pet')
def get_pet():
@@ -119,7 +120,8 @@ class TestPathHelpers:
def test_path_with_args_includes_app_root(self, app, spec):
- app.config['APPLICATION_ROOT'] = '/app/root'
+ spec.options['basePath'] = '/v1'
+ app.config['APPLICATION_ROOT'] = '/v1/app/root'
@app.route('/partial/path/pet/{pet_id}')
def get_pet(pet_id):
@@ -130,7 +132,8 @@ class TestPathHelpers:
def test_path_includes_app_root_with_right_slash(self, app, spec):
- app.config['APPLICATION_ROOT'] = '/app/root/'
+ spec.options['basePath'] = '/v1'
+ app.config['APPLICATION_ROOT'] = '/v1/app/root/'
@app.route('/partial/path/pet')
def get_pet():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/marshmallow-code/apispec.git@14be6d8be8608dec4af9e215a154e521f9d8ff78#egg=apispec
backports.tarfile==1.2.0
blinker==1.9.0
cachetools==5.5.2
certifi==2025.1.31
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
cryptography==44.0.2
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
flake8==2.5.4
Flask==3.1.0
id==1.5.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
invoke==2.2.0
itsdangerous==2.2.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.9.0
Jinja2==3.1.6
keyring==25.6.0
markdown-it-py==3.0.0
MarkupSafe==3.0.2
marshmallow==3.26.1
mccabe==0.4.0
mdurl==0.1.2
mock==5.2.0
more-itertools==10.6.0
nh3==0.2.21
packaging==24.2
pep8==1.7.1
platformdirs==4.3.7
pluggy==1.5.0
pycparser==2.22
pyflakes==1.0.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
PyYAML==6.0.2
readme_renderer==44.0
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
SecretStorage==3.3.3
tomli==2.2.1
tornado==6.4.2
tox==4.25.0
twine==6.1.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
Werkzeug==3.1.3
zipp==3.21.0
| name: apispec
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-tarfile==1.2.0
- blinker==1.9.0
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- cryptography==44.0.2
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==2.5.4
- flask==3.1.0
- id==1.5.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- invoke==2.2.0
- itsdangerous==2.2.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jeepney==0.9.0
- jinja2==3.1.6
- keyring==25.6.0
- markdown-it-py==3.0.0
- markupsafe==3.0.2
- marshmallow==3.26.1
- mccabe==0.4.0
- mdurl==0.1.2
- mock==5.2.0
- more-itertools==10.6.0
- nh3==0.2.21
- packaging==24.2
- pep8==1.7.1
- platformdirs==4.3.7
- pluggy==1.5.0
- pycparser==2.22
- pyflakes==1.0.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pyyaml==6.0.2
- readme-renderer==44.0
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- secretstorage==3.3.3
- tomli==2.2.1
- tornado==6.4.2
- tox==4.25.0
- twine==6.1.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- werkzeug==3.1.3
- zipp==3.21.0
prefix: /opt/conda/envs/apispec
| [
"tests/test_ext_flask.py::TestPathHelpers::test_path_includes_app_root",
"tests/test_ext_flask.py::TestPathHelpers::test_path_with_args_includes_app_root",
"tests/test_ext_flask.py::TestPathHelpers::test_path_includes_app_root_with_right_slash"
]
| [
"tests/test_ext_flask.py::TestPathHelpers::test_integration_with_docstring_introspection"
]
| [
"tests/test_ext_flask.py::TestPathHelpers::test_path_from_view",
"tests/test_ext_flask.py::TestPathHelpers::test_path_with_multiple_methods",
"tests/test_ext_flask.py::TestPathHelpers::test_path_is_translated_to_swagger_template"
]
| []
| MIT License | 759 | [
"apispec/core.py"
]
| [
"apispec/core.py"
]
|
|
googleapis__gax-python-132 | 7b3172b9e8ebd1e513955bf60b42aa5f63d37d4a | 2016-09-20 16:24:28 | 7b3172b9e8ebd1e513955bf60b42aa5f63d37d4a | geigerj: Note: this is a breaking change, but since we haven't released `0.14.0` yet, I'm not incrementing the version.
codecov-io: ## [Current coverage](https://codecov.io/gh/googleapis/gax-python/pull/132?src=pr) is 98.05% (diff: 100%)
> Merging [#132](https://codecov.io/gh/googleapis/gax-python/pull/132?src=pr) into [master](https://codecov.io/gh/googleapis/gax-python/branch/master?src=pr) will increase coverage by **0.02%**
```diff
@@ master #132 diff @@
==========================================
Files 8 8
Lines 608 616 +8
Methods 0 0
Messages 0 0
Branches 0 0
==========================================
+ Hits 596 604 +8
Misses 12 12
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [7b3172b...db0bf04](https://codecov.io/gh/googleapis/gax-python/compare/7b3172b9e8ebd1e513955bf60b42aa5f63d37d4a...db0bf049ff5ba03dc15aa40dc4da7ab02e4efed3?src=pr)
bjwatson: LGTM | diff --git a/google/gax/api_callable.py b/google/gax/api_callable.py
index 622c08e..00c7763 100644
--- a/google/gax/api_callable.py
+++ b/google/gax/api_callable.py
@@ -36,8 +36,8 @@ import time
from future import utils
from . import (BackoffSettings, BundleOptions, bundling, _CallSettings, config,
- PageIterator, ResourceIterator, RetryOptions)
-from .errors import GaxError, RetryError
+ errors, PageIterator, ResourceIterator, RetryOptions)
+from .errors import RetryError
_MILLIS_PER_SECOND = 1000
@@ -414,12 +414,12 @@ def construct_settings(
return defaults
-def _catch_errors(a_func, errors):
+def _catch_errors(a_func, to_catch):
"""Updates a_func to wrap exceptions with GaxError
Args:
a_func (callable): A callable.
- retry (list[Exception]): Configures the exceptions to wrap.
+ to_catch (list[Exception]): Configures the exceptions to wrap.
Returns:
A function that will wrap certain exceptions with GaxError
@@ -429,8 +429,9 @@ def _catch_errors(a_func, errors):
try:
return a_func(*args, **kwargs)
# pylint: disable=catching-non-exception
- except tuple(errors) as exception:
- utils.raise_with_traceback(GaxError('RPC failed', cause=exception))
+ except tuple(to_catch) as exception:
+ utils.raise_with_traceback(
+ errors.create_error('RPC failed', cause=exception))
return inner
diff --git a/google/gax/config.py b/google/gax/config.py
index 2d94ebe..3a1d34d 100644
--- a/google/gax/config.py
+++ b/google/gax/config.py
@@ -49,6 +49,10 @@ the client constants configuration for retrying into the correct gRPC objects.
"""
+NAME_STATUS_CODES = grpc.NAME_STATUS_CODES
+"""Inverse map for STATUS_CODE_NAMES"""
+
+
create_stub = grpc.create_stub # pylint: disable=invalid-name,
"""The function to use to create stubs."""
diff --git a/google/gax/errors.py b/google/gax/errors.py
index c118b1e..79dc12f 100644
--- a/google/gax/errors.py
+++ b/google/gax/errors.py
@@ -29,6 +29,9 @@
"""Provides GAX exceptions."""
+from __future__ import absolute_import
+from . import config
+
class GaxError(Exception):
"""Common base class for exceptions raised by GAX.
@@ -51,6 +54,25 @@ class GaxError(Exception):
return 'GaxError({}, caused by {})'.format(msg, self.cause)
+def create_error(msg, cause=None):
+ """Creates an error.
+
+ Uses a Python built-in exception if one is available, and a
+ GaxError otherwise.
+
+ Attributes:
+ msg (string): describes the error that occurred.
+ cause (Exception, optional): the exception raised by a lower
+ layer of the RPC stack (for example, gRPC) that caused this
+ exception, or None if this exception originated in GAX.
+ """
+ if config.NAME_STATUS_CODES.get(
+ config.exc_to_code(cause)) == 'INVALID_ARGUMENT':
+ return ValueError('{}: {}'.format(msg, cause))
+ else:
+ return GaxError(msg, cause)
+
+
class RetryError(GaxError):
"""Indicates an error during automatic GAX retrying."""
pass
diff --git a/google/gax/grpc.py b/google/gax/grpc.py
index 9ec50fb..bb38550 100644
--- a/google/gax/grpc.py
+++ b/google/gax/grpc.py
@@ -58,6 +58,10 @@ STATUS_CODE_NAMES = {
"""Maps strings used in client config to gRPC status codes."""
+NAME_STATUS_CODES = dict([(v, k) for (k, v) in STATUS_CODE_NAMES.items()])
+"""Inverse map for STATUS_CODE_NAMES"""
+
+
def exc_to_code(exc):
"""Retrieves the status code from an exception"""
if not isinstance(exc, RpcError):
| Make it clear that the call may fail with the INVALID_ARGUMENT status code
### What:
Wrap the gRPC INVALID_ARGUMENT error with a Pythonic `ValueError`. We can do this with a special attribute that we consistently use for the wrapped gRPC error.
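A minimal sketch of that wrapping, mirroring the `create_error` helper in the patch above (the `config` helpers and `GaxError` exist in `google.gax` per that patch):
```python
from google.gax import config
from google.gax.errors import GaxError

def create_error(msg, cause=None):
    # An INVALID_ARGUMENT status from gRPC becomes Python's built-in
    # ValueError; any other failure keeps the generic GaxError wrapper.
    if config.NAME_STATUS_CODES.get(
            config.exc_to_code(cause)) == 'INVALID_ARGUMENT':
        return ValueError('{}: {}'.format(msg, cause))
    return GaxError(msg, cause)
```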
### Why:
Be more Pythonic.
See https://github.com/googleapis/googleapis/issues/15 | googleapis/gax-python | diff --git a/test/test_api_callable.py b/test/test_api_callable.py
index f45c5f4..f30ab66 100644
--- a/test/test_api_callable.py
+++ b/test/test_api_callable.py
@@ -38,6 +38,7 @@ from google.gax import (
api_callable, bundling, BackoffSettings, BundleDescriptor, BundleOptions,
_CallSettings, CallOptions, INITIAL_PAGE, PageDescriptor, RetryOptions)
from google.gax.errors import GaxError, RetryError
+import grpc
_SERVICE_NAME = 'test.interface.v1.api'
@@ -484,3 +485,15 @@ class TestCreateApiCallable(unittest2.TestCase):
other_error_callable = api_callable.create_api_call(
other_error_func, _CallSettings())
self.assertRaises(AnotherException, other_error_callable, None)
+
+ def test_wrap_value_error(self):
+
+ invalid_attribute_exc = grpc.RpcError()
+ invalid_attribute_exc.code = lambda: grpc.StatusCode.INVALID_ARGUMENT
+
+ def value_error_func(*dummy_args, **dummy_kwargs):
+ raise invalid_attribute_exc
+
+ value_error_callable = api_callable.create_api_call(
+ value_error_func, _CallSettings())
+ self.assertRaises(ValueError, value_error_callable, None)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 4
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
exceptiongroup==1.2.2
future==1.0.0
-e git+https://github.com/googleapis/gax-python.git@7b3172b9e8ebd1e513955bf60b42aa5f63d37d4a#egg=google_gax
grpcio==1.62.3
grpcio-tools==1.62.3
httplib2==0.22.0
importlib-metadata==6.7.0
iniconfig==2.0.0
linecache2==1.0.0
mock==5.2.0
oauth2client==4.1.3
packaging==24.0
pluggy==1.2.0
ply==3.8
protobuf==4.24.4
pyasn1==0.5.1
pyasn1-modules==0.3.0
pyparsing==3.1.4
pytest==7.4.4
pytest-cov==4.1.0
pytest-timeout==2.3.1
rsa==4.9
six==1.17.0
tomli==2.0.1
traceback2==1.4.0
typing_extensions==4.7.1
unittest2==1.1.0
zipp==3.15.0
| name: gax-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- coverage==7.2.7
- exceptiongroup==1.2.2
- future==1.0.0
- grpcio==1.62.3
- grpcio-tools==1.62.3
- httplib2==0.22.0
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- linecache2==1.0.0
- mock==5.2.0
- oauth2client==4.1.3
- packaging==24.0
- pluggy==1.2.0
- ply==3.8
- protobuf==4.24.4
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pyparsing==3.1.4
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-timeout==2.3.1
- rsa==4.9
- six==1.17.0
- tomli==2.0.1
- traceback2==1.4.0
- typing-extensions==4.7.1
- unittest2==1.1.0
- zipp==3.15.0
prefix: /opt/conda/envs/gax-python
| [
"test/test_api_callable.py::TestCreateApiCallable::test_wrap_value_error"
]
| []
| [
"test/test_api_callable.py::TestCreateApiCallable::test_bundling",
"test/test_api_callable.py::TestCreateApiCallable::test_bundling_page_streaming_error",
"test/test_api_callable.py::TestCreateApiCallable::test_call_api_call",
"test/test_api_callable.py::TestCreateApiCallable::test_call_kwargs",
"test/test_api_callable.py::TestCreateApiCallable::test_call_override",
"test/test_api_callable.py::TestCreateApiCallable::test_catch_error",
"test/test_api_callable.py::TestCreateApiCallable::test_construct_settings",
"test/test_api_callable.py::TestCreateApiCallable::test_construct_settings_override",
"test/test_api_callable.py::TestCreateApiCallable::test_construct_settings_override2",
"test/test_api_callable.py::TestCreateApiCallable::test_no_retry_if_no_codes",
"test/test_api_callable.py::TestCreateApiCallable::test_page_streaming",
"test/test_api_callable.py::TestCreateApiCallable::test_retry",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_aborts_on_unexpected_exception",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_aborts_simple",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_exponential_backoff",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_times_out_no_response",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_times_out_simple"
]
| []
| BSD 3-Clause "New" or "Revised" License | 762 | [
"google/gax/api_callable.py",
"google/gax/errors.py",
"google/gax/config.py",
"google/gax/grpc.py"
]
| [
"google/gax/api_callable.py",
"google/gax/errors.py",
"google/gax/config.py",
"google/gax/grpc.py"
]
|
drdoctr__doctr-123 | 45305afb454eb5dda06fd0deafce6ce70a0e5cee | 2016-09-20 19:35:45 | 45305afb454eb5dda06fd0deafce6ce70a0e5cee | diff --git a/doctr/__main__.py b/doctr/__main__.py
index 25889d4c..a50c3236 100644
--- a/doctr/__main__.py
+++ b/doctr/__main__.py
@@ -182,14 +182,15 @@ def configure(args, parser):
login_kwargs = {'auth': None, 'headers': None}
build_repo = input("What repo do you want to build the docs for (org/reponame, like 'drdoctr/doctr')? ")
- is_private = check_repo_exists(build_repo, **login_kwargs)
+ is_private = check_repo_exists(build_repo, service='github', **login_kwargs)
+ check_repo_exists(build_repo, service='travis')
deploy_repo = input("What repo do you want to deploy the docs to? [{build_repo}] ".format(build_repo=build_repo))
if not deploy_repo:
deploy_repo = build_repo
if deploy_repo != build_repo:
- check_repo_exists(deploy_repo, **login_kwargs)
+ check_repo_exists(deploy_repo, service='github', **login_kwargs)
N = IncrementingInt(1)
diff --git a/doctr/local.py b/doctr/local.py
index 7fbf9004..8fe0b6ef 100644
--- a/doctr/local.py
+++ b/doctr/local.py
@@ -212,7 +212,7 @@ def generate_ssh_key(note, keypath='github_deploy_key'):
with open(keypath + ".pub") as f:
return f.read()
-def check_repo_exists(deploy_repo, *, auth=None, headers=None):
+def check_repo_exists(deploy_repo, service='github', *, auth=None, headers=None):
"""
Checks that the repository exists on GitHub.
@@ -227,11 +227,19 @@ def check_repo_exists(deploy_repo, *, auth=None, headers=None):
raise RuntimeError('"{deploy_repo}" should be in the form username/repo'.format(deploy_repo=deploy_repo))
user, repo = deploy_repo.split('/')
- REPO_URL = 'https://api.github.com/repos/{user}/{repo}'
+ if service == 'github':
+ REPO_URL = 'https://api.github.com/repos/{user}/{repo}'
+ elif service == 'travis':
+ REPO_URL = 'https://api.travis-ci.org/repos/{user}/{repo}'
+ else:
+ raise RuntimeError('Invalid service specified for repo check (neither "travis" nor "github")')
+
r = requests.get(REPO_URL.format(user=user, repo=repo), auth=auth, headers=headers)
if r.status_code == requests.codes.not_found:
- raise RuntimeError('"{user}/{repo}" not found on GitHub. Exiting'.format(user=user, repo=repo))
+ raise RuntimeError('"{user}/{repo}" not found on {service}. Exiting'.format(user=user,
+ repo=repo,
+ service=service))
r.raise_for_status()
| Check for travis repo before generating keys.
Otherwise it will just fail and you'll have to regenerate.
It's a small optimisation, though, that will mostly affect users who make typos.
Note that GitHub is not case-sensitive for usernames but Travis is, so, for example, I regularly get into trouble when I write my username in lowercase and it involves Travis.
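A minimal sketch of the pre-flight check, using the Travis endpoint and 404 handling from the patch above:
```python
import requests

def travis_repo_exists(user, repo):
    """Fail fast if user/repo is unknown to Travis (names are
    case-sensitive there), before any deploy key is generated."""
    r = requests.get(
        'https://api.travis-ci.org/repos/{user}/{repo}'.format(
            user=user, repo=repo))
    if r.status_code == requests.codes.not_found:
        raise RuntimeError(
            '"{user}/{repo}" not found on travis. Exiting'.format(
                user=user, repo=repo))
    r.raise_for_status()
```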
| drdoctr/doctr | diff --git a/doctr/tests/test_local.py b/doctr/tests/test_local.py
index ba07bb4b..897cfa27 100644
--- a/doctr/tests/test_local.py
+++ b/doctr/tests/test_local.py
@@ -11,20 +11,33 @@ else:
HEADERS = None
-def test_bad_user():
+def test_github_bad_user():
with raises(RuntimeError):
check_repo_exists('---/invaliduser', headers=HEADERS)
-def test_bad_repo():
+def test_github_bad_repo():
with raises(RuntimeError):
check_repo_exists('drdoctr/---', headers=HEADERS)
-def test_repo_exists():
+def test_github_repo_exists():
assert not check_repo_exists('drdoctr/doctr', headers=HEADERS)
-def test_invalid_repo():
+def test_github_invalid_repo():
with raises(RuntimeError):
check_repo_exists('fdsf', headers=HEADERS)
with raises(RuntimeError):
check_repo_exists('fdsf/fdfs/fd', headers=HEADERS)
+
+def test_travis_bad_user():
+ with raises(RuntimeError):
+ # Travis is case-sensitive
+ check_repo_exists('dRdoctr/doctr', service='travis')
+
+def test_travis_bad_repo():
+ with raises(RuntimeError):
+ # Travis is case-sensitive
+ check_repo_exists('drdoctr/DoCtR', service='travis')
+
+def test_travis_repo_exists():
+ assert not check_repo_exists('drdoctr/doctr', service='travis')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 2
} | 1.3 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pyflakes"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
cryptography==44.0.2
-e git+https://github.com/drdoctr/doctr.git@45305afb454eb5dda06fd0deafce6ce70a0e5cee#egg=doctr
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pycparser==2.22
pyflakes==3.3.2
pytest==8.3.5
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
| name: doctr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pyflakes==3.3.2
- pytest==8.3.5
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/doctr
| [
"doctr/tests/test_local.py::test_travis_bad_user",
"doctr/tests/test_local.py::test_travis_bad_repo"
]
| [
"doctr/tests/test_local.py::test_travis_repo_exists"
]
| [
"doctr/tests/test_local.py::test_github_bad_user",
"doctr/tests/test_local.py::test_github_bad_repo",
"doctr/tests/test_local.py::test_github_repo_exists",
"doctr/tests/test_local.py::test_github_invalid_repo"
]
| []
| MIT License | 763 | [
"doctr/__main__.py",
"doctr/local.py"
]
| [
"doctr/__main__.py",
"doctr/local.py"
]
|
|
scrapy__scrapy-2275 | a19af5b164a23547f0327e1392526c7af67876a5 | 2016-09-21 13:32:49 | a975a50558cd78a1573bee2e957afcb419fd1bd6 | diff --git a/scrapy/http/response/__init__.py b/scrapy/http/response/__init__.py
index 983154001..58ad414f1 100644
--- a/scrapy/http/response/__init__.py
+++ b/scrapy/http/response/__init__.py
@@ -9,6 +9,8 @@ from six.moves.urllib.parse import urljoin
from scrapy.http.headers import Headers
from scrapy.utils.trackref import object_ref
from scrapy.http.common import obsolete_setter
+from scrapy.exceptions import NotSupported
+
class Response(object_ref):
@@ -80,3 +82,22 @@ class Response(object_ref):
"""Join this Response's url with a possible relative url to form an
absolute interpretation of the latter."""
return urljoin(self.url, url)
+
+ @property
+ def text(self):
+ """For subclasses of TextResponse, this will return the body
+ as text (unicode object in Python 2 and str in Python 3)
+ """
+ raise AttributeError("Response content isn't text")
+
+ def css(self, *a, **kw):
+ """Shortcut method implemented only by responses whose content
+ is text (subclasses of TextResponse).
+ """
+ raise NotSupported("Response content isn't text")
+
+ def xpath(self, *a, **kw):
+ """Shortcut method implemented only by responses whose content
+ is text (subclasses of TextResponse).
+ """
+ raise NotSupported("Response content isn't text")
| Idea: warn users when trying to use TextResponse functionality with plain Response
Currently, if we try to use TextResponse functionality like response.text or css()/xpath() methods with a plain Response (e.g. in case of binary content), we get an AttributeError:
```
>>> response.css
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-1-7d6e256164d4> in <module>()
----> 1 response.css
AttributeError: 'Response' object has no attribute 'css'
>>> response.xpath
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-2-4f61f6e9fc6e> in <module>()
----> 1 response.xpath
AttributeError: 'Response' object has no attribute 'xpath'
>>> response.text
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-be6a4a00df5e> in <module>()
----> 1 response.text
AttributeError: 'Response' object has no attribute 'text'
```
Would it make sense to add a few methods/properties to explain what's going on for new users?
I was thinking that, instead of an AttributeError, a better behavior could be a ValueError with a message giving a bit more context.
So, in plain `Response`, we could have:
```
def css(self, *args, **kw):
raise ValueError('Response content is not text')
def xpath(self, *args, **kw):
raise ValueError('Response content is not text')
@property
def text(self, *args, **kw):
raise ValueError('Response content is not text')
```
This would be nice, because we'd have to explain fewer things when teaching people about responses and also about using `.css` and `.xpath` methods.
What do you think?
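For reference, a short sketch of the behavior after the patch above, which keeps `AttributeError` for the `.text` property but raises scrapy's `NotSupported` from the `.css()`/`.xpath()` shortcuts rather than `ValueError`:
```python
from scrapy.exceptions import NotSupported
from scrapy.http import Response

r = Response('http://example.com', body=b'\x89PNG binary payload')
try:
    r.xpath('//body')
except NotSupported as exc:
    print(exc)  # Response content isn't text
```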
| scrapy/scrapy | diff --git a/tests/test_http_response.py b/tests/test_http_response.py
index c7f36687a..7624aa4c4 100644
--- a/tests/test_http_response.py
+++ b/tests/test_http_response.py
@@ -7,6 +7,7 @@ from scrapy.http import (Request, Response, TextResponse, HtmlResponse,
XmlResponse, Headers)
from scrapy.selector import Selector
from scrapy.utils.python import to_native_str
+from scrapy.exceptions import NotSupported
class BaseResponseTest(unittest.TestCase):
@@ -127,6 +128,18 @@ class BaseResponseTest(unittest.TestCase):
absolute = 'http://www.example.com/test'
self.assertEqual(joined, absolute)
+ def test_shortcut_attributes(self):
+ r = self.response_class("http://example.com", body=b'hello')
+ if self.response_class == Response:
+ msg = "Response content isn't text"
+ self.assertRaisesRegexp(AttributeError, msg, getattr, r, 'text')
+ self.assertRaisesRegexp(NotSupported, msg, r.css, 'body')
+ self.assertRaisesRegexp(NotSupported, msg, r.xpath, '//body')
+ else:
+ r.text
+ r.css('body')
+ r.xpath('//body')
+
class TextResponseTest(BaseResponseTest):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
coverage==7.8.0
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
execnet==2.1.1
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@a19af5b164a23547f0327e1392526c7af67876a5#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- coverage==7.8.0
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_http_response.py::BaseResponseTest::test_shortcut_attributes"
]
| [
"tests/test_http_response.py::TextResponseTest::test_invalid_utf8_encoded_body_with_valid_utf8_BOM",
"tests/test_http_response.py::TextResponseTest::test_selector",
"tests/test_http_response.py::TextResponseTest::test_selector_shortcuts",
"tests/test_http_response.py::TextResponseTest::test_shortcut_attributes",
"tests/test_http_response.py::HtmlResponseTest::test_invalid_utf8_encoded_body_with_valid_utf8_BOM",
"tests/test_http_response.py::HtmlResponseTest::test_selector",
"tests/test_http_response.py::HtmlResponseTest::test_selector_shortcuts",
"tests/test_http_response.py::HtmlResponseTest::test_shortcut_attributes",
"tests/test_http_response.py::XmlResponseTest::test_invalid_utf8_encoded_body_with_valid_utf8_BOM",
"tests/test_http_response.py::XmlResponseTest::test_selector",
"tests/test_http_response.py::XmlResponseTest::test_selector_shortcuts",
"tests/test_http_response.py::XmlResponseTest::test_shortcut_attributes"
]
| [
"tests/test_http_response.py::BaseResponseTest::test_copy",
"tests/test_http_response.py::BaseResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::BaseResponseTest::test_copy_meta",
"tests/test_http_response.py::BaseResponseTest::test_immutable_attributes",
"tests/test_http_response.py::BaseResponseTest::test_init",
"tests/test_http_response.py::BaseResponseTest::test_replace",
"tests/test_http_response.py::BaseResponseTest::test_urljoin",
"tests/test_http_response.py::TextResponseTest::test_bom_is_removed_from_body",
"tests/test_http_response.py::TextResponseTest::test_copy",
"tests/test_http_response.py::TextResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::TextResponseTest::test_copy_meta",
"tests/test_http_response.py::TextResponseTest::test_declared_encoding_invalid",
"tests/test_http_response.py::TextResponseTest::test_encoding",
"tests/test_http_response.py::TextResponseTest::test_immutable_attributes",
"tests/test_http_response.py::TextResponseTest::test_init",
"tests/test_http_response.py::TextResponseTest::test_replace",
"tests/test_http_response.py::TextResponseTest::test_replace_wrong_encoding",
"tests/test_http_response.py::TextResponseTest::test_unicode_body",
"tests/test_http_response.py::TextResponseTest::test_unicode_url",
"tests/test_http_response.py::TextResponseTest::test_urljoin",
"tests/test_http_response.py::TextResponseTest::test_urljoin_with_base_url",
"tests/test_http_response.py::TextResponseTest::test_utf16",
"tests/test_http_response.py::HtmlResponseTest::test_bom_is_removed_from_body",
"tests/test_http_response.py::HtmlResponseTest::test_copy",
"tests/test_http_response.py::HtmlResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::HtmlResponseTest::test_copy_meta",
"tests/test_http_response.py::HtmlResponseTest::test_declared_encoding_invalid",
"tests/test_http_response.py::HtmlResponseTest::test_encoding",
"tests/test_http_response.py::HtmlResponseTest::test_html5_meta_charset",
"tests/test_http_response.py::HtmlResponseTest::test_html_encoding",
"tests/test_http_response.py::HtmlResponseTest::test_immutable_attributes",
"tests/test_http_response.py::HtmlResponseTest::test_init",
"tests/test_http_response.py::HtmlResponseTest::test_replace",
"tests/test_http_response.py::HtmlResponseTest::test_replace_wrong_encoding",
"tests/test_http_response.py::HtmlResponseTest::test_unicode_body",
"tests/test_http_response.py::HtmlResponseTest::test_unicode_url",
"tests/test_http_response.py::HtmlResponseTest::test_urljoin",
"tests/test_http_response.py::HtmlResponseTest::test_urljoin_with_base_url",
"tests/test_http_response.py::HtmlResponseTest::test_utf16",
"tests/test_http_response.py::XmlResponseTest::test_bom_is_removed_from_body",
"tests/test_http_response.py::XmlResponseTest::test_copy",
"tests/test_http_response.py::XmlResponseTest::test_copy_inherited_classes",
"tests/test_http_response.py::XmlResponseTest::test_copy_meta",
"tests/test_http_response.py::XmlResponseTest::test_declared_encoding_invalid",
"tests/test_http_response.py::XmlResponseTest::test_encoding",
"tests/test_http_response.py::XmlResponseTest::test_immutable_attributes",
"tests/test_http_response.py::XmlResponseTest::test_init",
"tests/test_http_response.py::XmlResponseTest::test_replace",
"tests/test_http_response.py::XmlResponseTest::test_replace_encoding",
"tests/test_http_response.py::XmlResponseTest::test_replace_wrong_encoding",
"tests/test_http_response.py::XmlResponseTest::test_unicode_body",
"tests/test_http_response.py::XmlResponseTest::test_unicode_url",
"tests/test_http_response.py::XmlResponseTest::test_urljoin",
"tests/test_http_response.py::XmlResponseTest::test_urljoin_with_base_url",
"tests/test_http_response.py::XmlResponseTest::test_utf16",
"tests/test_http_response.py::XmlResponseTest::test_xml_encoding"
]
| []
| BSD 3-Clause "New" or "Revised" License | 764 | [
"scrapy/http/response/__init__.py"
]
| [
"scrapy/http/response/__init__.py"
]
|
|
simphony__tornado-webapi-17 | 5dd0c146089cc0a348a9798e2021447aa4ee311e | 2016-09-21 13:35:16 | 95ad403cd70109fb94727a119545567593d8203d | diff --git a/tornadowebapi/__init__.py b/tornadowebapi/__init__.py
index 12f7bda..9cf510d 100644
--- a/tornadowebapi/__init__.py
+++ b/tornadowebapi/__init__.py
@@ -1,5 +1,4 @@
-from .handler import ResourceHandler, CollectionHandler
-from .utils import url_path_join, with_end_slash
+from . import registry
MAJOR = 0
MINOR = 1
@@ -30,13 +29,4 @@ def api_handlers(base_urlpath, version="v1"):
The current implementation does not support multiple API versions yet.
The version option is only provided for futureproofing.
"""
- return [
- (with_end_slash(
- url_path_join(base_urlpath, "api", version, "(.*)", "(.*)")),
- ResourceHandler
- ),
- (with_end_slash(
- url_path_join(base_urlpath, "api", version, "(.*)")),
- CollectionHandler
- ),
- ]
+ return registry.registry.api_handlers(base_urlpath, version)
diff --git a/tornadowebapi/handler.py b/tornadowebapi/handler.py
index f9c8956..609a1c5 100644
--- a/tornadowebapi/handler.py
+++ b/tornadowebapi/handler.py
@@ -4,11 +4,14 @@ from tornado.log import app_log
from . import exceptions
from .http import httpstatus
from .http.payloaded_http_error import PayloadedHTTPError
-from .registry import registry
from .utils import url_path_join, with_end_slash
class BaseHandler(web.RequestHandler):
+ def initialize(self, registry):
+ """Initialization method for when the class is instantiated."""
+ self._registry = registry
+
@gen.coroutine
def prepare(self):
"""Runs before any specific handler. """
@@ -17,8 +20,8 @@ class BaseHandler(web.RequestHandler):
@property
def registry(self):
- """Returns the global class vs Resource registry"""
- return registry
+ """Returns the class vs Resource registry"""
+ return self._registry
@property
def log(self):
diff --git a/tornadowebapi/registry.py b/tornadowebapi/registry.py
index c5bac97..ee6bfe0 100644
--- a/tornadowebapi/registry.py
+++ b/tornadowebapi/registry.py
@@ -1,3 +1,5 @@
+from .handler import ResourceHandler, CollectionHandler
+from .utils import url_path_join, with_end_slash
from .resource import Resource
from .authenticator import NullAuthenticator
@@ -63,5 +65,36 @@ class Registry:
"""If the registry contains the given item"""
return item in self._registered_types
+ def api_handlers(self, base_urlpath, version="v1"):
+ """Returns the API handlers for the interface.
+ Add these handlers to your application to provide an
+ interface to your Resources.
+
+
+ Parameters
+ ----------
+ base_urlpath: str
+ The base url path to serve
+ version: str
+ A string identifying the version of the API.
+
+ Notes
+ -----
+ The current implementation does not support multiple API versions yet.
+ The version option is only provided for futureproofing.
+ """
+ return [
+ (with_end_slash(
+ url_path_join(base_urlpath, "api", version, "(.*)", "(.*)")),
+ ResourceHandler,
+ dict(registry=self)
+ ),
+ (with_end_slash(
+ url_path_join(base_urlpath, "api", version, "(.*)")),
+ CollectionHandler,
+ dict(registry=self)
+ ),
+ ]
+
#: global registry for registration of the classes.
registry = Registry()
| Support multiple registries
The current global registry is convenient for a single application, but it is a global object, which introduces major problems when testing. We need to be able to handle multiple registries, each one providing its own API.
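A minimal sketch of the intended per-registry usage, mirroring the TestNonGlobalRegistry case in the test patch below; the Teacher resource body is elided here, and a real resource would implement coroutines such as retrieve()/items():

```
from tornado import web
from tornadowebapi.registry import Registry
from tornadowebapi.resource import Resource

class Teacher(Resource):
    pass  # a real resource would define retrieve()/items() coroutines

# Each Registry instance owns its own resources and produces its own
# handlers, so tests can build isolated applications side by side.
reg = Registry()
reg.register(Teacher)
app = web.Application(handlers=reg.api_handlers('/'))
```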
| simphony/tornado-webapi | diff --git a/tornadowebapi/tests/test_registry.py b/tornadowebapi/tests/test_registry.py
index 1dc5433..0b793df 100644
--- a/tornadowebapi/tests/test_registry.py
+++ b/tornadowebapi/tests/test_registry.py
@@ -42,3 +42,11 @@ class TestRegistry(unittest.TestCase):
reg = Registry()
self.assertIsNotNone(reg.authenticator)
+
+ def test_api_handlers(self):
+ reg = Registry()
+ api_handlers = reg.api_handlers("/foo")
+ self.assertEqual(len(api_handlers), 2)
+
+ self.assertEqual(api_handlers[0][2]["registry"], reg)
+ self.assertEqual(api_handlers[1][2]["registry"], reg)
diff --git a/tornadowebapi/tests/test_webapi.py b/tornadowebapi/tests/test_webapi.py
index d756ece..3a3c8e5 100644
--- a/tornadowebapi/tests/test_webapi.py
+++ b/tornadowebapi/tests/test_webapi.py
@@ -6,6 +6,7 @@ from unittest import mock
import tornadowebapi
from tornadowebapi import registry, exceptions
from tornadowebapi.http import httpstatus
+from tornadowebapi.registry import Registry
from tornadowebapi.resource import Resource
from tornadowebapi.handler import ResourceHandler, CollectionHandler
from tornadowebapi.tests.utils import AsyncHTTPTestCase
@@ -56,6 +57,16 @@ class Student(Resource):
return list(self.collection.keys())
+class Teacher(Resource):
+ @gen.coroutine
+ def retrieve(self, identifier):
+ return {}
+
+ @gen.coroutine
+ def items(self):
+ return []
+
+
class UnsupportAll(Resource):
pass
@@ -399,3 +410,23 @@ class TestRESTFunctions(unittest.TestCase):
self.assertEqual(handlers[0][1], ResourceHandler)
self.assertEqual(handlers[1][0], "/foo/api/v1/(.*)/")
self.assertEqual(handlers[1][1], CollectionHandler)
+
+
+class TestNonGlobalRegistry(AsyncHTTPTestCase):
+ def setUp(self):
+ super().setUp()
+ Student.collection = OrderedDict()
+ Student.id = 0
+
+ def get_app(self):
+ self.registry = Registry()
+ self.registry.register(Teacher)
+ handlers = self.registry.api_handlers('/')
+ app = web.Application(handlers=handlers)
+ return app
+
+ def test_non_global_registry(self):
+ res = self.fetch("/api/v1/teachers/")
+ self.assertEqual(res.code, httpstatus.OK)
+ self.assertEqual(escape.json_decode(res.body),
+ {"items": []})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"tornado"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
tomli==2.2.1
tornado==6.4.2
-e git+https://github.com/simphony/tornado-webapi.git@5dd0c146089cc0a348a9798e2021447aa4ee311e#egg=tornadowebapi
| name: tornado-webapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- tomli==2.2.1
- tornado==6.4.2
prefix: /opt/conda/envs/tornado-webapi
| [
"tornadowebapi/tests/test_registry.py::TestRegistry::test_api_handlers",
"tornadowebapi/tests/test_webapi.py::TestNonGlobalRegistry::test_non_global_registry"
]
| []
| [
"tornadowebapi/tests/test_registry.py::TestRegistry::test_authenticator",
"tornadowebapi/tests/test_registry.py::TestRegistry::test_instantiation",
"tornadowebapi/tests/test_webapi.py::TestREST::test_broken",
"tornadowebapi/tests/test_webapi.py::TestREST::test_create",
"tornadowebapi/tests/test_webapi.py::TestREST::test_delete",
"tornadowebapi/tests/test_webapi.py::TestREST::test_items",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_non_json",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_on_resource",
"tornadowebapi/tests/test_webapi.py::TestREST::test_retrieve",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unexistent_resource_type",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unprocessable",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupported_methods",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupports_collections",
"tornadowebapi/tests/test_webapi.py::TestREST::test_update",
"tornadowebapi/tests/test_webapi.py::TestRESTFunctions::test_api_handlers"
]
| []
| BSD 3-Clause "New" or "Revised" License | 765 | [
"tornadowebapi/__init__.py",
"tornadowebapi/handler.py",
"tornadowebapi/registry.py"
]
| [
"tornadowebapi/__init__.py",
"tornadowebapi/handler.py",
"tornadowebapi/registry.py"
]
|
|
Azure__azure-data-lake-store-python-76 | 46645c61e903d76585c410baadb1f49885fdbb2e | 2016-09-21 18:16:23 | 46645c61e903d76585c410baadb1f49885fdbb2e | jbcrail: All benchmarks completed without errors:
```
[bench_upload_1_100gb] starting...
[bench_upload_1_100gb] finished in 612.4005s
[bench_download_1_100gb] starting...
[bench_download_1_100gb] finished in 1234.5771s
[bench_upload_100_1gb] starting...
[bench_upload_100_1gb] finished in 762.1828s
[bench_download_100_1gb] starting...
[bench_download_100_1gb] finished in 1207.7382s
```
jbcrail: Updated benchmark results:
```
[bench_upload_1_50gb] starting...
local file : D:\ingress\50gbfile.txt
local file size : 52425000000
remote file : joseph/50gbfile.txt
remote file size: 52425000000
[bench_upload_1_50gb] finished in 263.0346s
[bench_download_1_50gb] starting...
local file : D:\ingress\50gbfile.txt.out
local file size : 52425000000
remote file : joseph/50gbfile.txt
remote file size: 52425000000
[bench_download_1_50gb] finished in 397.7892s
>>> md5sum('D:\\ingress\\50gbfile.txt')
'60f5fba5f5eb30ab75f84214e184acae'
>>> md5sum('D:\\ingress\\50gbfile.txt.out')
'6c37bf52d23560df955d36b6d32b822a'
```
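The md5sum calls above compare source and destination digests; the mismatch in this run flags a corrupted transfer. The helper itself is not included in this record, so the following is only a hedged sketch of what such a function could look like (the name md5sum and the 1 MB chunk size are assumptions):

```
import hashlib

def md5sum(path, blocksize=2**20):
    # Hex MD5 digest of a file, read in chunks to bound memory use.
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            md5.update(block)
    return md5.hexdigest()
```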
jbcrail: Updated benchmark results:
```
md5sum('D:\ingress\50gbfile.txt') = 60f5fba5f5eb30ab75f84214e184acae
[bench_upload_1_50gb] starting...
local file : D:\ingress\50gbfile.txt
local file size : 52425000000
remote file : joseph/50gbfile.txt
remote file size: 52425000000
[bench_upload_1_50gb] finished in 281.7676s
[bench_download_1_50gb] starting...
local file : D:\ingress\50gbfile.txt.out
local file size : 52425000000
remote file : joseph/50gbfile.txt
remote file size: 52425000000
[bench_download_1_50gb] finished in 466.3828s
md5sum('D:\ingress\50gbfile.txt.out') = 60f5fba5f5eb30ab75f84214e184acae
```
begoldsm: Thank you for the updated benchmark test @jbcrail! | diff --git a/adlfs/cli.py b/adlfs/cli.py
index 0c6c2ca..f8d2a1f 100644
--- a/adlfs/cli.py
+++ b/adlfs/cli.py
@@ -179,7 +179,7 @@ class AzureDataLakeFSCommand(cmd.Cmd, object):
parser = argparse.ArgumentParser(prog="get", add_help=False)
parser.add_argument('remote_path', type=str)
parser.add_argument('local_path', type=str, nargs='?', default='.')
- parser.add_argument('-b', '--chunksize', type=int, default=2**22)
+ parser.add_argument('-b', '--chunksize', type=int, default=2**28)
parser.add_argument('-c', '--threads', type=int, default=None)
args = parser.parse_args(line.split())
@@ -303,7 +303,7 @@ class AzureDataLakeFSCommand(cmd.Cmd, object):
parser = argparse.ArgumentParser(prog="put", add_help=False)
parser.add_argument('local_path', type=str)
parser.add_argument('remote_path', type=str, nargs='?', default='.')
- parser.add_argument('-b', '--chunksize', type=int, default=2**22)
+ parser.add_argument('-b', '--chunksize', type=int, default=2**28)
parser.add_argument('-c', '--threads', type=int, default=None)
args = parser.parse_args(line.split())
diff --git a/adlfs/core.py b/adlfs/core.py
index b36944c..a803176 100644
--- a/adlfs/core.py
+++ b/adlfs/core.py
@@ -24,7 +24,7 @@ import time
# local imports
from .lib import DatalakeRESTInterface, auth, refresh_token
-from .utils import FileNotFoundError, PY2, ensure_writable, read_block
+from .utils import FileNotFoundError, PermissionError, PY2, ensure_writable, read_block
if sys.version_info >= (3, 4):
import pathlib
@@ -619,10 +619,12 @@ class AzureDLFile(object):
if self.mode in {'wb', 'ab'} and not self.closed:
if self.buffer.tell() == 0:
if force and self.first_write:
- self.azure.azure.call('CREATE',
- path=self.path.as_posix(),
- overwrite='true',
- write='true')
+ _put_data(self.azure.azure,
+ 'CREATE',
+ path=self.path.as_posix(),
+ data=None,
+ overwrite='true',
+ write='true')
self.first_write = False
return
self.buffer.seek(0)
@@ -636,37 +638,44 @@ class AzureDLFile(object):
else:
limit = place + len(self.delimiter)
if self.first_write:
- out = self.azure.azure.call('CREATE',
- path=self.path.as_posix(),
- data=data[:limit],
- overwrite='true',
- write='true')
+ out = _put_data(self.azure.azure,
+ 'CREATE',
+ path=self.path.as_posix(),
+ data=data[:limit],
+ overwrite='true',
+ write='true')
self.first_write = False
else:
- out = self.azure.azure.call('APPEND',
- path=self.path.as_posix(),
- data=data[:limit],
- append='true')
+ out = _put_data(self.azure.azure,
+ 'APPEND',
+ path=self.path.as_posix(),
+ data=data[:limit],
+ append='true')
logger.debug('Wrote %d bytes to %s' % (limit, self))
data = data[limit:]
self.buffer = io.BytesIO(data)
self.buffer.seek(0, 2)
if not self.delimiter or force:
+ zero_offset = self.tell() - len(data)
offsets = range(0, len(data), self.blocksize)
for o in offsets:
+ offset = zero_offset + o
d2 = data[o:o+self.blocksize]
if self.first_write:
- out = self.azure.azure.call('CREATE',
- path=self.path.as_posix(),
- data=d2,
- overwrite='true',
- write='true')
+ out = _put_data(self.azure.azure,
+ 'CREATE',
+ path=self.path.as_posix(),
+ data=d2,
+ overwrite='true',
+ write='true')
self.first_write = False
else:
- out = self.azure.azure.call('APPEND',
- path=self.path.as_posix(),
- data=d2,
- append='true')
+ out = _put_data(self.azure.azure,
+ 'APPEND',
+ path=self.path.as_posix(),
+ data=d2,
+ offset=offset,
+ append='true')
logger.debug('Wrote %d bytes to %s' % (len(d2), self))
self.buffer = io.BytesIO()
return out
@@ -725,6 +734,20 @@ def _fetch_range(rest, path, start, end, max_attempts=10):
raise RuntimeError("Max number of ADL retries exceeded")
+def _put_data(rest, op, path, data, max_attempts=10, **kwargs):
+ logger.debug("Put: %s %s, %s", op, path, kwargs)
+ for i in range(max_attempts):
+ try:
+ resp = rest.call(op, path=path, data=data, **kwargs)
+ return resp
+ except (PermissionError, FileNotFoundError):
+ raise
+ except Exception as e:
+ logger.debug('Exception %s on ADL upload, retrying', e,
+ exc_info=True)
+ raise RuntimeError("Max number of ADL retries exceeded")
+
+
class AzureDLPath(type(pathlib.PurePath())):
"""
Subclass of native object-oriented filesystem path.
diff --git a/adlfs/lib.py b/adlfs/lib.py
index fac6e4f..862a42f 100644
--- a/adlfs/lib.py
+++ b/adlfs/lib.py
@@ -25,6 +25,8 @@ import time
import adal
import azure
+from .utils import FileNotFoundError, PermissionError
+
client_id = "1950a258-227b-4e31-a9cf-717495945fc2"
logger = logging.getLogger(__name__)
@@ -181,7 +183,7 @@ class DatalakeRESTInterface:
ends = {
# OP: (HTTP method, required fields, allowed fields)
- 'APPEND': ('post', set(), {'append'}),
+ 'APPEND': ('post', set(), {'append', 'offset'}),
'CHECKACCESS': ('get', set(), {'fsaction'}),
'CONCAT': ('post', {'sources'}, {'sources'}),
'MSCONCAT': ('post', set(), {'deleteSourceDirectory'}),
@@ -246,7 +248,11 @@ class DatalakeRESTInterface:
r = func(url, params=params, headers=self.head, data=data)
except requests.exceptions.RequestException as e:
raise DatalakeRESTException('HTTP error: %s', str(e))
- if r.status_code >= 400:
+ if r.status_code == 403:
+ raise PermissionError(path)
+ elif r.status_code == 404:
+ raise FileNotFoundError(path)
+ elif r.status_code >= 400:
raise DatalakeRESTException("Data-lake REST exception: %s, %s, %s" %
(op, r.status_code, r.content.decode()))
if r.content:
diff --git a/adlfs/multithread.py b/adlfs/multithread.py
index aceb05e..7f6997a 100644
--- a/adlfs/multithread.py
+++ b/adlfs/multithread.py
@@ -135,7 +135,7 @@ class ADLDownloader(object):
with open(dst, 'wb'):
pass
- self.client.run(nthreads, monitor, before_scatter=touch)
+ self.client.run(nthreads, monitor, before_start=touch)
@staticmethod
def load():
@@ -160,6 +160,7 @@ def get_chunk(adlfs, src, dst, offset, size, blocksize, retries=MAXRETRIES,
Internal function used by `download`.
"""
+ nbytes = 0
with adlfs.open(src, 'rb') as fin:
end = offset + size
miniblock = min(size, blocksize)
@@ -168,21 +169,23 @@ def get_chunk(adlfs, src, dst, offset, size, blocksize, retries=MAXRETRIES,
fin.seek(offset)
for o in range(offset, end, miniblock):
if shutdown_event and shutdown_event.is_set():
- return
+ return nbytes, None
tries = 0
while True:
try:
- fout.write(fin.read(miniblock))
+ data = fin.read(miniblock)
+ nbytes += fout.write(data)
break
except Exception as e:
# TODO : only some exceptions should be retriable
logger.debug('Download failed %s, byte offset %s; %s, %s', dst,
- o, e, e.args)
+ o, e, e.args)
tries += 1
if tries >= retries:
logger.debug('Aborting %s, byte offset %s', dst, o)
- raise
+ return nbytes, str(e)
logger.debug('Downloaded to %s, byte offset %s', dst, offset)
+ return nbytes, None
class ADLUploader(object):
@@ -306,17 +309,19 @@ def put_chunk(adlfs, src, dst, offset, size, blocksize, retries=MAXRETRIES,
Internal function used by `upload`.
"""
+ nbytes = 0
with adlfs.open(dst, 'wb', delimiter=delimiter) as fout:
end = offset + size
miniblock = min(size, blocksize)
with open(src, 'rb') as fin:
for o in range(offset, end, miniblock):
if shutdown_event and shutdown_event.is_set():
- return False
+ return nbytes, None
tries = 0
while True:
try:
- fout.write(read_block(fin, o, miniblock, delimiter))
+ data = read_block(fin, o, miniblock, delimiter)
+ nbytes += fout.write(data)
break
except Exception as e:
# TODO : only some exceptions should be retriable
@@ -325,9 +330,9 @@ def put_chunk(adlfs, src, dst, offset, size, blocksize, retries=MAXRETRIES,
tries += 1
if tries >= retries:
logger.debug('Aborting %s, byte offset %s', src, o)
- raise
+ return nbytes, str(e)
logger.debug('Uploaded from %s, byte offset %s', src, offset)
- return True
+ return nbytes, None
def merge_chunks(adlfs, outfile, files, shutdown_event=None):
diff --git a/adlfs/transfer.py b/adlfs/transfer.py
index b1c938a..07f1aeb 100644
--- a/adlfs/transfer.py
+++ b/adlfs/transfer.py
@@ -102,8 +102,8 @@ class StateManager(object):
# Named tuples used to serialize client progress
-File = namedtuple('File', 'src dst state nbytes start stop chunks')
-Chunk = namedtuple('Chunk', 'name state offset retries')
+File = namedtuple('File', 'src dst state length start stop chunks')
+Chunk = namedtuple('Chunk', 'name state offset retries expected actual exception')
class ADLTransferClient(object):
@@ -157,8 +157,7 @@ class ADLTransferClient(object):
When a merge step is available, the client will write chunks to temporary
files before merging. The exact temporary file is dependent upon on two
parameters (`tmp_path`, `tmp_unique`). Given those values, the full
- temporary file can be accessed via the `temporary_path` property and looks
- like this in pseudo-BNF:
+ temporary file looks like this in pseudo-BNF:
>>> # /{tmp_path}[/{unique_str}]/{basename}_{offset}
@@ -175,7 +174,8 @@ class ADLTransferClient(object):
and destination of the respective file transfer. `offset` is the location
in `src` to read `size` bytes from. `blocksize` is the number of bytes in a
chunk to write at one time. `retries` is the number of time an Azure query
- will be tried.
+ will be tried. The callable should return an integer representing the
+ number of bytes written.
The `merge` callable has the function signature,
`fn(adlfs, outfile, files, delete_source, shutdown_event)`. `adlfs` is
@@ -187,13 +187,37 @@ class ADLTransferClient(object):
The event will be set when a shutdown is requested. It is good practice
to listen for this.
+ Internal State
+ --------------
+
+ self._fstates: StateManager
+ This captures the current state of each transferred file.
+ self._files: dict
+ Using a tuple of the file source/destination as the key, this
+ dictionary stores the file metadata and all chunk states. The
+ dictionary key is `(src, dst)` and the value is
+ `dict(length, start, stop, cstates)`.
+ self._chunks: dict
+ Using a tuple of the chunk name/offset as the key, this dictionary
+ stores the chunk metadata and has a reference to the chunk's parent
+ file. The dictionary key is `(name, offset)` and the value is
+ `dict(parent=(src, dst), retries, expected, actual, exception)`.
+ self._ffutures: dict
+ Using a Future object as the key, this dictionary provides a reverse
+ lookup for the file associated with the given future. The returned
+ value is the file's primary key, `(src, dst)`.
+ self._cfutures: dict
+ Using a Future object as the key, this dictionary provides a reverse
+ lookup for the chunk associated with the given future. The returned
+ value is the chunk's primary key, `(name, offset)`.
+
See Also
--------
adlfs.multithread.ADLDownloader
adlfs.multithread.ADLUploader
"""
- DEFAULT_TMP_PATH = os.path.join(os.path.sep, 'tmp')
+ DEFAULT_TMP_PATH = 'tmp'
def __init__(self, adlfs, name, transfer, merge=None, nthreads=None,
chunksize=2**28, blocksize=2**25, tmp_path=DEFAULT_TMP_PATH,
@@ -211,54 +235,74 @@ class ADLTransferClient(object):
self._persist_path = persist_path
self._pool = ThreadPoolExecutor(self._nthreads)
self._shutdown_event = threading.Event()
+
+ # Internal state tracking files/chunks/futures
self._files = {}
+ self._chunks = {}
+ self._ffutures = {}
+ self._cfutures = {}
self._fstates = StateManager(
'pending', 'transferring', 'merging', 'finished', 'cancelled',
'errored')
- def submit(self, src, dst, nbytes):
+ def submit(self, src, dst, length):
"""
- All submitted files start in the `pending` state until `run()` is
- called.
+ Split a given file into chunks.
+
+ All submitted files/chunks start in the `pending` state until `run()`
+ is called.
"""
- self._fstates[(src, dst)] = 'pending'
- self._files[(src, dst)] = dict(
- nbytes=nbytes,
- start=None,
- stop=None,
- chunks={},
- cstates=StateManager('running', 'finished', 'cancelled', 'errored'),
- merge=None)
+ cstates = StateManager(
+ 'pending', 'running', 'finished', 'cancelled', 'errored')
- def _submit(self, fn, *args, **kwargs):
- kwargs['shutdown_event'] = self._shutdown_event
- return self._pool.submit(fn, *args, **kwargs)
+ # Create unique temporary directory for each file
+ if self._tmp_unique and self._tmp_path:
+ tmpdir = os.path.join(self._tmp_path, uuid.uuid4().hex)
+ else:
+ tmpdir = self._tmp_path
- def _scatter(self, src, dst, transfer):
- """ Split a given file into chunks """
- dic = self._files[(src, dst)]
- self._fstates[(src, dst)] = 'transferring'
- offsets = list(range(0, dic['nbytes'], self._chunksize))
+ offsets = list(range(0, length, self._chunksize))
for offset in offsets:
if self._tmp_path and len(offsets) > 1:
name = os.path.join(
- self.temporary_path,
+ os.path.sep,
+ tmpdir,
dst.name + '_' + str(offset))
else:
name = dst
- logger.debug("Submitted %s, byte offset %d", name, offset)
- dic['cstates'][name] = 'running'
- dic['chunks'][name] = dict(
- future=self._submit(transfer, self._adlfs, src, name, offset,
- self._chunksize, self._blocksize),
+ cstates[(name, offset)] = 'pending'
+ self._chunks[(name, offset)] = dict(
+ parent=(src, dst),
retries=self._chunkretries,
- offset=offset)
+ expected=min(length - offset, self._chunksize),
+ actual=0,
+ exception=None)
+ logger.debug("Submitted %s, byte offset %d", name, offset)
- @property
- def temporary_path(self):
- """ Return temporary path used to store chunks before merging """
- subdir = uuid.uuid1().hex[:10] if self._tmp_unique else ''
- return os.path.join(self._tmp_path, subdir)
+ self._fstates[(src, dst)] = 'pending'
+ self._files[(src, dst)] = dict(
+ length=length,
+ start=None,
+ stop=None,
+ cstates=cstates)
+
+ def _submit(self, fn, *args, **kwargs):
+ kwargs['shutdown_event'] = self._shutdown_event
+ future = self._pool.submit(fn, *args, **kwargs)
+ future.add_done_callback(self._update)
+ return future
+
+ def _start(self, src, dst):
+ key = (src, dst)
+ self._fstates[key] = 'transferring'
+ self._files[key]['start'] = time.time()
+ for obj in self._files[key]['cstates'].objects:
+ name, offset = obj
+ self._files[key]['cstates'][obj] = 'running'
+ future = self._submit(
+ self._transfer, self._adlfs, src, name, offset,
+ self._chunksize, self._blocksize)
+ self._cfutures[future] = obj
@property
def progress(self):
@@ -267,110 +311,110 @@ class ADLTransferClient(object):
for key in self._files:
src, dst = key
chunks = []
- for name in self._files[key]['chunks']:
+ for obj in self._files[key]['cstates'].objects:
+ name, offset = obj
chunks.append(Chunk(
name=name,
- state=self._files[key]['cstates'][name],
- offset=self._files[key]['chunks'][name]['offset'],
- retries=self._files[key]['chunks'][name]['retries']))
+ offset=offset,
+ state=self._files[key]['cstates'][obj],
+ retries=self._chunks[obj]['retries'],
+ expected=self._chunks[obj]['expected'],
+ actual=self._chunks[obj]['actual'],
+ exception=self._chunks[obj]['exception']))
files.append(File(
src=src,
dst=dst,
state=self._fstates[key],
- nbytes=self._files[key]['nbytes'],
+ length=self._files[key]['length'],
start=self._files[key]['start'],
stop=self._files[key]['stop'],
chunks=chunks))
return files
- def _status(self, src, dst, nbytes, start, stop):
- elapsed = stop - start
- rate = nbytes / elapsed / 1024 / 1024
+ def _status(self, src, dst):
+ dic = self._files[(src, dst)]
+ elapsed = dic['stop'] - dic['start']
+ rate = dic['length'] / elapsed / 1024 / 1024
logger.info("Transferred %s -> %s in %f seconds at %f MB/s",
src, dst, elapsed, rate)
- def _update(self):
- for (src, dst), dic in self._files.items():
- if self._fstates[(src, dst)] == 'transferring':
- for name in list(dic['chunks']):
- future = dic['chunks'][name]['future']
- if not future.done():
- continue
- if future.cancelled():
- dic['cstates'][name] = 'cancelled'
- elif future.exception():
- dic['cstates'][name] = 'errored'
- else:
- dic['cstates'][name] = 'finished'
- if dic['cstates'].contains_all('finished'):
- logger.debug("Chunks transferred")
- chunks = list(dic['chunks'])
- if self._merge and len(chunks) > 1:
- logger.debug("Merging file: %s", self._fstates[(src, dst)])
- self._fstates[(src, dst)] = 'merging'
- dic['merge'] = self._submit(self._merge, self._adlfs,
- dst, chunks)
- else:
- dic['stop'] = time.time()
- self._fstates[(src, dst)] = 'finished'
- self._status(src, dst, dic['nbytes'], dic['start'], dic['stop'])
- elif dic['cstates'].contains_none('running'):
- logger.debug("Transfer failed: %s", dic['cstates'])
- self._fstates[(src, dst)] = 'errored'
+ def _update(self, future):
+ if future in self._cfutures:
+ obj = self._cfutures[future]
+ parent = self._chunks[obj]['parent']
+ cstates = self._files[parent]['cstates']
+
+ if future.cancelled():
+ cstates[obj] = 'cancelled'
+ elif future.exception():
+ cstates[obj] = 'errored'
+ else:
+ nbytes, exception = future.result()
+ self._chunks[obj]['actual'] = nbytes
+ self._chunks[obj]['exception'] = exception
+ if exception:
+ cstates[obj] = 'errored'
+ elif self._chunks[obj]['expected'] != nbytes:
+ cstates[obj] = 'errored'
else:
- logger.debug("Transferring chunks: %s", dic['cstates'])
- elif self._fstates[(src, dst)] == 'merging':
- future = dic['merge']
- if not future.done():
- continue
- if future.cancelled():
- self._fstates[(src, dst)] = 'cancelled'
- elif future.exception():
- self._fstates[(src, dst)] = 'errored'
+ cstates[obj] = 'finished'
+
+ if cstates.contains_all('finished'):
+ logger.debug("Chunks transferred")
+ src, dst = parent
+ if self._merge and len(cstates.objects) > 1:
+ logger.debug("Merging file: %s", self._fstates[parent])
+ self._fstates[parent] = 'merging'
+ merge_future = self._submit(
+ self._merge, self._adlfs, dst,
+ [name for name, _ in sorted(cstates.objects,
+ key=lambda obj: obj[1])])
+ self._ffutures[merge_future] = parent
else:
- dic['stop'] = time.time()
- self._fstates[(src, dst)] = 'finished'
- self._status(src, dst, dic['nbytes'], dic['start'], dic['stop'])
+ self._fstates[parent] = 'finished'
+ self._files[parent]['stop'] = time.time()
+ self._status(src, dst)
+ elif cstates.contains_none('running'):
+ logger.debug("Transfer failed: %s", cstates)
+ self._fstates[parent] = 'errored'
+ elif future in self._ffutures:
+ src, dst = self._ffutures[future]
+
+ if future.cancelled():
+ self._fstates[(src, dst)] = 'cancelled'
+ elif future.exception():
+ self._fstates[(src, dst)] = 'errored'
+ else:
+ result = future.result()
+ self._fstates[(src, dst)] = 'finished'
+ self._files[(src, dst)]['stop'] = time.time()
+ self._status(src, dst)
self.save()
- def run(self, nthreads=None, monitor=True, before_scatter=None):
+ def run(self, nthreads=None, monitor=True, before_start=None):
self._nthreads = nthreads or self._nthreads
for src, dst in self._files:
- self._files[(src, dst)]['start'] = time.time()
- self._fstates[(src, dst)] = 'transferring'
- if before_scatter:
- before_scatter(self._adlfs, src, dst)
- self._scatter(src, dst, self._transfer)
+ if before_start:
+ before_start(self._adlfs, src, dst)
+ self._start(src, dst)
if monitor:
self.monitor()
- def _cancel(self):
- for dic in self._files.values():
- for transfer in dic['chunks'].values():
- transfer['future'].cancel()
- if dic['merge']:
- dic['merge'].cancel()
-
def _wait(self, poll=0.1, timeout=0):
start = time.time()
while not self._fstates.contains_none('pending', 'transferring', 'merging'):
if timeout > 0 and time.time() - start > timeout:
break
time.sleep(poll)
- self._update()
def _clear(self):
- for dic in self._files.values():
- for name in dic['chunks']:
- dic['chunks'][name]['future'] = None
- dic['merge'] = None
+ self._cfutures = {}
+ self._ffutures = {}
self._pool = None
def shutdown(self):
self._shutdown_event.set()
- self._cancel()
self._pool.shutdown(wait=True)
- self._update()
def monitor(self, poll=0.1, timeout=0):
""" Wait for download to happen """
@@ -384,20 +428,16 @@ class ADLTransferClient(object):
def __getstate__(self):
dic2 = self.__dict__.copy()
+ dic2.pop('_cfutures', None)
+ dic2.pop('_ffutures', None)
dic2.pop('_transfer', None)
dic2.pop('_merge', None)
dic2.pop('_pool', None)
dic2.pop('_shutdown_event', None)
+
dic2['_files'] = dic2.get('_files', {}).copy()
- for k, v in list(dic2['_files'].items()):
- v = v.copy()
- v['chunks'] = v['chunks'].copy()
- for ck, cv in list(v['chunks'].items()):
- cv = cv.copy()
- cv['future'] = None
- v['chunks'][ck] = cv
- v['merge'] = None
- dic2['_files'][k] = v
+ dic2['_chunks'] = dic2.get('_chunks', {}).copy()
+
return dic2
@staticmethod
diff --git a/adlfs/utils.py b/adlfs/utils.py
index 89b8327..a295f24 100644
--- a/adlfs/utils.py
+++ b/adlfs/utils.py
@@ -21,6 +21,12 @@ except NameError:
class FileNotFoundError(IOError):
pass
+try:
+ PermissionError = PermissionError
+except NameError:
+ class PermissionError(OSError):
+ pass
+
WIN = platform.platform() == 'Windows'
if WIN:
| PRI 0: Use the Offset parameter in append to ensure we are always appending at the offset we expect
The append API accepts a query parameter called offset, which lets you specify where you believe the end of the file is when appending data. This is very useful for us, since it allows us to submit an append call at the offset where we "think" the file currently ends. If data has already been uploaded at that location, you will receive a 400 error with a BadOffsetException, which you can catch; this indicates that data is already present at that location, and we can move on to the next 4 MB chunk to upload. This helps preserve the uploaded file and ensures that its final length matches the source file.
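To make the flow concrete, here is a hedged sketch of an offset-checked append against the WebHDFS endpoint using plain requests; the URL shape follows the sample request/response shown below, while the function name, token handling, and error parsing are assumptions rather than the library's actual API (the real client routes this through DatalakeRESTInterface.call):

```
import requests

def append_at_offset(store, path, data, offset, token):
    # Build the APPEND URL with an explicit offset, matching the
    # sample request below (store name and path are placeholders).
    url = ('https://{store}.azuredatalakestore.net/webhdfs/v1/{path}'
           '?op=APPEND&append=true&offset={offset}'
           '&api-version=2015-10-01-preview').format(
               store=store, path=path, offset=offset)
    r = requests.post(url, data=data,
                      headers={'Authorization': 'Bearer ' + token,
                               'Content-Type': 'application/octet-stream'})
    if r.status_code == 400 and 'BadOffsetException' in r.text:
        # Data already exists at or past this offset: skip ahead to the
        # next chunk instead of re-uploading what is already there.
        return False
    r.raise_for_status()
    return True
```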
Here is a sample request/response where the offset is not the end of the file (meaning there is already data at or ahead of the offset specified):
**Request:**
POST https://adlspysample01.azuredatalakestore.net/webhdfs/v1/sample.txt?op=APPEND&append=true&**offset=4**&api-version=2015-10-01-preview HTTP/1.1
x-ms-client-request-id: 33401497-4ac0-451e-ab13-eccd305e3706
accept-language: en-US
Authorization:
User-Agent: Microsoft.Azure.Management.DataLake.Store.DataLakeStoreFileSystemManagementClient/0.12.6-preview AzurePowershell/v2.1.0.0
CommandName: Add-AzureRmDataLakeStoreItemContent
ParameterSetName: __AllParameterSets
Content-Type: application/octet-stream
Host: adlspysample01.azuredatalakestore.net
Content-Length: 4
Expect: 100-continue
test
**Response**
HTTP/1.1 400 Bad Request
Cache-Control: no-cache
Pragma: no-cache
Content-Length: 260
Content-Type: application/json; charset=utf-8
Expires: -1
x-ms-request-id: a8b65992-21f3-4c5d-8fe0-f0b49d0320f1
Server-Perf: [a8b65992-21f3-4c5d-8fe0-f0b49d0320f1][ AuthTime::0::PostAuthTime::0 ][S-FsOpenStream :: 00:00:010 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:036 ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[APPEND :: 00:00:269 ms]%0a
x-ms-webhdfs-version: 16.07.18.01
Status: 0x83090015
X-Content-Type-Options: nosniff
Strict-Transport-Security: max-age=15724800; includeSubDomains
Date: Thu, 15 Sep 2016 01:21:15 GMT
**{"RemoteException":{"exception":"BadOffsetException","message":"FsAppendStream failed with error 0x83090015 (Bad offset). [a8b65992-21f3-4c5d-8fe0-f0b49d0320f1][2016-09-14T18:21:16.0457431-07:00]","javaClassName":"org.apache.hadoop.fs.adl.BadOffsetException"}}** | Azure/azure-data-lake-store-python | diff --git a/tests/recordings/test_core/test_append.yaml b/tests/recordings/test_core/test_append.yaml
index 8dca3db..cd71edc 100644
--- a/tests/recordings/test_core/test_append.yaml
+++ b/tests/recordings/test_core/test_append.yaml
@@ -14,15 +14,15 @@ interactions:
Cache-Control: [no-cache]
Content-Length: ['34']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:42 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:10 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[5930a2bd-5921-4aec-9001-0ba8a7d0368f][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:006 ms]%0a[LISTSTATUS :: 00:00:006 ms]%0a']
+ Server-Perf: ['[52cdd20a-6dfe-4a2d-8199-952ced0c7fa7][ AuthTime::937.845196953906::PostAuthTime::226.228959958329
+ ][S-HdfsListStatus :: 00:00:007 ms]%0a[LISTSTATUS :: 00:00:008 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [5930a2bd-5921-4aec-9001-0ba8a7d0368f]
+ x-ms-request-id: [52cdd20a-6dfe-4a2d-8199-952ced0c7fa7]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -34,27 +34,27 @@ interactions:
Content-Length: ['3']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&write=true&overwrite=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
ContentLength: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:42 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:10 GMT']
Expires: ['-1']
- Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&write=true&overwrite=true']
Pragma: [no-cache]
- Server-Perf: ['[9d3585ce-4e6b-4245-ace6-b30cf3b1a9ea][ AuthTime::0::PostAuthTime::0
- ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
- ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:049 ms]%0a[S-FsAppendStream
- :: 00:00:171 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:171
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[CREATE :: 00:00:274 ms]%0a']
+ Server-Perf: ['[eff4ebee-c325-4c1c-b1ee-ed76c8a75c2e][ AuthTime::948.536952313994::PostAuthTime::210.405852361806
+ ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:003
+ ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:027 ms]%0a[S-FsAppendStream
+ :: 00:00:130 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:130
+ ms]%0a[S-FsAppendStream :: 00:00:017 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[CREATE :: 00:00:208 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [9d3585ce-4e6b-4245-ace6-b30cf3b1a9ea]
+ x-ms-request-id: [eff4ebee-c325-4c1c-b1ee-ed76c8a75c2e]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 201, message: Created}
- request:
@@ -67,20 +67,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":3,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976962773,"modificationTime":1473976962904,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":3,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565111424,"modificationTime":1474565111521,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:42 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:12 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[5b52049c-f4f0-4479-8e11-eb50537eb7b0][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:046 ms]%0a[LISTSTATUS :: 00:00:046 ms]%0a']
+ Server-Perf: ['[bc6ebbcd-e407-4f40-ad36-acfd494cf87d][ AuthTime::977.615345567353::PostAuthTime::212.971322000237
+ ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:013 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [5b52049c-f4f0-4479-8e11-eb50537eb7b0]
+ x-ms-request-id: [bc6ebbcd-e407-4f40-ad36-acfd494cf87d]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -92,24 +92,23 @@ interactions:
Content-Length: ['3']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?append=true&OP=APPEND&offset=3
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:43 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:11 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[c63a6917-9a87-405b-a5a0-b315ec85f2ae][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:010 ms]%0a[S-FsAppendStream
- :: 00:00:064 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:064
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:122 ms]%0a']
+ Server-Perf: ['[2f5f6b4b-7509-4883-a773-cc1333a6ee89][ AuthTime::942.122542644623::PostAuthTime::206.129398799232
+ ][S-FsOpenStream :: 00:00:011 ms]%0a[S-FsAppendStream :: 00:00:072 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:072 ms]%0a[S-FsAppendStream :: 00:00:033
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[APPEND :: 00:00:119 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [c63a6917-9a87-405b-a5a0-b315ec85f2ae]
+ x-ms-request-id: [2f5f6b4b-7509-4883-a773-cc1333a6ee89]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -122,20 +121,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":6,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976962773,"modificationTime":1473976963576,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":6,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565111424,"modificationTime":1474565112724,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:43 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:12 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[aaa54e80-7ec5-4bc1-80d8-50ccf2a87678][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
+ Server-Perf: ['[1e1a7931-76f2-4c24-b35a-4bcf15af8df2][ AuthTime::903.63287786758::PostAuthTime::201.425028620743
+ ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:011 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [aaa54e80-7ec5-4bc1-80d8-50ccf2a87678]
+ x-ms-request-id: [1e1a7931-76f2-4c24-b35a-4bcf15af8df2]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -146,23 +145,23 @@ interactions:
Connection: [keep-alive]
User-Agent: [python-requests/2.11.1]
method: GET
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=OPEN&offset=0&read=true&length=6
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?length=6&OP=OPEN&offset=0&read=true
response:
body: {string: '123456'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/octet-stream]
- Date: ['Thu, 15 Sep 2016 22:02:44 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[d7c1f8b4-11bc-4813-b034-7c6e8b74306f][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:013 ms]%0a[S-FsReadStream :: 00:00:013 ms]%0a[OPEN
- :: 00:00:027 ms]%0a']
+ Server-Perf: ['[8dbd408c-30b1-4e62-8fcf-569331323d72][ AuthTime::841.62520906988::PostAuthTime::163.364242817426
+ ][S-FsOpenStream :: 00:00:013 ms]%0a[S-FsReadStream :: 00:00:024 ms]%0a[OPEN
+ :: 00:00:037 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
Transfer-Encoding: [chunked]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [d7c1f8b4-11bc-4813-b034-7c6e8b74306f]
+ x-ms-request-id: [8dbd408c-30b1-4e62-8fcf-569331323d72]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -174,24 +173,23 @@ interactions:
Content-Length: ['3']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?append=true&OP=APPEND&offset=6
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:44 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[2ca0ce0a-1e25-477d-8b75-1b677810687d][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:289 ms]%0a[S-FsAppendStream
- :: 00:00:085 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:085
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:420 ms]%0a']
+ Server-Perf: ['[80b029d9-208d-4038-9a74-05870958c31e][ AuthTime::920.311776997166::PostAuthTime::209.550544018872
+ ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsAppendStream :: 00:00:064 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:064 ms]%0a[S-FsAppendStream :: 00:00:017
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[APPEND :: 00:00:095 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [2ca0ce0a-1e25-477d-8b75-1b677810687d]
+ x-ms-request-id: [80b029d9-208d-4038-9a74-05870958c31e]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -204,20 +202,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":9,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976962773,"modificationTime":1473976964701,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":9,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565111424,"modificationTime":1474565113506,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:44 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[b6aed1e4-4108-4244-9853-dd9edede0711][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:067 ms]%0a[LISTSTATUS :: 00:00:067 ms]%0a']
+ Server-Perf: ['[e12c453b-93b0-490e-9714-0584cb4521b3][ AuthTime::939.985476839493::PostAuthTime::221.097584861701
+ ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:013 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [b6aed1e4-4108-4244-9853-dd9edede0711]
+ x-ms-request-id: [e12c453b-93b0-490e-9714-0584cb4521b3]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -228,23 +226,23 @@ interactions:
Connection: [keep-alive]
User-Agent: [python-requests/2.11.1]
method: GET
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=OPEN&offset=0&read=true&length=9
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?length=9&OP=OPEN&offset=0&read=true
response:
body: {string: '123456789'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/octet-stream]
- Date: ['Thu, 15 Sep 2016 22:02:44 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[63bbb4da-931c-4d5e-9fb5-9f4eae28ff7b][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:015 ms]%0a[S-FsReadStream :: 00:00:018 ms]%0a[OPEN
- :: 00:00:034 ms]%0a']
+ Server-Perf: ['[5e8456e5-2bbd-421f-9184-0f636f035774][ AuthTime::1365.07386666154::PostAuthTime::247.184428236331
+ ][S-FsOpenStream :: 00:00:013 ms]%0a[S-FsReadStream :: 00:00:017 ms]%0a[OPEN
+ :: 00:00:031 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
Transfer-Encoding: [chunked]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [63bbb4da-931c-4d5e-9fb5-9f4eae28ff7b]
+ x-ms-request-id: [5e8456e5-2bbd-421f-9184-0f636f035774]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -256,22 +254,22 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: DELETE
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=DELETE&recursive=True
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?recursive=True&OP=DELETE
response:
body: {string: '{"boolean":true}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:45 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[fe69bdb6-aa91-4547-850a-46a3d7574db8][ AuthTime::0::PostAuthTime::0
- ][S-FsDelete :: 00:00:094 ms]%0a[DELETE :: 00:00:095 ms]%0a']
+ Server-Perf: ['[201d5c1f-bbcb-445f-95bd-22f9979c565a][ AuthTime::938.702913571335::PostAuthTime::216.393473470203
+ ][S-FsDelete :: 00:00:046 ms]%0a[DELETE :: 00:00:053 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [fe69bdb6-aa91-4547-850a-46a3d7574db8]
+ x-ms-request-id: [201d5c1f-bbcb-445f-95bd-22f9979c565a]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
version: 1
diff --git a/tests/recordings/test_core/test_chmod.yaml b/tests/recordings/test_core/test_chmod.yaml
index 32bbc8c..26d47a4 100644
--- a/tests/recordings/test_core/test_chmod.yaml
+++ b/tests/recordings/test_core/test_chmod.yaml
@@ -8,26 +8,26 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?overwrite=true&OP=CREATE&write=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
ContentLength: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:04 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:17 GMT']
Expires: ['-1']
- Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?overwrite=true&OP=CREATE&write=true']
Pragma: [no-cache]
- Server-Perf: ['[4f7ad0b4-6bf9-4b83-be33-d2dedfb0ad94][ AuthTime::0::PostAuthTime::0
- ][S-HdfsGetFileStatusV2 :: 00:00:010 ms]%0a[S-HdfsCheckAccess :: 00:00:006
+ Server-Perf: ['[99ddd83c-d47d-4cbf-b60a-ea3f27a1dd4b][ AuthTime::9475.11797924592::PostAuthTime::235.210096072633
+ ][S-HdfsGetFileStatusV2 :: 00:00:009 ms]%0a[S-HdfsCheckAccess :: 00:00:004
ms]%0a[S-FsDelete :: 00:00:008 ms]%0a[S-FsOpenStream :: 00:00:052 ms]%0a[BufferingTime
- :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:038
- ms]%0a[S-FsCloseHandle :: 00:00:006 ms]%0a[CREATE :: 00:00:129 ms]%0a']
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:043
+ ms]%0a[S-FsCloseHandle :: 00:00:002 ms]%0a[CREATE :: 00:00:188 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [4f7ad0b4-6bf9-4b83-be33-d2dedfb0ad94]
+ x-ms-request-id: [99ddd83c-d47d-4cbf-b60a-ea3f27a1dd4b]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 201, message: Created}
- request:
@@ -40,20 +40,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976984965,"modificationTime":1473976984965,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565178086,"modificationTime":1474565178086,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:04 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:18 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[af7f71ad-8204-4f92-9d07-75ac8d809809][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:049 ms]%0a[LISTSTATUS :: 00:00:049 ms]%0a']
+ Server-Perf: ['[5eeffd1b-395f-417f-9740-806cf8844706][ AuthTime::1194.86677705856::PostAuthTime::287.383849027685
+ ][S-HdfsListStatus :: 00:00:014 ms]%0a[LISTSTATUS :: 00:00:014 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [af7f71ad-8204-4f92-9d07-75ac8d809809]
+ x-ms-request-id: [5eeffd1b-395f-417f-9740-806cf8844706]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -65,21 +65,21 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETPERMISSION&permission=0555
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?permission=0555&OP=SETPERMISSION
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:04 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:19 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[c785adfb-1572-4463-8a5e-0e32124cab1a][ AuthTime::0::PostAuthTime::0
- ][S-HdfsSetPermission :: 00:00:031 ms]%0a[SETPERMISSION :: 00:00:033 ms]%0a']
+ Server-Perf: ['[8789f155-60af-4551-82ee-8685e2f3f9eb][ AuthTime::934.852018827047::PostAuthTime::213.827085733542
+ ][S-HdfsSetPermission :: 00:00:021 ms]%0a[SETPERMISSION :: 00:00:024 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [c785adfb-1572-4463-8a5e-0e32124cab1a]
+ x-ms-request-id: [8789f155-60af-4551-82ee-8685e2f3f9eb]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -92,20 +92,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976984965,"modificationTime":1473976984965,"replication":1,"permission":"555","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565178086,"modificationTime":1474565178086,"replication":1,"permission":"555","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:05 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:19 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[5c6f6fef-a28e-4305-a62e-42828c244ea1][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:052 ms]%0a[LISTSTATUS :: 00:00:053 ms]%0a']
+ Server-Perf: ['[510632c1-841e-4fde-a741-4697cd290e00][ AuthTime::1052.4569159805::PostAuthTime::279.258173968006
+ ][S-HdfsListStatus :: 00:00:070 ms]%0a[LISTSTATUS :: 00:00:071 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [5c6f6fef-a28e-4305-a62e-42828c244ea1]
+ x-ms-request-id: [510632c1-841e-4fde-a741-4697cd290e00]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -117,24 +117,24 @@ interactions:
Content-Length: ['4']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?offset=0&append=true&OP=APPEND
response:
body: {string: '{"RemoteException":{"exception":"AccessControlException","message":"Operation
failed. failed with error 0x83090aa2 (Either the resource does not exist or
- the current user is not authorized to perform the requested operation). [3d881627-792b-4b62-ab4a-8885703a0873][2016-09-15T15:03:06.1254451-07:00]","javaClassName":"org.apache.hadoop.security.AccessControlException"}}'}
+ the current user is not authorized to perform the requested operation). [ed9b7720-b939-45d5-99dd-76fc051c6076][2016-09-22T10:26:20.1195352-07:00]","javaClassName":"org.apache.hadoop.security.AccessControlException"}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['370']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:05 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:19 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[3d881627-792b-4b62-ab4a-8885703a0873][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:005 ms]%0a[APPEND :: 00:00:009 ms]%0a']
+ Server-Perf: ['[ed9b7720-b939-45d5-99dd-76fc051c6076][ AuthTime::9665.85198442482::PostAuthTime::215.110324226426
+ ][S-FsOpenStream :: 00:00:007 ms]%0a[APPEND :: 00:00:022 ms]%0a']
Status: ['0x83090AA2']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [3d881627-792b-4b62-ab4a-8885703a0873]
+ x-ms-request-id: [ed9b7720-b939-45d5-99dd-76fc051c6076]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 403, message: Forbidden}
- request:
@@ -146,21 +146,21 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETPERMISSION&permission=0770
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?permission=0770&OP=SETPERMISSION
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:05 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:19 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[3ca631ca-8e42-467e-88ab-b4054392613d][ AuthTime::0::PostAuthTime::0
- ][S-HdfsSetPermission :: 00:00:023 ms]%0a[SETPERMISSION :: 00:00:025 ms]%0a']
+ Server-Perf: ['[dd9f3644-d08d-4e85-9583-b52efe1ecd96][ AuthTime::916.891719963136::PostAuthTime::210.833777025105
+ ][S-HdfsSetPermission :: 00:00:018 ms]%0a[SETPERMISSION :: 00:00:035 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [3ca631ca-8e42-467e-88ab-b4054392613d]
+ x-ms-request-id: [dd9f3644-d08d-4e85-9583-b52efe1ecd96]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -173,20 +173,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976984965,"modificationTime":1473976984965,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565178086,"modificationTime":1474565178086,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:05 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:19 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[f70b977f-3503-46fb-a15b-507e05125840][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:013 ms]%0a[LISTSTATUS :: 00:00:013 ms]%0a']
+ Server-Perf: ['[85d4d9c9-8049-48aa-bcb0-aaff876ce422][ AuthTime::1884.67435162199::PostAuthTime::712.900418460144
+ ][S-HdfsListStatus :: 00:00:013 ms]%0a[LISTSTATUS :: 00:00:014 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [f70b977f-3503-46fb-a15b-507e05125840]
+ x-ms-request-id: [85d4d9c9-8049-48aa-bcb0-aaff876ce422]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -198,22 +198,22 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: DELETE
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=DELETE&recursive=False
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?recursive=False&OP=DELETE
response:
body: {string: '{"boolean":true}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:06 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:20 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[9feb7979-fdf9-4c6f-9a21-87e12f89b642][ AuthTime::0::PostAuthTime::0
- ][S-FsDelete :: 00:00:121 ms]%0a[DELETE :: 00:00:122 ms]%0a']
+ Server-Perf: ['[5221fe20-3a45-4f68-a2d3-9c9f5839b47e][ AuthTime::929.720566368321::PostAuthTime::221.952609910376
+ ][S-FsDelete :: 00:00:070 ms]%0a[DELETE :: 00:00:078 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [9feb7979-fdf9-4c6f-9a21-87e12f89b642]
+ x-ms-request-id: [5221fe20-3a45-4f68-a2d3-9c9f5839b47e]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -232,15 +232,15 @@ interactions:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:06 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:21 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[0e93781c-14b2-46c0-bc5c-24a1e7b1a4e5][ AuthTime::0::PostAuthTime::0
- ][S-HdfsMkdirs :: 00:00:033 ms]%0a[MKDIRS :: 00:00:034 ms]%0a']
+ Server-Perf: ['[d865f65d-9de6-4092-8921-cbd6f0c600e3][ AuthTime::949.393478693173::PostAuthTime::219.386871427747
+ ][S-HdfsMkdirs :: 00:00:009 ms]%0a[MKDIRS :: 00:00:013 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [0e93781c-14b2-46c0-bc5c-24a1e7b1a4e5]
+ x-ms-request-id: [d865f65d-9de6-4092-8921-cbd6f0c600e3]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -252,26 +252,26 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep/file?OP=CREATE&overwrite=true&write=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep/file?overwrite=true&OP=CREATE&write=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
ContentLength: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:07 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:21 GMT']
Expires: ['-1']
- Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep/file?OP=CREATE&overwrite=true&write=true']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep/file?overwrite=true&OP=CREATE&write=true']
Pragma: [no-cache]
- Server-Perf: ['[3f44fdb7-1afa-46b3-bedc-32c71cc564f0][ AuthTime::0::PostAuthTime::0
+ Server-Perf: ['[1d663c6e-dd1a-4dda-a667-5e044e420d90][ AuthTime::929.294899099016::PostAuthTime::210.834047517632
][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
- ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:056 ms]%0a[BufferingTime
- :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:031
- ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[CREATE :: 00:00:109 ms]%0a']
+ ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:332 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:034
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[CREATE :: 00:00:393 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [3f44fdb7-1afa-46b3-bedc-32c71cc564f0]
+ x-ms-request-id: [1d663c6e-dd1a-4dda-a667-5e044e420d90]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 201, message: Created}
- request:
@@ -283,21 +283,21 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep?OP=SETPERMISSION&permission=660
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep?permission=660&OP=SETPERMISSION
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:07 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:21 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[d9a27143-a1fe-4c67-b96c-c2f70ac473a0][ AuthTime::0::PostAuthTime::0
- ][S-HdfsSetPermission :: 00:00:012 ms]%0a[SETPERMISSION :: 00:00:015 ms]%0a']
+ Server-Perf: ['[2c34850d-954b-4f73-8193-9fb9abbd73e8][ AuthTime::909.619644705218::PostAuthTime::209.122710983005
+ ][S-HdfsSetPermission :: 00:00:007 ms]%0a[SETPERMISSION :: 00:00:010 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [d9a27143-a1fe-4c67-b96c-c2f70ac473a0]
+ x-ms-request-id: [2c34850d-954b-4f73-8193-9fb9abbd73e8]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -312,22 +312,22 @@ interactions:
response:
body: {string: '{"RemoteException":{"exception":"AccessControlException","message":"ListStatus
failed with error 0x83090aa2 (Either the resource does not exist or the current
- user is not authorized to perform the requested operation). [1fe99c01-cb00-4ac3-818a-955349707d41][2016-09-15T15:03:09.3105410-07:00]","javaClassName":"org.apache.hadoop.security.AccessControlException"}}'}
+ user is not authorized to perform the requested operation). [9a329262-c9d9-4de2-8126-ef83b7926934][2016-09-22T10:26:23.4072586-07:00]","javaClassName":"org.apache.hadoop.security.AccessControlException"}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['363']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:09 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:23 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[1fe99c01-cb00-4ac3-818a-955349707d41][ AuthTime::0::PostAuthTime::0
+ Server-Perf: ['[9a329262-c9d9-4de2-8126-ef83b7926934][ AuthTime::903.63287786758::PostAuthTime::185.601831043318
][S-HdfsListStatus :: 00:00:006 ms]%0a[S-HdfsListStatus :: 00:00:006 ms]%0a[S-HdfsListStatus
:: 00:00:006 ms]%0a[S-HdfsListStatus :: 00:00:006 ms]%0a[S-HdfsListStatus
- :: 00:00:005 ms]%0a[LISTSTATUS :: 00:01:248 ms]%0a']
+ :: 00:00:006 ms]%0a[LISTSTATUS :: 00:01:257 ms]%0a']
Status: ['0x83090AA2']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [1fe99c01-cb00-4ac3-818a-955349707d41]
+ x-ms-request-id: [9a329262-c9d9-4de2-8126-ef83b7926934]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 403, message: Forbidden}
- request:
@@ -339,21 +339,21 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep?OP=SETPERMISSION&permission=770
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep?permission=770&OP=SETPERMISSION
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:09 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:23 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[dc6427a9-3bec-4738-af07-08b83c791316][ AuthTime::0::PostAuthTime::0
- ][S-HdfsSetPermission :: 00:00:027 ms]%0a[SETPERMISSION :: 00:00:027 ms]%0a']
+ Server-Perf: ['[ea2a2969-823f-4cf3-806b-b5500a71bbe1][ AuthTime::0::PostAuthTime::0
+ ][S-HdfsSetPermission :: 00:00:021 ms]%0a[SETPERMISSION :: 00:00:021 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [dc6427a9-3bec-4738-af07-08b83c791316]
+ x-ms-request-id: [ea2a2969-823f-4cf3-806b-b5500a71bbe1]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -366,20 +366,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"deep","type":"DIRECTORY","blockSize":0,"accessTime":1473976987201,"modificationTime":1473976987526,"replication":0,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"deep","type":"DIRECTORY","blockSize":0,"accessTime":1474565181120,"modificationTime":1474565181660,"replication":0,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:09 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:23 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[03ffce62-9926-4c59-8df5-60fb90aac6ad][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:007 ms]%0a[LISTSTATUS :: 00:00:008 ms]%0a']
+ Server-Perf: ['[6339da02-e518-4824-8046-c97be79a312c][ AuthTime::853.169342353978::PostAuthTime::182.180521224458
+ ][S-HdfsListStatus :: 00:00:006 ms]%0a[LISTSTATUS :: 00:00:007 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [03ffce62-9926-4c59-8df5-60fb90aac6ad]
+ x-ms-request-id: [6339da02-e518-4824-8046-c97be79a312c]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -391,22 +391,22 @@ interactions:
Content-Length: ['0']
User-Agent: [python-requests/2.11.1]
method: DELETE
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep?OP=DELETE&recursive=True
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/deep?recursive=True&OP=DELETE
response:
body: {string: '{"boolean":true}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:09 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:23 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[2b7efbfb-a6cc-4dc6-b959-0e4a8a82c533][ AuthTime::0::PostAuthTime::0
- ][S-FsDelete :: 00:00:045 ms]%0a[DELETE :: 00:00:046 ms]%0a']
+ Server-Perf: ['[05164205-0afa-4669-aebf-aec027f48737][ AuthTime::935.281272906266::PostAuthTime::224.946478988887
+ ][S-FsDelete :: 00:00:054 ms]%0a[DELETE :: 00:00:064 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [2b7efbfb-a6cc-4dc6-b959-0e4a8a82c533]
+ x-ms-request-id: [05164205-0afa-4669-aebf-aec027f48737]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -418,23 +418,23 @@ interactions:
Content-Length: ['4']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?offset=0&append=true&OP=APPEND
response:
body: {string: '{"RemoteException":{"exception":"FileNotFoundException","message":"Operation
- failed. failed with error 0x8309000a (Stream not found). [7f0e5641-29ef-4a67-87f9-97c8b72f4e51][2016-09-15T15:03:10.3712624-07:00]","javaClassName":"java.io.FileNotFoundException"}}'}
+ failed. failed with error 0x8309000a (Stream not found). [3cbc2496-6cc2-4875-a41b-e8fa6a30e142][2016-09-22T10:26:24.2909766-07:00]","javaClassName":"java.io.FileNotFoundException"}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['258']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:09 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:23 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[7f0e5641-29ef-4a67-87f9-97c8b72f4e51][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:006 ms]%0a[APPEND :: 00:00:006 ms]%0a']
+ Server-Perf: ['[3cbc2496-6cc2-4875-a41b-e8fa6a30e142][ AuthTime::829.223211578505::PostAuthTime::157.377071614693
+ ][S-FsOpenStream :: 00:00:006 ms]%0a[APPEND :: 00:00:011 ms]%0a']
Status: ['0x8309000A']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [7f0e5641-29ef-4a67-87f9-97c8b72f4e51]
+ x-ms-request-id: [3cbc2496-6cc2-4875-a41b-e8fa6a30e142]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 404, message: Not Found}
version: 1
diff --git a/tests/recordings/test_core/test_delimiters_dash.yaml b/tests/recordings/test_core/test_delimiters_dash.yaml
index a27f46d..6dbaeda 100644
--- a/tests/recordings/test_core/test_delimiters_dash.yaml
+++ b/tests/recordings/test_core/test_delimiters_dash.yaml
@@ -8,27 +8,27 @@ interactions:
Content-Length: ['5']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?overwrite=true&OP=CREATE&write=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
ContentLength: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:02 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:59 GMT']
Expires: ['-1']
- Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?overwrite=true&OP=CREATE&write=true']
Pragma: [no-cache]
- Server-Perf: ['[5b95c94d-a0fe-4769-9a4f-701b33318388][ AuthTime::0::PostAuthTime::0
- ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
- ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:072 ms]%0a[S-FsAppendStream
- :: 00:00:450 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:450
+ Server-Perf: ['[8a0b505d-7508-4a84-b926-3b7a09a80771][ AuthTime::1069.13634310895::PostAuthTime::274.981867447621
+ ][S-HdfsGetFileStatusV2 :: 00:00:007 ms]%0a[S-HdfsCheckAccess :: 00:00:003
+ ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:058 ms]%0a[S-FsAppendStream
+ :: 00:00:121 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:121
ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[CREATE :: 00:00:576 ms]%0a']
+ ms]%0a[CREATE :: 00:00:236 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [5b95c94d-a0fe-4769-9a4f-701b33318388]
+ x-ms-request-id: [8a0b505d-7508-4a84-b926-3b7a09a80771]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 201, message: Created}
- request:
@@ -41,20 +41,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":5,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976982129,"modificationTime":1473976982572,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":5,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565219213,"modificationTime":1474565219310,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:02 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:58 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[09e96669-fa51-4e9a-a192-6cfbf35e1489][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:011 ms]%0a']
+ Server-Perf: ['[9640250c-6808-425c-a434-c44713249df4][ AuthTime::970.347315058815::PostAuthTime::232.643869278094
+ ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [09e96669-fa51-4e9a-a192-6cfbf35e1489]
+ x-ms-request-id: [9640250c-6808-425c-a434-c44713249df4]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -65,23 +65,23 @@ interactions:
Connection: [keep-alive]
User-Agent: [python-requests/2.11.1]
method: GET
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=OPEN&offset=0&read=true&length=5
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?length=5&read=true&OP=OPEN&offset=0
response:
body: {string: 123--}
headers:
Cache-Control: [no-cache]
Content-Type: [application/octet-stream]
- Date: ['Thu, 15 Sep 2016 22:03:03 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:58 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[2bd0f359-622e-43a7-87b5-dc7ba9e36aa2][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:014 ms]%0a[S-FsReadStream :: 00:00:040 ms]%0a[OPEN
- :: 00:00:055 ms]%0a']
+ Server-Perf: ['[1220fd48-2d7b-4ad4-b082-6c68c91bf557][ AuthTime::1488.23778960765::PostAuthTime::219.814432143199
+ ][S-FsOpenStream :: 00:00:014 ms]%0a[S-FsReadStream :: 00:00:017 ms]%0a[OPEN
+ :: 00:00:031 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
Transfer-Encoding: [chunked]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [2bd0f359-622e-43a7-87b5-dc7ba9e36aa2]
+ x-ms-request-id: [1220fd48-2d7b-4ad4-b082-6c68c91bf557]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -93,24 +93,24 @@ interactions:
Content-Length: ['5']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?append=true&OP=APPEND
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:03 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:59 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[e65fe050-b000-4cd9-992a-46f072d5c8a1][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:009 ms]%0a[S-FsAppendStream
- :: 00:00:066 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:066
- ms]%0a[S-FsAppendStream :: 00:00:017 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:106 ms]%0a']
+ Server-Perf: ['[e7302618-0532-49e4-8dc1-b3cc85facbee][ AuthTime::1259.01549606878::PostAuthTime::295.937066331385
+ ][S-FsOpenStream :: 00:00:013 ms]%0a[S-FsGetStreamLength :: 00:00:009 ms]%0a[S-FsAppendStream
+ :: 00:00:377 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:377
+ ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[APPEND :: 00:00:437 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [e65fe050-b000-4cd9-992a-46f072d5c8a1]
+ x-ms-request-id: [e7302618-0532-49e4-8dc1-b3cc85facbee]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -122,24 +122,23 @@ interactions:
Content-Length: ['3']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?append=true&OP=APPEND&offset=10
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:03 GMT']
+ Date: ['Thu, 22 Sep 2016 17:27:00 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[2a43de68-405f-4bb3-8eec-1201c2033dbe][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:009 ms]%0a[S-FsAppendStream
- :: 00:00:063 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:063
- ms]%0a[S-FsAppendStream :: 00:00:017 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:104 ms]%0a']
+ Server-Perf: ['[3bba5c29-27ce-4dbf-bcb7-a340ea65cf90][ AuthTime::982.749706308372::PostAuthTime::233.926931832324
+ ][S-FsOpenStream :: 00:00:011 ms]%0a[S-FsAppendStream :: 00:00:066 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:067 ms]%0a[S-FsAppendStream :: 00:00:033
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[APPEND :: 00:00:115 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [2a43de68-405f-4bb3-8eec-1201c2033dbe]
+ x-ms-request-id: [3bba5c29-27ce-4dbf-bcb7-a340ea65cf90]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -152,20 +151,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":13,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976982129,"modificationTime":1473976983807,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":13,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565219213,"modificationTime":1474565220998,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['289']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:03 GMT']
+ Date: ['Thu, 22 Sep 2016 17:27:01 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[c78e2edb-17ec-407f-a375-c2379e5e8334][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:013 ms]%0a']
+ Server-Perf: ['[7f1fb1e4-95fd-43cf-9045-02b31f05c754][ AuthTime::925.870489321484::PostAuthTime::212.543941428535
+ ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [c78e2edb-17ec-407f-a375-c2379e5e8334]
+ x-ms-request-id: [7f1fb1e4-95fd-43cf-9045-02b31f05c754]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -176,23 +175,23 @@ interactions:
Connection: [keep-alive]
User-Agent: [python-requests/2.11.1]
method: GET
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=OPEN&offset=0&read=true&length=13
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?length=13&read=true&OP=OPEN&offset=0
response:
body: {string: 123--456--789}
headers:
Cache-Control: [no-cache]
Content-Type: [application/octet-stream]
- Date: ['Thu, 15 Sep 2016 22:03:03 GMT']
+ Date: ['Thu, 22 Sep 2016 17:27:01 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[ab167687-47a9-4d73-99b4-609357e6dd70][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:013 ms]%0a[S-FsReadStream :: 00:00:020 ms]%0a[OPEN
- :: 00:00:034 ms]%0a']
+ Server-Perf: ['[7480abfc-e2db-486b-b5de-2b470cebcadb][ AuthTime::1024.65983303519::PostAuthTime::264.290390991547
+ ][S-FsOpenStream :: 00:00:015 ms]%0a[S-FsReadStream :: 00:00:043 ms]%0a[OPEN
+ :: 00:00:060 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
Transfer-Encoding: [chunked]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [ab167687-47a9-4d73-99b4-609357e6dd70]
+ x-ms-request-id: [7480abfc-e2db-486b-b5de-2b470cebcadb]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -211,15 +210,15 @@ interactions:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:04 GMT']
+ Date: ['Thu, 22 Sep 2016 17:27:01 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[5a14e236-e946-4444-b6c7-4f59a12b607f][ AuthTime::0::PostAuthTime::0
- ][S-FsDelete :: 00:00:094 ms]%0a[DELETE :: 00:00:095 ms]%0a']
+ Server-Perf: ['[23d05587-c727-4452-bef5-871b7523b3fa][ AuthTime::1038.34388426999::PostAuthTime::250.605237307337
+ ][S-FsDelete :: 00:00:070 ms]%0a[DELETE :: 00:00:070 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [5a14e236-e946-4444-b6c7-4f59a12b607f]
+ x-ms-request-id: [23d05587-c727-4452-bef5-871b7523b3fa]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
version: 1
diff --git a/tests/recordings/test_core/test_delimiters_newline.yaml b/tests/recordings/test_core/test_delimiters_newline.yaml
index 5aa0e59..61a6b54 100644
--- a/tests/recordings/test_core/test_delimiters_newline.yaml
+++ b/tests/recordings/test_core/test_delimiters_newline.yaml
@@ -10,27 +10,27 @@ interactions:
Content-Length: ['4']
User-Agent: [python-requests/2.11.1]
method: PUT
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?overwrite=true&OP=CREATE&write=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
ContentLength: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:59 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:47 GMT']
Expires: ['-1']
- Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?overwrite=true&OP=CREATE&write=true']
Pragma: [no-cache]
- Server-Perf: ['[3468f47f-3a22-4b3c-b8ab-c0bd8351ed34][ AuthTime::0::PostAuthTime::0
- ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
- ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:066 ms]%0a[S-FsAppendStream
- :: 00:00:422 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:422
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[CREATE :: 00:00:542 ms]%0a']
+ Server-Perf: ['[b4ff5326-06a7-49db-9d87-262f3302232c][ AuthTime::1427.93911052095::PostAuthTime::403.278401084532
+ ][S-HdfsGetFileStatusV2 :: 00:00:008 ms]%0a[S-HdfsCheckAccess :: 00:00:003
+ ms]%0a[S-FsDelete :: 00:00:008 ms]%0a[S-FsOpenStream :: 00:00:048 ms]%0a[S-FsAppendStream
+ :: 00:00:138 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:140
+ ms]%0a[S-FsAppendStream :: 00:00:030 ms]%0a[S-FsCloseHandle :: 00:00:002
+ ms]%0a[CREATE :: 00:00:257 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [3468f47f-3a22-4b3c-b8ab-c0bd8351ed34]
+ x-ms-request-id: [b4ff5326-06a7-49db-9d87-262f3302232c]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 201, message: Created}
- request:
@@ -43,20 +43,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":4,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976978898,"modificationTime":1473976979178,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":4,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565207746,"modificationTime":1474565207873,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:59 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:47 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[df0820eb-e785-44cb-b196-00a2c0d1b896][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:043 ms]%0a[LISTSTATUS :: 00:00:043 ms]%0a']
+ Server-Perf: ['[4bd98882-2c80-414b-aad6-a7cb03d6c1d1][ AuthTime::869.422417841078::PostAuthTime::205.702008352956
+ ][S-HdfsListStatus :: 00:00:023 ms]%0a[LISTSTATUS :: 00:00:023 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [df0820eb-e785-44cb-b196-00a2c0d1b896]
+ x-ms-request-id: [4bd98882-2c80-414b-aad6-a7cb03d6c1d1]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -67,7 +67,7 @@ interactions:
Connection: [keep-alive]
User-Agent: [python-requests/2.11.1]
method: GET
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=OPEN&offset=0&read=true&length=4
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?read=true&length=4&OP=OPEN&offset=0
response:
body: {string: '123
@@ -75,17 +75,18 @@ interactions:
headers:
Cache-Control: [no-cache]
Content-Type: [application/octet-stream]
- Date: ['Thu, 15 Sep 2016 22:02:59 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:47 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[b66716fb-47e0-4665-9019-e938c4505793][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:012 ms]%0a[S-FsReadStream :: 00:00:032 ms]%0a[OPEN
- :: 00:00:045 ms]%0a']
+ Server-Perf: ['[84be60d5-353b-41ee-9ee9-bee7a748f054][ AuthTime::945.11612312511::PostAuthTime::209.978287988429
+ ][S-FsReadStream :: 00:00:004 ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[S-FsOpenStream
+ :: 00:00:013 ms]%0a[S-FsReadStream :: 00:00:014 ms]%0a[OPEN :: 00:00:033
+ ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
Transfer-Encoding: [chunked]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [b66716fb-47e0-4665-9019-e938c4505793]
+ x-ms-request-id: [84be60d5-353b-41ee-9ee9-bee7a748f054]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -105,18 +106,18 @@ interactions:
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:59 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:49 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[f0c87027-d206-44f2-81b0-9764fbff65b5][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:038 ms]%0a[S-FsAppendStream
- :: 00:00:076 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:076
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:161 ms]%0a']
+ Server-Perf: ['[d3120b12-1e1c-4dd9-8dab-e234a934b51a][ AuthTime::939.983868884652::PostAuthTime::217.248319105279
+ ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:009 ms]%0a[S-FsAppendStream
+ :: 00:00:059 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:059
+ ms]%0a[S-FsAppendStream :: 00:00:034 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[APPEND :: 00:00:116 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [f0c87027-d206-44f2-81b0-9764fbff65b5]
+ x-ms-request-id: [d3120b12-1e1c-4dd9-8dab-e234a934b51a]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -128,24 +129,23 @@ interactions:
Content-Length: ['3']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?offset=8&OP=APPEND&append=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:03:00 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:49 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[77837b1a-c9de-4720-8fd3-f9a7aad83bad][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:011 ms]%0a[S-FsGetStreamLength :: 00:00:009 ms]%0a[S-FsAppendStream
- :: 00:00:202 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:202
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:258 ms]%0a']
+ Server-Perf: ['[915ea582-8aaf-48b1-825a-8907e22849ae][ AuthTime::0::PostAuthTime::0
+ ][S-FsOpenStream :: 00:00:012 ms]%0a[S-FsAppendStream :: 00:00:067 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:067 ms]%0a[S-FsAppendStream :: 00:00:033
+ ms]%0a[S-FsCloseHandle :: 00:00:003 ms]%0a[APPEND :: 00:00:120 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [77837b1a-c9de-4720-8fd3-f9a7aad83bad]
+ x-ms-request-id: [915ea582-8aaf-48b1-825a-8907e22849ae]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -158,20 +158,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":11,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976978898,"modificationTime":1473976980788,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":11,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565207746,"modificationTime":1474565209358,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['289']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:01 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:48 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[1aa2d082-7c23-4438-9da0-af2d9978cee2][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:142 ms]%0a[LISTSTATUS :: 00:00:142 ms]%0a']
+ Server-Perf: ['[6599b907-b64c-46cb-9f3a-72670ca12a59][ AuthTime::897.219219137027::PostAuthTime::196.293432594802
+ ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [1aa2d082-7c23-4438-9da0-af2d9978cee2]
+ x-ms-request-id: [6599b907-b64c-46cb-9f3a-72670ca12a59]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -182,7 +182,7 @@ interactions:
Connection: [keep-alive]
User-Agent: [python-requests/2.11.1]
method: GET
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=OPEN&offset=0&read=true&length=11
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?read=true&length=11&OP=OPEN&offset=0
response:
body: {string: '123
@@ -192,17 +192,17 @@ interactions:
headers:
Cache-Control: [no-cache]
Content-Type: [application/octet-stream]
- Date: ['Thu, 15 Sep 2016 22:03:01 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:49 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[881a9734-496b-4d27-aa99-fa2dc5efb037][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:012 ms]%0a[S-FsReadStream :: 00:00:015 ms]%0a[OPEN
- :: 00:00:027 ms]%0a']
+ Server-Perf: ['[b8c119f4-2fa0-46af-a71f-cce6b98ee32a][ AuthTime::942.979867700566::PostAuthTime::224.091360850384
+ ][S-FsOpenStream :: 00:00:013 ms]%0a[S-FsReadStream :: 00:00:026 ms]%0a[OPEN
+ :: 00:00:039 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
Transfer-Encoding: [chunked]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [881a9734-496b-4d27-aa99-fa2dc5efb037]
+ x-ms-request-id: [b8c119f4-2fa0-46af-a71f-cce6b98ee32a]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -221,15 +221,15 @@ interactions:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:03:01 GMT']
+ Date: ['Thu, 22 Sep 2016 17:26:49 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[019b2891-2e07-491d-82d2-268961387631][ AuthTime::0::PostAuthTime::0
- ][S-FsDelete :: 00:00:071 ms]%0a[DELETE :: 00:00:073 ms]%0a']
+ Server-Perf: ['[8362c403-6b73-4a79-9b72-d47331d2480d][ AuthTime::0::PostAuthTime::0
+ ][S-FsDelete :: 00:00:082 ms]%0a[DELETE :: 00:00:090 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [019b2891-2e07-491d-82d2-268961387631]
+ x-ms-request-id: [8362c403-6b73-4a79-9b72-d47331d2480d]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
version: 1
diff --git a/tests/recordings/test_core/test_write_blocks.yaml b/tests/recordings/test_core/test_write_blocks.yaml
index cfdc502..8c684d5 100644
--- a/tests/recordings/test_core/test_write_blocks.yaml
+++ b/tests/recordings/test_core/test_write_blocks.yaml
@@ -15,20 +15,20 @@ interactions:
Cache-Control: [no-cache]
Content-Length: ['0']
ContentLength: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:46 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:36 GMT']
Expires: ['-1']
Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
Pragma: [no-cache]
- Server-Perf: ['[f4145160-ca39-4c02-b9f0-a23d0b1a4f69][ AuthTime::0::PostAuthTime::0
- ][S-HdfsGetFileStatusV2 :: 00:00:007 ms]%0a[S-HdfsCheckAccess :: 00:00:002
- ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:075 ms]%0a[S-FsAppendStream
- :: 00:00:216 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:216
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[CREATE :: 00:00:346 ms]%0a']
+ Server-Perf: ['[c8fad1e0-4391-40ec-96c7-611833aa59b3][ AuthTime::839.912433569299::PostAuthTime::165.502093580101
+ ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:047 ms]%0a[S-FsAppendStream
+ :: 00:00:169 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:169
+ ms]%0a[S-FsAppendStream :: 00:00:032 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[CREATE :: 00:00:276 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [f4145160-ca39-4c02-b9f0-a23d0b1a4f69]
+ x-ms-request-id: [c8fad1e0-4391-40ec-96c7-611833aa59b3]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 201, message: Created}
- request:
@@ -40,24 +40,23 @@ interactions:
Content-Length: ['1']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&offset=5&append=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:47 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:36 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[ad6da152-dfc0-4d7e-9d05-90a98fe7cf81][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:014 ms]%0a[S-FsGetStreamLength :: 00:00:282 ms]%0a[S-FsAppendStream
- :: 00:00:187 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:188
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:006
- ms]%0a[APPEND :: 00:00:528 ms]%0a']
+ Server-Perf: ['[598dfe00-2b91-42d6-b81d-d4e3dee406fc][ AuthTime::921.168661106583::PostAuthTime::214.68276131639
+ ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsAppendStream :: 00:00:061 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:061 ms]%0a[S-FsAppendStream :: 00:00:033
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[APPEND :: 00:00:107 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [ad6da152-dfc0-4d7e-9d05-90a98fe7cf81]
+ x-ms-request-id: [598dfe00-2b91-42d6-b81d-d4e3dee406fc]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -69,24 +68,23 @@ interactions:
Content-Length: ['3']
User-Agent: [python-requests/2.11.1]
method: POST
- uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&append=true
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=APPEND&offset=6&append=true
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 15 Sep 2016 22:02:47 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:36 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[8fdbd508-a9ce-4190-b57b-ff2075f9ad56][ AuthTime::0::PostAuthTime::0
- ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsGetStreamLength :: 00:00:009 ms]%0a[S-FsAppendStream
- :: 00:00:062 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:063
- ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
- ms]%0a[APPEND :: 00:00:119 ms]%0a']
+ Server-Perf: ['[ac0cefa5-5596-48c6-b74c-2c6d1684477c][ AuthTime::0::PostAuthTime::0
+ ][S-FsOpenStream :: 00:00:010 ms]%0a[S-FsAppendStream :: 00:00:053 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:054 ms]%0a[S-FsAppendStream :: 00:00:032
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[APPEND :: 00:00:098 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [8fdbd508-a9ce-4190-b57b-ff2075f9ad56]
+ x-ms-request-id: [ac0cefa5-5596-48c6-b74c-2c6d1684477c]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -99,20 +97,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":9,"pathSuffix":"","type":"FILE","blockSize":268435456,"accessTime":1473976967207,"modificationTime":1473976968639,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":9,"pathSuffix":"","type":"FILE","blockSize":268435456,"accessTime":1474565136602,"modificationTime":1474565137580,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['287']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:48 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:37 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[ed6d9f2e-67f0-4b8e-a690-add553e8f1a0][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:009 ms]%0a[LISTSTATUS :: 00:00:010 ms]%0a']
+ Server-Perf: ['[e58f478b-92bd-4414-b49e-ff80d0013040][ AuthTime::1122.16550572715::PostAuthTime::246.329013452301
+ ][S-HdfsListStatus :: 00:00:065 ms]%0a[LISTSTATUS :: 00:00:065 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [ed6d9f2e-67f0-4b8e-a690-add553e8f1a0]
+ x-ms-request-id: [e58f478b-92bd-4414-b49e-ff80d0013040]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -125,20 +123,20 @@ interactions:
method: GET
uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
response:
- body: {string: '{"FileStatuses":{"FileStatus":[{"length":9,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1473976967207,"modificationTime":1473976968639,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":9,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1474565136602,"modificationTime":1474565137580,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Length: ['288']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:48 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:37 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[45f65a84-9f70-4b9c-a721-17a33b27b91a][ AuthTime::0::PostAuthTime::0
- ][S-HdfsListStatus :: 00:00:012 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
+ Server-Perf: ['[1f4fb44e-035d-4792-9999-793bf9653f40][ AuthTime::880.539185918215::PostAuthTime::201.424942480563
+ ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [45f65a84-9f70-4b9c-a721-17a33b27b91a]
+ x-ms-request-id: [1f4fb44e-035d-4792-9999-793bf9653f40]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
- request:
@@ -157,15 +155,15 @@ interactions:
Cache-Control: [no-cache]
Content-Length: ['16']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 15 Sep 2016 22:02:49 GMT']
+ Date: ['Thu, 22 Sep 2016 17:25:38 GMT']
Expires: ['-1']
Pragma: [no-cache]
- Server-Perf: ['[f1cf1b88-499d-4925-8802-08e7f76501ec][ AuthTime::0::PostAuthTime::0
- ][S-FsDelete :: 00:00:098 ms]%0a[DELETE :: 00:00:099 ms]%0a']
+ Server-Perf: ['[53d291bc-b35b-4c98-b65f-2263b4815167][ AuthTime::1484.81718829851::PostAuthTime::505.060224475963
+ ][S-FsDelete :: 00:00:078 ms]%0a[DELETE :: 00:00:087 ms]%0a']
Status: ['0x0']
Strict-Transport-Security: [max-age=15724800; includeSubDomains]
X-Content-Type-Options: [nosniff]
- x-ms-request-id: [f1cf1b88-499d-4925-8802-08e7f76501ec]
+ x-ms-request-id: [53d291bc-b35b-4c98-b65f-2263b4815167]
x-ms-webhdfs-version: [16.07.18.01]
status: {code: 200, message: OK}
version: 1
diff --git a/tests/test_transfer.py b/tests/test_transfer.py
index 0474ae0..f160380 100644
--- a/tests/test_transfer.py
+++ b/tests/test_transfer.py
@@ -6,11 +6,13 @@
# license information.
# --------------------------------------------------------------------------
+import os
import pytest
import time
-from tests.testing import azure, posix
+from adlfs.core import AzureDLPath
from adlfs.transfer import ADLTransferClient
+from tests.testing import azure, posix
@pytest.mark.skipif(True, reason="skip until resolve timing issue")
@@ -18,6 +20,7 @@ def test_interrupt(azure):
def transfer(adlfs, src, dst, offset, size, retries=5, shutdown_event=None):
while shutdown_event and not shutdown_event.is_set():
time.sleep(0.1)
+ return size, None
client = ADLTransferClient(azure, 'foobar', transfer=transfer, chunksize=1,
tmp_path=None)
@@ -30,9 +33,45 @@ def test_interrupt(azure):
assert client.progress[0].state != 'finished'
+def test_submit_and_run(azure):
+ def transfer(adlfs, src, dst, offset, size, retries=5, shutdown_event=None):
+ time.sleep(0.1)
+ return size, None
+
+ client = ADLTransferClient(azure, 'foobar', transfer=transfer, chunksize=8,
+ tmp_path=None)
+
+ client.submit('foo', 'bar', 16)
+ client.submit('abc', '123', 8)
+
+ nfiles = len(client.progress)
+ assert nfiles == 2
+ assert len([client.progress[i].chunks for i in range(nfiles)])
+
+ assert all([client.progress[i].state == 'pending' for i in range(nfiles)])
+ assert all([chunk.state == 'pending' for f in client.progress
+ for chunk in f.chunks])
+
+ expected = {('bar', 0), ('bar', 8), ('123', 0)}
+ assert {(chunk.name, chunk.offset) for f in client.progress
+ for chunk in f.chunks} == expected
+
+ client.run()
+
+ assert all([client.progress[i].state == 'finished' for i in range(nfiles)])
+ assert all([chunk.state == 'finished' for f in client.progress
+ for chunk in f.chunks])
+ assert all([chunk.expected == chunk.actual for f in client.progress
+ for chunk in f.chunks])
+
+
def test_temporary_path(azure):
def transfer(adlfs, src, dst, offset, size):
- pass
+ time.sleep(0.1)
+ return size, None
+
+ client = ADLTransferClient(azure, 'foobar', transfer=transfer, chunksize=8,
+ tmp_unique=False)
+ client.submit('foo', AzureDLPath('bar'), 16)
- client = ADLTransferClient(azure, 'foobar', transfer=transfer, tmp_unique=False)
- assert posix(client.temporary_path) == '/tmp'
+ assert os.path.dirname(posix(client.progress[0].chunks[0].name)) == '/tmp'
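
A minimal sketch of the transfer-callback contract the updated tests above encode; it is inferred from the diff and illustrative only (noop_transfer and the make_client helper are hypothetical names, not part of this record):

    # Inferred from test_submit_and_run: a transfer callback must return a
    # (bytes_written, exception) pair, and chunksize controls how each submit
    # is split into (name, offset) chunks.
    from adlfs.transfer import ADLTransferClient

    def noop_transfer(adlfs, src, dst, offset, size, retries=5, shutdown_event=None):
        return size, None  # report the whole chunk as written, with no error

    def make_client(azure_fs):
        client = ADLTransferClient(azure_fs, 'foobar', transfer=noop_transfer,
                                   chunksize=8, tmp_path=None)
        client.submit('foo', 'bar', 16)  # -> chunks ('bar', 0) and ('bar', 8)
        client.run()                     # drives each chunk to state 'finished'
        return client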

meta:
{
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 6
}

version: unknown

install_config:
{
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
}

requirements:
adal==1.2.7
-e git+https://github.com/Azure/azure-data-lake-store-python.git@46645c61e903d76585c410baadb1f49885fdbb2e#egg=adlfs
attrs==22.2.0
azure==4.0.0
azure-applicationinsights==0.1.1
azure-batch==4.1.3
azure-common==1.1.28
azure-core==1.24.2
azure-cosmosdb-nspkg==2.0.2
azure-cosmosdb-table==1.0.6
azure-datalake-store==0.0.53
azure-eventgrid==1.3.0
azure-graphrbac==0.40.0
azure-keyvault==1.1.0
azure-loganalytics==0.1.1
azure-mgmt==4.0.0
azure-mgmt-advisor==1.0.1
azure-mgmt-applicationinsights==0.1.1
azure-mgmt-authorization==0.50.0
azure-mgmt-batch==5.0.1
azure-mgmt-batchai==2.0.0
azure-mgmt-billing==0.2.0
azure-mgmt-cdn==3.1.0
azure-mgmt-cognitiveservices==3.0.0
azure-mgmt-commerce==1.0.1
azure-mgmt-compute==4.6.2
azure-mgmt-consumption==2.0.0
azure-mgmt-containerinstance==1.5.0
azure-mgmt-containerregistry==2.8.0
azure-mgmt-containerservice==4.4.0
azure-mgmt-cosmosdb==0.4.1
azure-mgmt-datafactory==0.6.0
azure-mgmt-datalake-analytics==0.6.0
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.5.0
azure-mgmt-datamigration==1.0.0
azure-mgmt-devspaces==0.1.0
azure-mgmt-devtestlabs==2.2.0
azure-mgmt-dns==2.1.0
azure-mgmt-eventgrid==1.0.0
azure-mgmt-eventhub==2.6.0
azure-mgmt-hanaonazure==0.1.1
azure-mgmt-iotcentral==0.1.0
azure-mgmt-iothub==0.5.0
azure-mgmt-iothubprovisioningservices==0.2.0
azure-mgmt-keyvault==1.1.0
azure-mgmt-loganalytics==0.2.0
azure-mgmt-logic==3.0.0
azure-mgmt-machinelearningcompute==0.4.1
azure-mgmt-managementgroups==0.1.0
azure-mgmt-managementpartner==0.1.1
azure-mgmt-maps==0.1.0
azure-mgmt-marketplaceordering==0.1.0
azure-mgmt-media==1.0.1
azure-mgmt-monitor==0.5.2
azure-mgmt-msi==0.2.0
azure-mgmt-network==2.7.0
azure-mgmt-notificationhubs==2.1.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-policyinsights==0.1.0
azure-mgmt-powerbiembedded==2.0.0
azure-mgmt-rdbms==1.9.0
azure-mgmt-recoveryservices==0.3.0
azure-mgmt-recoveryservicesbackup==0.3.0
azure-mgmt-redis==5.0.0
azure-mgmt-relay==0.1.0
azure-mgmt-reservations==0.2.1
azure-mgmt-resource==2.2.0
azure-mgmt-scheduler==2.0.0
azure-mgmt-search==2.1.0
azure-mgmt-servicebus==0.5.3
azure-mgmt-servicefabric==0.2.0
azure-mgmt-signalr==0.1.1
azure-mgmt-sql==0.9.1
azure-mgmt-storage==2.0.0
azure-mgmt-subscription==0.2.0
azure-mgmt-trafficmanager==0.50.0
azure-mgmt-web==0.35.0
azure-nspkg==3.0.2
azure-servicebus==0.21.1
azure-servicefabric==6.3.0.0
azure-servicemanagement-legacy==0.20.8
azure-storage-blob==1.5.0
azure-storage-common==1.4.2
azure-storage-file==1.4.0
azure-storage-queue==1.4.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
msal==1.27.0
msrest==0.7.1
msrestazure==0.6.4.post1
multidict==5.2.0
oauthlib==3.2.2
packaging==21.3
pathlib2==2.3.7.post1
pluggy==1.0.0
py==1.11.0
pycparser==2.21
PyJWT==2.4.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
vcrpy==4.1.1
wrapt==1.16.0
yarl==1.7.2
zipp==3.6.0
| name: azure-data-lake-store-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==1.2.7
- attrs==22.2.0
- azure==4.0.0
- azure-applicationinsights==0.1.1
- azure-batch==4.1.3
- azure-common==1.1.28
- azure-core==1.24.2
- azure-cosmosdb-nspkg==2.0.2
- azure-cosmosdb-table==1.0.6
- azure-datalake-store==0.0.53
- azure-eventgrid==1.3.0
- azure-graphrbac==0.40.0
- azure-keyvault==1.1.0
- azure-loganalytics==0.1.1
- azure-mgmt==4.0.0
- azure-mgmt-advisor==1.0.1
- azure-mgmt-applicationinsights==0.1.1
- azure-mgmt-authorization==0.50.0
- azure-mgmt-batch==5.0.1
- azure-mgmt-batchai==2.0.0
- azure-mgmt-billing==0.2.0
- azure-mgmt-cdn==3.1.0
- azure-mgmt-cognitiveservices==3.0.0
- azure-mgmt-commerce==1.0.1
- azure-mgmt-compute==4.6.2
- azure-mgmt-consumption==2.0.0
- azure-mgmt-containerinstance==1.5.0
- azure-mgmt-containerregistry==2.8.0
- azure-mgmt-containerservice==4.4.0
- azure-mgmt-cosmosdb==0.4.1
- azure-mgmt-datafactory==0.6.0
- azure-mgmt-datalake-analytics==0.6.0
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.5.0
- azure-mgmt-datamigration==1.0.0
- azure-mgmt-devspaces==0.1.0
- azure-mgmt-devtestlabs==2.2.0
- azure-mgmt-dns==2.1.0
- azure-mgmt-eventgrid==1.0.0
- azure-mgmt-eventhub==2.6.0
- azure-mgmt-hanaonazure==0.1.1
- azure-mgmt-iotcentral==0.1.0
- azure-mgmt-iothub==0.5.0
- azure-mgmt-iothubprovisioningservices==0.2.0
- azure-mgmt-keyvault==1.1.0
- azure-mgmt-loganalytics==0.2.0
- azure-mgmt-logic==3.0.0
- azure-mgmt-machinelearningcompute==0.4.1
- azure-mgmt-managementgroups==0.1.0
- azure-mgmt-managementpartner==0.1.1
- azure-mgmt-maps==0.1.0
- azure-mgmt-marketplaceordering==0.1.0
- azure-mgmt-media==1.0.1
- azure-mgmt-monitor==0.5.2
- azure-mgmt-msi==0.2.0
- azure-mgmt-network==2.7.0
- azure-mgmt-notificationhubs==2.1.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-policyinsights==0.1.0
- azure-mgmt-powerbiembedded==2.0.0
- azure-mgmt-rdbms==1.9.0
- azure-mgmt-recoveryservices==0.3.0
- azure-mgmt-recoveryservicesbackup==0.3.0
- azure-mgmt-redis==5.0.0
- azure-mgmt-relay==0.1.0
- azure-mgmt-reservations==0.2.1
- azure-mgmt-resource==2.2.0
- azure-mgmt-scheduler==2.0.0
- azure-mgmt-search==2.1.0
- azure-mgmt-servicebus==0.5.3
- azure-mgmt-servicefabric==0.2.0
- azure-mgmt-signalr==0.1.1
- azure-mgmt-sql==0.9.1
- azure-mgmt-storage==2.0.0
- azure-mgmt-subscription==0.2.0
- azure-mgmt-trafficmanager==0.50.0
- azure-mgmt-web==0.35.0
- azure-nspkg==3.0.2
- azure-servicebus==0.21.1
- azure-servicefabric==6.3.0.0
- azure-servicemanagement-legacy==0.20.8
- azure-storage-blob==1.5.0
- azure-storage-common==1.4.2
- azure-storage-file==1.4.0
- azure-storage-queue==1.4.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- msal==1.27.0
- msrest==0.7.1
- msrestazure==0.6.4.post1
- multidict==5.2.0
- oauthlib==3.2.2
- packaging==21.3
- pathlib2==2.3.7.post1
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyjwt==2.4.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- vcrpy==4.1.1
- wrapt==1.16.0
- yarl==1.7.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-data-lake-store-python
| [
"tests/test_transfer.py::test_submit_and_run",
"tests/test_transfer.py::test_temporary_path"
]
| []
| []
| []
| MIT License | 766 | [
"adlfs/utils.py",
"adlfs/transfer.py",
"adlfs/multithread.py",
"adlfs/core.py",
"adlfs/cli.py",
"adlfs/lib.py"
]
| [
"adlfs/utils.py",
"adlfs/transfer.py",
"adlfs/multithread.py",
"adlfs/core.py",
"adlfs/cli.py",
"adlfs/lib.py"
]
|
tornadoweb__tornado-1838 | bec14a9152438fbf1b9584dc3da4500137577308 | 2016-09-21 19:50:00 | ecd8968c5135b810cd607b5902dda2cd32122b39 | diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index cadb4116..d6183176 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -616,10 +616,14 @@ class IOLoop(Configurable):
# result, which should just be ignored.
pass
else:
- self.add_future(ret, lambda f: f.result())
+ self.add_future(ret, self._discard_future_result)
except Exception:
self.handle_callback_exception(callback)
+ def _discard_future_result(self, future):
+ """Avoid unhandled-exception warnings from spawned coroutines."""
+ future.result()
+
def handle_callback_exception(self, callback):
"""This method is called whenever a callback run by the `IOLoop`
throws an exception.
| Excessive CPU load when returning empty collection
The following causes my CPU to stay pinned at 100% usage:
```python
from tornado.ioloop import IOLoop
from tornado import gen
from threading import Thread
loop = IOLoop.current()
t = Thread(target=loop.start, daemon=True)
t.start()
@gen.coroutine
def f():
yield gen.sleep(0)
return {}
loop.add_callback(f)
from time import sleep # just to stick around in case you run this in a script
sleep(100)
```
Replacing the empty dict `{}` with an empty list `[]` has the same effect. Replacing with `None` behaves as expected.
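A plausible mechanism, inferred from the `ioloop.py` hunk above: `_run_callback` passes any non-`None` callback return through `gen.convert_yielded`, and an empty `{}` or `[]` converts to an already-resolved future. The completion callback was `lambda f: f.result()`, whose own return value is that same `{}`, so each pass scheduled another pass and the loop spun indefinitely. A callback that resolves the future but returns `None` breaks the cycle, which is what the named method in the patch does:
```python
# Sketch of the fix (mirrors the patch): resolve the future so
# exceptions still surface, but return None so _run_callback does
# not treat the result as something yieldable to schedule again.
def _discard_future_result(self, future):
    future.result()
```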
Running Python 3.5, Tornado 4.4.1, on Ubuntu 16.04 | tornadoweb/tornado | diff --git a/tornado/test/ioloop_test.py b/tornado/test/ioloop_test.py
index 8570e73f..1bb8ce08 100644
--- a/tornado/test/ioloop_test.py
+++ b/tornado/test/ioloop_test.py
@@ -9,6 +9,7 @@ import socket
import sys
import threading
import time
+import types
from tornado import gen
from tornado.ioloop import IOLoop, TimeoutError, PollIOLoop, PeriodicCallback
@@ -61,6 +62,25 @@ class FakeTimeIOLoop(PollIOLoop):
class TestIOLoop(AsyncTestCase):
+ def test_add_callback_return_sequence(self):
+ # A callback returning {} or [] shouldn't spin the CPU, see Issue #1803.
+ self.calls = 0
+
+ loop = self.io_loop
+ test = self
+ old_add_callback = loop.add_callback
+
+ def add_callback(self, callback, *args, **kwargs):
+ test.calls += 1
+ old_add_callback(callback, *args, **kwargs)
+
+ loop.add_callback = types.MethodType(add_callback, loop)
+ loop.add_callback(lambda: {})
+ loop.add_callback(lambda: [])
+ loop.add_timeout(datetime.timedelta(milliseconds=50), loop.stop)
+ loop.start()
+ self.assertLess(self.calls, 10)
+
@skipOnTravis
def test_add_callback_wakeup(self):
# Make sure that add_callback from inside a running IOLoop
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"sphinx",
"sphinx_rtd_theme",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@bec14a9152438fbf1b9584dc3da4500137577308#egg=tornado
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- jinja2==3.0.3
- markupsafe==2.0.1
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- urllib3==1.26.20
prefix: /opt/conda/envs/tornado
| [
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_return_sequence"
]
| []
| [
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_from_signal",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_from_signal_other_thread",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_wakeup",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_wakeup_other_thread",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_callback_while_closing",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_timeout_return",
"tornado/test/ioloop_test.py::TestIOLoop::test_add_timeout_timedelta",
"tornado/test/ioloop_test.py::TestIOLoop::test_call_at_return",
"tornado/test/ioloop_test.py::TestIOLoop::test_call_later_return",
"tornado/test/ioloop_test.py::TestIOLoop::test_close_file_object",
"tornado/test/ioloop_test.py::TestIOLoop::test_exception_logging",
"tornado/test/ioloop_test.py::TestIOLoop::test_exception_logging_future",
"tornado/test/ioloop_test.py::TestIOLoop::test_exception_logging_native_coro",
"tornado/test/ioloop_test.py::TestIOLoop::test_handle_callback_exception",
"tornado/test/ioloop_test.py::TestIOLoop::test_handler_callback_file_object",
"tornado/test/ioloop_test.py::TestIOLoop::test_mixed_fd_fileobj",
"tornado/test/ioloop_test.py::TestIOLoop::test_multiple_add",
"tornado/test/ioloop_test.py::TestIOLoop::test_read_while_writeable",
"tornado/test/ioloop_test.py::TestIOLoop::test_reentrant",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_handler_from_handler",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_timeout_after_fire",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_timeout_cleanup",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_timeout_from_timeout",
"tornado/test/ioloop_test.py::TestIOLoop::test_remove_without_add",
"tornado/test/ioloop_test.py::TestIOLoop::test_spawn_callback",
"tornado/test/ioloop_test.py::TestIOLoop::test_timeout_with_arguments",
"tornado/test/ioloop_test.py::TestIOLoopCurrent::test_default_current",
"tornado/test/ioloop_test.py::TestIOLoopCurrent::test_force_current",
"tornado/test/ioloop_test.py::TestIOLoopCurrent::test_non_current",
"tornado/test/ioloop_test.py::TestIOLoopAddCallback::test_pre_wrap",
"tornado/test/ioloop_test.py::TestIOLoopAddCallback::test_pre_wrap_with_args",
"tornado/test/ioloop_test.py::TestIOLoopAddCallbackFromSignal::test_pre_wrap",
"tornado/test/ioloop_test.py::TestIOLoopAddCallbackFromSignal::test_pre_wrap_with_args",
"tornado/test/ioloop_test.py::TestIOLoopFutures::test_add_future_stack_context",
"tornado/test/ioloop_test.py::TestIOLoopFutures::test_add_future_threads",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_async_exception",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_async_result",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_current",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_native_coroutine",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_sync_exception",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_sync_result",
"tornado/test/ioloop_test.py::TestIOLoopRunSync::test_timeout",
"tornado/test/ioloop_test.py::TestPeriodicCallback::test_basic",
"tornado/test/ioloop_test.py::TestPeriodicCallback::test_overrun"
]
| []
| Apache License 2.0 | 767 | [
"tornado/ioloop.py"
]
| [
"tornado/ioloop.py"
]
|
|
python-oca__python-oca-34 | 6a419381025efe6034cc63b5a03c5b40121b32f8 | 2016-09-21 20:39:53 | 2536ec08fbf7ec2c55ee067a92aa23ec92762a34 | diff --git a/oca/vm.py b/oca/vm.py
index 7f5f83b..23c8e7b 100644
--- a/oca/vm.py
+++ b/oca/vm.py
@@ -230,11 +230,11 @@ class VirtualMachine(PoolElement):
'''
self._action('finalize')
- def restart(self):
+ def reboot(self, hard=False):
'''
- Resubmits the VM after failure
+ Reboot the VM. Optionally perform a hard reboot
'''
- self._action('restart')
+ self._action('reboot-hard' if hard else 'reboot')
def resubmit(self):
'''
| Incorrect method is called for vm reboot operation
Here is the list of actions that can be performed via the **one.vm.action** XML-RPC method of OpenNebula (http://docs.opennebula.org/4.14/integration/system_interfaces/api.html#actions-for-virtual-machine-management):
- delete
- boot
- shutdown
- suspend
- hold
- stop
- resume
- release
- poweroff
- reboot
The last operation is **reboot**, but in `vm.py` (line 233) we have:
```
def restart(self):
'''
Resubmits the VM after failure
'''
self._action('restart')
```
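For reference, the accompanying patch replaces this with a `reboot` method. Here is a minimal sketch plus usage, mirroring that patch (the `hard` flag is the patch's addition, and `vm` stands for any existing `oca.VirtualMachine` instance):
```python
def reboot(self, hard=False):
    '''Reboot the VM, optionally performing a hard reboot.'''
    self._action('reboot-hard' if hard else 'reboot')

# usage on an authenticated VirtualMachine instance:
vm.reboot()           # graceful reboot
vm.reboot(hard=True)  # hard reboot
```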
As a result of the unsupported `restart` action, an error appears when trying to restart a VM:
```
File "/usr/local/lib/python2.7/dist-packages/oca/vm.py", line 237, in restart
self._action('restart')
File "/usr/local/lib/python2.7/dist-packages/oca/vm.py", line 252, in _action
self.client.call(self.METHODS['action'], action, self.id)
File "/usr/local/lib/python2.7/dist-packages/oca/__init__.py", line 123, in call
raise OpenNebulaException(data)
oca.exceptions.OpenNebulaException: [VirtualMachineAction] Virtual machine action "restart" is not supported
``` | python-oca/python-oca | diff --git a/oca/tests/test_virtualmachine.py b/oca/tests/test_virtualmachine.py
index fc4303f..0a09642 100644
--- a/oca/tests/test_virtualmachine.py
+++ b/oca/tests/test_virtualmachine.py
@@ -91,7 +91,7 @@ class TestVirtualMachine(unittest.TestCase):
oca.client = oca.Client('test:test')
vm = oca.VirtualMachine(self.xml, self.client)
for action in ['shutdown', 'shutdown_hard', 'poweroff', 'poweroff_hard', 'hold', 'release', 'stop', 'cancel',
- 'suspend', 'resume', 'restart', 'finalize', 'delete']:
+ 'suspend', 'resume', 'reboot', 'finalize', 'delete']:
self.client.call = Mock(return_value='')
getattr(vm, action)()
if action in ('shutdown_hard', 'poweroff_hard', 'undeploy_hard'):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 4.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"mock",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
nose==1.3.7
-e git+https://github.com/python-oca/python-oca.git@6a419381025efe6034cc63b5a03c5b40121b32f8#egg=oca
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: python-oca
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- mock==5.2.0
- nose==1.3.7
prefix: /opt/conda/envs/python-oca
| [
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_actions"
]
| []
| [
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_History_repr",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_acces_items_not_using_brackets",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_acces_items_using_brackets",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_allocate",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_convert_types",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_deploy",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_lcm_states",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_live_migrate",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_migrate",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_new_with_id",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_no_history_records_element",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_raise_exception_Index_Error_when_using_brackets",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_repr",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_resubmit",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_save_disk",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_states",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_update",
"oca/tests/test_virtualmachine.py::TestVirtualMachine::test_user_template_variables"
]
| []
| null | 768 | [
"oca/vm.py"
]
| [
"oca/vm.py"
]
|
|
makerslocal__dooblr-28 | 6c73cbc543a68b8112f9c6eb748d8a44ee100ee0 | 2016-09-22 06:35:39 | bb927c1724b837db5fd25484f5f2e514877a5bfe | diff --git a/README.rst b/README.rst
index 6306c32..b1bd1f1 100644
--- a/README.rst
+++ b/README.rst
@@ -58,7 +58,7 @@ configs are used to tell **dooblr** which topics and pieces of data need to be p
Let's say you have a device that publishes to the ``home/kitchen/fridge/temperature`` and
``home/kitchen/freezer/temperature`` topics with the data::
- {"temperature": 40.1, "units":"F", "humidity": 0.12}
+ {"temperature": 40.1, "units":"F", "humidity": 0.12, "label": "blue"}
You would probably want to create a measurement config called ``temperature.yml`` that looks like::
@@ -73,9 +73,11 @@ You would probably want to create a measurement config called ``temperature.yml`
- humidity
tags:
- units
+ optional_tags:
+ - label # Maybe not every message on these topics have a "label" property!
Notice that there can be multiple topics, fields, and tags. Tags and fields refer to the tags and fields used in
-InfluxDB.
+InfluxDB. Optional tags will not raise an error if they weren't defined in the MQTT message, while regular tags will.
.. |Build Status| image:: https://travis-ci.org/makerslocal/dooblr.svg?branch=master
:target: https://travis-ci.org/makerslocal/dooblr
diff --git a/dooblr/config.py b/dooblr/config.py
index 1f0e389..e6476b9 100644
--- a/dooblr/config.py
+++ b/dooblr/config.py
@@ -99,23 +99,31 @@ class MeasurementConfig(object):
self.measurements[measurement] = {}
if "fields" not in self._config[measurement]:
- raise DooblrConfigError("Measurement {m} does not contain required option 'fields'".format(
+ raise DooblrConfigError("Measurement {m} does not contain required property 'fields'".format(
m=measurement))
else:
self.measurements[measurement]["fields"] = self._listify(self._config[measurement]["fields"])
if "topics" not in self._config[measurement]:
- raise DooblrConfigError("Measurement {m} does not contain required option 'topics'".format(
+ raise DooblrConfigError("Measurement {m} does not contain required property 'topics'".format(
m=measurement))
else:
self.measurements[measurement]["topics"] = self._listify(self._config[measurement]["topics"])
if "tags" not in self._config[measurement]:
- self._logger.info("Measurement {m} does not contain optional option 'tags'".format(m=measurement))
+ self._logger.info("Measurement {m} does not contain optional property 'tags'".format(m=measurement))
self.measurements[measurement]["tags"] = []
else:
self.measurements[measurement]["tags"] = self._listify(self._config[measurement]["tags"])
+ if "optional_tags" not in self._config[measurement]:
+ self._logger.info("Measurement {m} does not contain optional property 'optional_tags'".format(
+ m=measurement))
+ self.measurements[measurement]["optional_tags"] = []
+ else:
+ self.measurements[measurement]["optional_tags"] = self._listify(
+ self._config[measurement]["optional_tags"])
+
@staticmethod
def _listify(items):
item_list = items
@@ -129,7 +137,8 @@ class MeasurementConfig(object):
"my_measurement": {
"fields": ["important_value"],
"topics": ["dooblr/testing/device"],
- "tags": ["tag1", "tag2"]
+ "tags": ["tag1", "tag2"],
+ "optional_tags": ["option"]
}}
with open(path, 'w') as f:
yaml.dump(sample_config, f, default_flow_style=False)
diff --git a/dooblr/mqttclient.py b/dooblr/mqttclient.py
index 128966e..f1cc668 100644
--- a/dooblr/mqttclient.py
+++ b/dooblr/mqttclient.py
@@ -74,6 +74,12 @@ class MqttClient(object):
try:
parsed_message["tags"][tag] = message[tag]
except KeyError:
- raise DooblrMqttError("Message does not contain tag '{field}'".format(field=tag))
+ raise DooblrMqttError("Message does not contain required tag '{tag}'".format(tag=tag))
+
+ for tag in measurement["optional_tags"]:
+ try:
+ parsed_message["tags"][tag] = message[tag]
+ except KeyError:
+ self._logger.info("Message does not contain optional tag '{tag}'".format(tag=tag))
return parsed_message
| Make tags optional, not required, for a message to be valid | makerslocal/dooblr | diff --git a/test/test_config.py b/test/test_config.py
index b297bac..f1c6b10 100644
--- a/test/test_config.py
+++ b/test/test_config.py
@@ -15,8 +15,8 @@ class MainConfigParseTestCase(unittest.TestCase):
def test_partial_section_does_not_raise_error(self):
config = MainConfig()
config_text = "\n".join((
- u"mqtt:",
- " host: blah"))
+ u"mqtt:",
+ " host: blah"))
try:
config._parse(config_text)
except Exception as e:
@@ -28,39 +28,39 @@ class MeasurementConfigParseTestCase(unittest.TestCase):
def test_missing_field_value_raises_error(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics:",
- " - ml256/topic/device",
- " - ml256/topic/otherdevice",
- " tags:",
- " - supertag",
- " - awesometag"))
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " - ml256/topic/otherdevice",
+ " tags:",
+ " - supertag",
+ " - awesometag"))
with self.assertRaises(DooblrConfigError):
config._parse(config_text)
def test_missing_topic_value_raises_error(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " fields:",
- " - coolfield",
- " - neatfield",
- " tags: ",
- " - supertag",
- " - awesometag"))
+ u"measurement:",
+ " fields:",
+ " - coolfield",
+ " - neatfield",
+ " tags: ",
+ " - supertag",
+ " - awesometag"))
with self.assertRaises(DooblrConfigError):
config._parse(config_text)
def test_missing_tag_value_does_not_raise_error(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics:",
- " - ml256/topic/device",
- " - ml256/topic/otherdevice",
- " fields:",
- " - coolfield",
- " - neatfield"))
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " - ml256/topic/otherdevice",
+ " fields:",
+ " - coolfield",
+ " - neatfield"))
try:
config._parse(config_text)
except DooblrConfigError as e:
@@ -69,32 +69,61 @@ class MeasurementConfigParseTestCase(unittest.TestCase):
def test_valid_config_does_not_raise_error(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics:",
- " - ml256/topic/device",
- " - ml256/topic/otherdevice",
- " fields: ",
- " - coolfield",
- " - neatfield",
- " tags: ",
- " - supertag",
- " - awesometag"))
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " - ml256/topic/otherdevice",
+ " fields: ",
+ " - coolfield",
+ " - neatfield",
+ " tags: ",
+ " - supertag",
+ " - awesometag"))
try:
config._parse(config_text)
except DooblrConfigError as e:
self.fail("Valid config raised an exception! ({e})".format(e=e))
+ def test_single_optional_tag_is_parsed(self):
+ config = MeasurementConfig()
+ config_text = "\n".join((
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " fields: ",
+ " - coolfield",
+ " optional_tags: ",
+ " - optiontag"))
+
+ config._parse(config_text)
+ self.assertEquals(config.measurements["measurement"]["optional_tags"], ["optiontag"])
+
+ def test_multiple_optional_tags_are_parsed(self):
+ config = MeasurementConfig()
+ config_text = "\n".join((
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " fields: ",
+ " - coolfield",
+ " optional_tags: ",
+ " - optiontag",
+ " - label"))
+
+ config._parse(config_text)
+ self.assertEquals(config.measurements["measurement"]["optional_tags"], ["optiontag", "label"])
+
def test_single_topic_is_parsed(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics: ml256/topic/device",
- " fields: ",
- " - coolfield",
- " - neatfield",
- " tags: ",
- " - supertag",
- " - awesometag"))
+ u"measurement:",
+ " topics: ml256/topic/device",
+ " fields: ",
+ " - coolfield",
+ " - neatfield",
+ " tags: ",
+ " - supertag",
+ " - awesometag"))
config._parse(config_text)
self.assertEquals(config.measurements["measurement"]["topics"], ["ml256/topic/device"])
@@ -102,16 +131,16 @@ class MeasurementConfigParseTestCase(unittest.TestCase):
def test_multiple_topics_are_parsed(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics:",
- " - ml256/topic/device",
- " - ml256/topic/otherdevice",
- " fields: ",
- " - coolfield",
- " - neatfield",
- " tags: ",
- " - supertag",
- " - awesometag"))
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " - ml256/topic/otherdevice",
+ " fields: ",
+ " - coolfield",
+ " - neatfield",
+ " tags: ",
+ " - supertag",
+ " - awesometag"))
config._parse(config_text)
self.assertEquals(config.measurements["measurement"]["topics"], ["ml256/topic/device", "ml256/topic/otherdevice"])
@@ -151,14 +180,14 @@ class MeasurementConfigParseTestCase(unittest.TestCase):
def test_single_tag_is_parsed(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics:",
- " - ml256/topic/device",
- " - ml256/topic/otherdevice",
- " fields:",
- " - coolfield",
- " - neatfield",
- " tags: supertag"))
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " - ml256/topic/otherdevice",
+ " fields:",
+ " - coolfield",
+ " - neatfield",
+ " tags: supertag"))
config._parse(config_text)
self.assertEquals(config.measurements["measurement"]["tags"], ["supertag"])
@@ -166,16 +195,16 @@ class MeasurementConfigParseTestCase(unittest.TestCase):
def test_multiple_tags_are_parsed(self):
config = MeasurementConfig()
config_text = "\n".join((
- u"measurement:",
- " topics:",
- " - ml256/topic/device",
- " - ml256/topic/otherdevice",
- " fields:",
- " - coolfield",
- " - neatfield",
- " tags:",
- " - supertag",
- " - awesometag"))
+ u"measurement:",
+ " topics:",
+ " - ml256/topic/device",
+ " - ml256/topic/otherdevice",
+ " fields:",
+ " - coolfield",
+ " - neatfield",
+ " tags:",
+ " - supertag",
+ " - awesometag"))
config._parse(config_text)
self.assertEquals(config.measurements["measurement"]["tags"], ["supertag", "awesometag"])
diff --git a/test/test_mqttclient.py b/test/test_mqttclient.py
index df65079..d4742b3 100644
--- a/test/test_mqttclient.py
+++ b/test/test_mqttclient.py
@@ -4,30 +4,58 @@ from dooblr import mqttclient
class ParseMessageTestCase(unittest.TestCase):
- def test_fields_and_tags_are_parsed(self):
+ def test_good_message_is_parsed(self):
client = mqttclient.MqttClient(None)
- client._measurements = {"environment":
- {"fields": ["temperature", "humidity"],
- "tags": ["location", "address"]
- }
- }
+ client._measurements = {
+ "environment": {
+ "fields": [
+ "temperature",
+ "humidity"
+ ],
+ "tags": [
+ "location",
+ "address"
+ ],
+ "optional_tags": [
+ "machine",
+ "label"
+ ]
+ }
+ }
- parsed = client._parse_message("environment", '{"temperature": 25.6, "humidity": 19.44, "location": "tool", "address": "1dec40"}')
+ parsed = client._parse_message("environment", '{"temperature": 25.6, "humidity": 19.44, "location": "tool", "address": "1dec40", "machine": "ts9000", "label": "blue"}')
expected = {"measurement": "environment",
"fields": {"temperature": 25.6, "humidity": 19.44},
- "tags": {"location": u"tool", "address": u"1dec40"}}
+ "tags": {"location": u"tool", "address": u"1dec40", "machine": u"ts9000", "label": u"blue"}}
self.assertEquals(parsed, expected)
- def test_missing_tag_raises_error(self):
+ def test_missing_required_tag_raises_error(self):
client = mqttclient.MqttClient(None)
- client._measurements = {"environment":
- {"fields": ["temperature"],
- "tags": ["location"]
- }
- }
+ client._measurements = {
+ "environment": {
+ "fields": ["temperature"],
+ "tags": ["location"]
+ }
+ }
+
with self.assertRaises(mqttclient.DooblrMqttError):
client._parse_message("environment", '{"temperature": 25.6}')
+ def test_missing_optional_tag_does_not_raise_error(self):
+ client = mqttclient.MqttClient(None)
+ client._measurements = {
+ "environment": {
+ "fields": ["temperature"],
+ "tags": ["location"],
+ "optional_tags": ["machine_type"]
+ }
+ }
+
+ try:
+ client._parse_message("environment", '{"temperature": 25.6, "location":"kitchen"}')
+ except Exception as e:
+ self.fail("Unexpected error was raised: {e}".format(e=e))
+
def test_missing_field_raises_error(self):
client = mqttclient.MqttClient(None)
client._measurements = {"environment":
@@ -36,4 +64,4 @@ class ParseMessageTestCase(unittest.TestCase):
}
}
with self.assertRaises(mqttclient.DooblrMqttError):
- client._parse_message("environment", '{"location": "tool"}')
\ No newline at end of file
+ client._parse_message("environment", '{"location": "tool"}')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 3
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"flake8",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
configparser==7.2.0
-e git+https://github.com/makerslocal/dooblr.git@6c73cbc543a68b8112f9c6eb748d8a44ee100ee0#egg=dooblr
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
influxdb==5.3.2
iniconfig==2.1.0
mccabe==0.7.0
msgpack==1.1.0
nose==1.3.7
packaging==24.2
paho-mqtt==2.1.0
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
requests==2.32.3
schema==0.7.7
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dooblr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- configparser==7.2.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- influxdb==5.3.2
- iniconfig==2.1.0
- mccabe==0.7.0
- msgpack==1.1.0
- nose==1.3.7
- packaging==24.2
- paho-mqtt==2.1.0
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- requests==2.32.3
- schema==0.7.7
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dooblr
| [
"test/test_config.py::MeasurementConfigParseTestCase::test_multiple_optional_tags_are_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_single_optional_tag_is_parsed",
"test/test_mqttclient.py::ParseMessageTestCase::test_good_message_is_parsed"
]
| []
| [
"test/test_config.py::MainConfigParseTestCase::test_empty_section_does_not_raise_error",
"test/test_config.py::MainConfigParseTestCase::test_partial_section_does_not_raise_error",
"test/test_config.py::MeasurementConfigParseTestCase::test_missing_field_value_raises_error",
"test/test_config.py::MeasurementConfigParseTestCase::test_missing_tag_value_does_not_raise_error",
"test/test_config.py::MeasurementConfigParseTestCase::test_missing_topic_value_raises_error",
"test/test_config.py::MeasurementConfigParseTestCase::test_multiple_fields_are_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_multiple_tags_are_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_multiple_topics_are_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_single_field_is_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_single_tag_is_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_single_topic_is_parsed",
"test/test_config.py::MeasurementConfigParseTestCase::test_valid_config_does_not_raise_error",
"test/test_mqttclient.py::ParseMessageTestCase::test_missing_field_raises_error",
"test/test_mqttclient.py::ParseMessageTestCase::test_missing_optional_tag_does_not_raise_error",
"test/test_mqttclient.py::ParseMessageTestCase::test_missing_required_tag_raises_error"
]
| []
| ISC License | 769 | [
"README.rst",
"dooblr/mqttclient.py",
"dooblr/config.py"
]
| [
"README.rst",
"dooblr/mqttclient.py",
"dooblr/config.py"
]
|
|
box__box-python-sdk-178 | cc4921422bb070e5547c0c054c1d2c554380dba5 | 2016-09-22 06:38:23 | 8e192566e678490f540e3ece5ccb7eced975aa89 | boxcla: Verified that @jmoldow has signed the CLA. Thanks for the pull request! | diff --git a/README.rst b/README.rst
index 21b3f06..b157c9d 100644
--- a/README.rst
+++ b/README.rst
@@ -323,12 +323,12 @@ These users can then be authenticated:
ned_auth = JWTAuth(
client_id='YOUR_CLIENT_ID',
client_secret='YOUR_CLIENT_SECRET',
- enterprise_id='YOUR_ENTERPRISE_ID',
+ user=ned_stark_user,
jwt_key_id='YOUR_JWT_KEY_ID',
rsa_private_key_file_sys_path='CERT.PEM',
store_tokens=your_store_tokens_callback_method,
)
- ned_auth.authenticate_app_user(ned_stark_user)
+ ned_auth.authenticate_user()
ned_client = Client(ned_auth)
Requests made with ``ned_client`` (or objects returned from ``ned_client``'s methods)
@@ -396,7 +396,7 @@ Customization
Custom Subclasses
~~~~~~~~~~~~~~~~~
-Custom subclasses of any SDK object with an ``_item_type`` field can be defined:
+Custom object subclasses can be defined:
.. code-block:: pycon
@@ -407,12 +407,13 @@ Custom subclasses of any SDK object with an ``_item_type`` field can be defined:
pass
client = Client(oauth)
+ client.translator.register('folder', MyFolderSubclass)
folder = client.folder('0')
>>> print folder
>>> <Box MyFolderSubclass - 0>
-If a subclass of an SDK object with an ``_item_type`` field is defined, instances of this subclass will be
+If an object subclass is registered in this way, instances of this subclass will be
returned from all SDK methods that previously returned an instance of the parent. See ``BaseAPIJSONObjectMeta``
and ``Translator`` to see how the SDK performs dynamic lookups to determine return types.
diff --git a/boxsdk/auth/jwt_auth.py b/boxsdk/auth/jwt_auth.py
index 2d1f895..e536e85 100644
--- a/boxsdk/auth/jwt_auth.py
+++ b/boxsdk/auth/jwt_auth.py
@@ -9,8 +9,10 @@
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
import jwt
+from six import string_types, text_type
from .oauth2 import OAuth2
+from ..object.user import User
from ..util.compat import total_seconds
@@ -28,6 +30,7 @@ def __init__(
jwt_key_id,
rsa_private_key_file_sys_path,
rsa_private_key_passphrase=None,
+ user=None,
store_tokens=None,
box_device_id='0',
box_device_name='',
@@ -35,7 +38,12 @@ def __init__(
network_layer=None,
jwt_algorithm='RS256',
):
- """
+ """Extends baseclass method.
+
+ If both `enterprise_id` and `user` are non-`None`, the `user` takes
+ precedence when `refresh()` is called. This can be overruled with a
+ call to `authenticate_instance()`.
+
:param client_id:
Box API key used for identifying the application the user is authenticating with.
:type client_id:
@@ -46,8 +54,15 @@ def __init__(
`unicode`
:param enterprise_id:
The ID of the Box Developer Edition enterprise.
+
+ May be `None`, if the caller knows that it will not be
+ authenticating as an enterprise instance / service account.
+
+ If `user` is passed, this value is not used, unless
+ `authenticate_instance()` is called to clear the user and
+ authenticate as the enterprise instance.
:type enterprise_id:
- `unicode`
+ `unicode` or `None`
:param jwt_key_id:
Key ID for the JWT assertion.
:type jwt_key_id:
@@ -60,6 +75,27 @@ def __init__(
Passphrase used to unlock the private key. Do not pass a unicode string - this must be bytes.
:type rsa_private_key_passphrase:
`str` or None
+ :param user:
+ (optional) The user to authenticate, expressed as a Box User ID or
+ as a :class:`User` instance.
+
+ This value is not required. But if it is provided, then the user
+ will be auto-authenticated at the time of the first API call or
+ when calling `authenticate_user()` without any arguments.
+
+ Should be `None` if the intention is to authenticate as the
+ enterprise instance / service account. If both `enterprise_id` and
+ `user` are non-`None`, the `user` takes precedense when `refresh()`
+ is called.
+
+ May be one of this application's created App User. Depending on the
+ configured User Access Level, may also be any other App User or
+ Managed User in the enterprise.
+
+ <https://docs.box.com/docs/configuring-box-platform#section-3-enabling-app-auth-and-app-users>
+ <https://docs.box.com/docs/authentication#section-choosing-an-authentication-type>
+ :type user:
+ `unicode` or :class:`User` or `None`
:param store_tokens:
Optional callback for getting access to tokens for storing them.
:type store_tokens:
@@ -85,6 +121,7 @@ def __init__(
:type jwt_algorithm:
`unicode`
"""
+ user_id = self._normalize_user_id(user)
super(JWTAuth, self).__init__(
client_id,
client_secret,
@@ -104,12 +141,12 @@ def __init__(
self._enterprise_id = enterprise_id
self._jwt_algorithm = jwt_algorithm
self._jwt_key_id = jwt_key_id
- self._user_id = None
+ self._user_id = user_id
def _auth_with_jwt(self, sub, sub_type):
"""
Get an access token for use with Box Developer Edition. Pass an enterprise ID to get an enterprise token
- (which can be used to provision/deprovision users), or a user ID to get an app user token.
+ (which can be used to provision/deprovision users), or a user ID to get a user token.
:param sub:
The enterprise ID or user ID to auth.
@@ -157,31 +194,92 @@ def _auth_with_jwt(self, sub, sub_type):
data['box_device_name'] = self._box_device_name
return self.send_token_request(data, access_token=None, expect_refresh_token=False)[0]
- def authenticate_app_user(self, user):
+ def authenticate_user(self, user=None):
"""
- Get an access token for an App User (part of Box Developer Edition).
+ Get an access token for a User.
+
+ May be one of this application's created App User. Depending on the
+ configured User Access Level, may also be any other App User or Managed
+ User in the enterprise.
+
+ <https://docs.box.com/docs/configuring-box-platform#section-3-enabling-app-auth-and-app-users>
+ <https://docs.box.com/docs/authentication#section-choosing-an-authentication-type>
:param user:
- The user to authenticate.
+ (optional) The user to authenticate, expressed as a Box User ID or
+ as a :class:`User` instance.
+
+ If not given, then the most recently provided user ID, if
+ available, will be used.
:type user:
- :class:`User`
+ `unicode` or :class:`User`
+ :raises:
+ :exc:`ValueError` if no user ID was passed and the object is not
+ currently configured with one.
:return:
- The access token for the app user.
+ The access token for the user.
:rtype:
`unicode`
"""
- sub = self._user_id = user.object_id
+ sub = self._normalize_user_id(user) or self._user_id
+ if not sub:
+ raise ValueError("authenticate_user: Requires the user ID, but it was not provided.")
+ self._user_id = sub
return self._auth_with_jwt(sub, 'user')
- def authenticate_instance(self):
+ authenticate_app_user = authenticate_user
+
+ @classmethod
+ def _normalize_user_id(cls, user):
+ """Get a Box user ID from a selection of supported param types.
+
+ :param user:
+ An object representing the user or user ID.
+
+ Currently supported types are `unicode` (which represents the user
+ ID) and :class:`User`.
+
+ If `None`, returns `None`.
+ :raises: :exc:`TypeError` for unsupported types.
+ :rtype: `unicode` or `None`
+ """
+ if user is None:
+ return None
+ if isinstance(user, User):
+ return user.object_id
+ if isinstance(user, string_types):
+ return text_type(user)
+ raise TypeError("Got unsupported type {0!r} for user.".format(user.__class__.__name__))
+
+ def authenticate_instance(self, enterprise=None):
"""
Get an access token for a Box Developer Edition enterprise.
+ :param enterprise:
+ The ID of the Box Developer Edition enterprise.
+
+ Optional if the value was already given to `__init__`,
+ otherwise required.
+ :type enterprise: `unicode` or `None`
+ :raises:
+ :exc:`ValueError` if `None` was passed for the enterprise ID here
+ and in `__init__`, or if the non-`None` value passed here does not
+ match the non-`None` value passed to `__init__`.
:return:
The access token for the enterprise which can provision/deprovision app users.
:rtype:
`unicode`
"""
+ enterprises = [enterprise, self._enterprise_id]
+ if not any(enterprises):
+ raise ValueError("authenticate_instance: Requires the enterprise ID, but it was not provided.")
+ if all(enterprises) and (enterprise != self._enterprise_id):
+ raise ValueError(
+ "authenticate_instance: Given enterprise ID {given_enterprise!r}, but {auth} already has ID {existing_enterprise!r}"
+ .format(auth=self, given_enterprise=enterprise, existing_enterprise=self._enterprise_id)
+ )
+ if not self._enterprise_id:
+ self._enterprise_id = enterprise
self._user_id = None
return self._auth_with_jwt(self._enterprise_id, 'enterprise')
@@ -195,4 +293,4 @@ def _refresh(self, access_token):
if self._user_id is None:
return self.authenticate_instance()
else:
- return self._auth_with_jwt(self._user_id, 'user')
+ return self.authenticate_user()
diff --git a/boxsdk/session/box_session.py b/boxsdk/session/box_session.py
index fbccabf..a166a78 100644
--- a/boxsdk/session/box_session.py
+++ b/boxsdk/session/box_session.py
@@ -193,7 +193,8 @@ def _renew_session(self, access_token_used):
:type access_token_used:
`unicode`
"""
- self._oauth.refresh(access_token_used)
+ new_access_token, _ = self._oauth.refresh(access_token_used)
+ return new_access_token
@staticmethod
def _is_json_response(network_response):
@@ -390,6 +391,9 @@ def _make_request(
# Since there can be session renewal happening in the middle of preparing the request, it's important to be
# consistent with the access_token being used in the request.
access_token_will_be_used = self._oauth.access_token
+ if auto_session_renewal and (access_token_will_be_used is None):
+ access_token_will_be_used = self._renew_session(None)
+ auto_session_renewal = False
authorization_header = {'Authorization': 'Bearer {0}'.format(access_token_will_be_used)}
if headers is None:
headers = self._default_headers.copy()
diff --git a/requirements.txt b/requirements.txt
index 0e0ef69..5e33286 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,4 @@ pyjwt>=1.3.0
requests>=2.4.3
requests-toolbelt>=0.4.0
six >= 1.4.0
--e .
+-e .[all]
| Change authenticate_app_user method
With service accounts, you can use the method to authenticate as a managed user, not just app users. Should the method name be changed to better reflect this?
Also, we should change the name from Box Developer Edition to Box Platform (or even Box service accounts).
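A minimal sketch of the renamed call, mirroring the README change in the patch; every ID and path below is a placeholder, and the patch keeps `authenticate_app_user` as an alias of `authenticate_user`:
```python
from boxsdk import JWTAuth, Client

auth = JWTAuth(
    client_id='CLIENT_ID',
    client_secret='CLIENT_SECRET',
    enterprise_id=None,  # optional when authenticating as a user
    jwt_key_id='JWT_KEY_ID',
    rsa_private_key_file_sys_path='cert.pem',
    user='USER_ID',  # an App User or, via service accounts, a managed user
)
auth.authenticate_user()  # formerly authenticate_app_user
client = Client(auth)
```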
| box/box-python-sdk | diff --git a/test/unit/auth/test_jwt_auth.py b/test/unit/auth/test_jwt_auth.py
index 63b553c..e32a686 100644
--- a/test/unit/auth/test_jwt_auth.py
+++ b/test/unit/auth/test_jwt_auth.py
@@ -1,9 +1,10 @@
# coding: utf-8
-from __future__ import unicode_literals
+from __future__ import absolute_import, unicode_literals
from contextlib import contextmanager
from datetime import datetime, timedelta
+from itertools import product
import json
import random
import string
@@ -11,6 +12,7 @@
from cryptography.hazmat.backends import default_backend
from mock import Mock, mock_open, patch, sentinel
import pytest
+from six import string_types, text_type
from boxsdk.auth.jwt_auth import JWTAuth
from boxsdk.config import API
@@ -50,68 +52,138 @@ def successful_token_response(successful_token_mock, successful_token_json_respo
return successful_token_mock
-@contextmanager
-def jwt_auth_init_mocks(
- mock_network_layer,
- successful_token_response,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
- enterprise_id=None,
-):
[email protected]
+def jwt_auth_init_mocks(mock_network_layer, successful_token_response, jwt_algorithm, jwt_key_id, rsa_passphrase):
# pylint:disable=redefined-outer-name
- fake_client_id = 'fake_client_id'
- fake_client_secret = 'fake_client_secret'
- assertion = Mock()
- data = {
- 'grant_type': JWTAuth._GRANT_TYPE, # pylint:disable=protected-access
- 'client_id': fake_client_id,
- 'client_secret': fake_client_secret,
- 'assertion': assertion,
- 'box_device_id': '0',
- 'box_device_name': 'my_awesome_device',
- }
-
- mock_network_layer.request.return_value = successful_token_response
- key_file_read_data = b'key_file_read_data'
- with patch('boxsdk.auth.jwt_auth.open', mock_open(read_data=key_file_read_data), create=True) as jwt_auth_open:
- with patch('cryptography.hazmat.primitives.serialization.load_pem_private_key') as load_pem_private_key:
- oauth = JWTAuth(
- client_id=fake_client_id,
- client_secret=fake_client_secret,
- enterprise_id=enterprise_id,
- rsa_private_key_file_sys_path=sentinel.rsa_path,
- rsa_private_key_passphrase=rsa_passphrase,
- network_layer=mock_network_layer,
- box_device_name='my_awesome_device',
- jwt_algorithm=jwt_algorithm,
- jwt_key_id=jwt_key_id,
- )
- jwt_auth_open.assert_called_once_with(sentinel.rsa_path, 'rb')
- jwt_auth_open.return_value.read.assert_called_once_with() # pylint:disable=no-member
- load_pem_private_key.assert_called_once_with(
- key_file_read_data,
- password=rsa_passphrase,
- backend=default_backend(),
+ @contextmanager
+ def _jwt_auth_init_mocks(**kwargs):
+ assert_authed = kwargs.pop('assert_authed', True)
+ fake_client_id = 'fake_client_id'
+ fake_client_secret = 'fake_client_secret'
+ assertion = Mock()
+ data = {
+ 'grant_type': JWTAuth._GRANT_TYPE, # pylint:disable=protected-access
+ 'client_id': fake_client_id,
+ 'client_secret': fake_client_secret,
+ 'assertion': assertion,
+ 'box_device_id': '0',
+ 'box_device_name': 'my_awesome_device',
+ }
+
+ mock_network_layer.request.return_value = successful_token_response
+ key_file_read_data = b'key_file_read_data'
+ with patch('boxsdk.auth.jwt_auth.open', mock_open(read_data=key_file_read_data), create=True) as jwt_auth_open:
+ with patch('cryptography.hazmat.primitives.serialization.load_pem_private_key') as load_pem_private_key:
+ oauth = JWTAuth(
+ client_id=fake_client_id,
+ client_secret=fake_client_secret,
+ rsa_private_key_file_sys_path=sentinel.rsa_path,
+ rsa_private_key_passphrase=rsa_passphrase,
+ network_layer=mock_network_layer,
+ box_device_name='my_awesome_device',
+ jwt_algorithm=jwt_algorithm,
+ jwt_key_id=jwt_key_id,
+ enterprise_id=kwargs.pop('enterprise_id', None),
+ **kwargs
+ )
+
+ jwt_auth_open.assert_called_once_with(sentinel.rsa_path, 'rb')
+ jwt_auth_open.return_value.read.assert_called_once_with() # pylint:disable=no-member
+ load_pem_private_key.assert_called_once_with(
+ key_file_read_data,
+ password=rsa_passphrase,
+ backend=default_backend(),
+ )
+
+ yield oauth, assertion, fake_client_id, load_pem_private_key.return_value
+
+ if assert_authed:
+ mock_network_layer.request.assert_called_once_with(
+ 'POST',
+ '{0}/token'.format(API.OAUTH2_API_URL),
+ data=data,
+ headers={'content-type': 'application/x-www-form-urlencoded'},
+ access_token=None,
)
+ assert oauth.access_token == successful_token_response.json()['access_token']
- yield oauth, assertion, fake_client_id, load_pem_private_key.return_value
+ return _jwt_auth_init_mocks
- mock_network_layer.request.assert_called_once_with(
- 'POST',
- '{0}/token'.format(API.OAUTH2_API_URL),
- data=data,
- headers={'content-type': 'application/x-www-form-urlencoded'},
- access_token=None,
- )
- assert oauth.access_token == successful_token_response.json()['access_token']
+def test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor(jwt_auth_init_and_auth_mocks):
+ user = 'fake_user_id'
+ with jwt_auth_init_and_auth_mocks(sub=user, sub_type='user', enterprise_id='fake_enterprise_id', user=user) as oauth:
+ oauth.refresh(None)
-@contextmanager
-def jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, sub, sub_type, oauth, assertion, client_id, secret):
- # pylint:disable=redefined-outer-name
- with patch('jwt.encode') as jwt_encode:
+
[email protected]('jwt_auth_method_name', ['authenticate_user', 'authenticate_instance'])
+def test_authenticate_raises_value_error_if_sub_was_never_given(jwt_auth_init_mocks, jwt_auth_method_name):
+ with jwt_auth_init_mocks(assert_authed=False) as params:
+ auth = params[0]
+ authenticate_method = getattr(auth, jwt_auth_method_name)
+ with pytest.raises(ValueError):
+ authenticate_method()
+
+
+def test_jwt_auth_constructor_raises_type_error_if_user_is_unsupported_type(jwt_auth_init_mocks):
+ with pytest.raises(TypeError):
+ with jwt_auth_init_mocks(user=object()):
+ assert False
+
+
+def test_authenticate_user_raises_type_error_if_user_is_unsupported_type(jwt_auth_init_mocks):
+ with jwt_auth_init_mocks(assert_authed=False) as params:
+ auth = params[0]
+ with pytest.raises(TypeError):
+ auth.authenticate_user(object())
+
+
[email protected]('user_id_for_init', [None, 'fake_user_id_1'])
+def test_authenticate_user_saves_user_id_for_future_calls(jwt_auth_init_and_auth_mocks, user_id_for_init, jwt_encode):
+
+ def assert_jwt_encode_call_args(user_id):
+ assert jwt_encode.call_args[0][0]['sub'] == user_id
+ assert jwt_encode.call_args[0][0]['box_sub_type'] == 'user'
+ jwt_encode.call_args = None
+
+ with jwt_auth_init_and_auth_mocks(sub=None, sub_type=None, assert_authed=False, user=user_id_for_init) as auth:
+ for new_user_id in ['fake_user_id_2', 'fake_user_id_3']:
+ auth.authenticate_user(new_user_id)
+ assert_jwt_encode_call_args(new_user_id)
+ auth.authenticate_user()
+ assert_jwt_encode_call_args(new_user_id)
+
+
+def test_authenticate_instance_raises_value_error_if_different_enterprise_id_is_given(jwt_auth_init_mocks):
+ with jwt_auth_init_mocks(enterprise_id='fake_enterprise_id_1', assert_authed=False) as params:
+ auth = params[0]
+ with pytest.raises(ValueError):
+ auth.authenticate_instance('fake_enterprise_id_2')
+
+
+def test_authenticate_instance_saves_enterprise_id_for_future_calls(jwt_auth_init_and_auth_mocks):
+ enterprise_id = 'fake_enterprise_id'
+ with jwt_auth_init_and_auth_mocks(sub=enterprise_id, sub_type='enterprise', assert_authed=False) as auth:
+ auth.authenticate_instance(enterprise_id)
+ auth.authenticate_instance()
+ auth.authenticate_instance(enterprise_id)
+ with pytest.raises(ValueError):
+ auth.authenticate_instance('fake_enterprise_id_2')
+
+
[email protected]_fixture
+def jwt_encode():
+ with patch('jwt.encode') as patched_jwt_encode:
+ yield patched_jwt_encode
+
+
[email protected]
+def jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, jwt_encode):
+
+ @contextmanager
+ def _jwt_auth_auth_mocks(sub, sub_type, oauth, assertion, client_id, secret, assert_authed=True):
+ # pylint:disable=redefined-outer-name
with patch('boxsdk.auth.jwt_auth.datetime') as mock_datetime:
with patch('boxsdk.auth.jwt_auth.random.SystemRandom') as mock_system_random:
jwt_encode.return_value = assertion
@@ -129,88 +201,79 @@ def jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, sub, sub_type, oa
yield oauth
- system_random.randint.assert_called_once_with(16, 128)
- assert len(system_random.random.mock_calls) == jti_length
- jwt_encode.assert_called_once_with({
- 'iss': client_id,
- 'sub': sub,
- 'box_sub_type': sub_type,
- 'aud': 'https://api.box.com/oauth2/token',
- 'jti': jti,
- 'exp': exp,
- }, secret, algorithm=jwt_algorithm, headers={'kid': jwt_key_id})
-
-
-def test_authenticate_app_user_sends_post_request_with_correct_params(
- mock_network_layer,
- successful_token_response,
- jti_length,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
-):
+ if assert_authed:
+ system_random.randint.assert_called_once_with(16, 128)
+ assert len(system_random.random.mock_calls) == jti_length
+ jwt_encode.assert_called_once_with({
+ 'iss': client_id,
+ 'sub': sub,
+ 'box_sub_type': sub_type,
+ 'aud': 'https://api.box.com/oauth2/token',
+ 'jti': jti,
+ 'exp': exp,
+ }, secret, algorithm=jwt_algorithm, headers={'kid': jwt_key_id})
+
+ return _jwt_auth_auth_mocks
+
+
[email protected]
+def jwt_auth_init_and_auth_mocks(jwt_auth_init_mocks, jwt_auth_auth_mocks):
+
+ @contextmanager
+ def _jwt_auth_init_and_auth_mocks(sub, sub_type, *jwt_auth_init_mocks_args, **jwt_auth_init_mocks_kwargs):
+ assert_authed = jwt_auth_init_mocks_kwargs.pop('assert_authed', True)
+ with jwt_auth_init_mocks(*jwt_auth_init_mocks_args, assert_authed=assert_authed, **jwt_auth_init_mocks_kwargs) as params:
+ with jwt_auth_auth_mocks(sub, sub_type, *params, assert_authed=assert_authed) as oauth:
+ yield oauth
+
+ return _jwt_auth_init_and_auth_mocks
+
+
[email protected](
+ ('user', 'pass_in_init'),
+ list(product([str('fake_user_id'), text_type('fake_user_id'), User(None, 'fake_user_id')], [False, True])),
+)
+def test_authenticate_user_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks, user, pass_in_init):
# pylint:disable=redefined-outer-name
- fake_user_id = 'fake_user_id'
- with jwt_auth_init_mocks(mock_network_layer, successful_token_response, jwt_algorithm, jwt_key_id, rsa_passphrase) as params:
- with jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, fake_user_id, 'user', *params) as oauth:
- oauth.authenticate_app_user(User(None, fake_user_id))
-
-
-def test_authenticate_instance_sends_post_request_with_correct_params(
- mock_network_layer,
- successful_token_response,
- jti_length,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
-):
+ if isinstance(user, User):
+ user_id = user.object_id
+ elif isinstance(user, string_types):
+ user_id = user
+ else:
+ raise NotImplementedError
+ init_kwargs = {}
+ authenticate_params = []
+ if pass_in_init:
+ init_kwargs['user'] = user
+ else:
+ authenticate_params.append(user)
+ with jwt_auth_init_and_auth_mocks(user_id, 'user', **init_kwargs) as oauth:
+ oauth.authenticate_user(*authenticate_params)
+
+
[email protected](('pass_in_init', 'pass_in_auth'), [(True, False), (False, True), (True, True)])
+def test_authenticate_instance_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks, pass_in_init, pass_in_auth):
# pylint:disable=redefined-outer-name
enterprise_id = 'fake_enterprise_id'
- with jwt_auth_init_mocks(
- mock_network_layer,
- successful_token_response,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
- enterprise_id,
- ) as params:
- with jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, enterprise_id, 'enterprise', *params) as oauth:
- oauth.authenticate_instance()
-
-
-def test_refresh_app_user_sends_post_request_with_correct_params(
- mock_network_layer,
- successful_token_response,
- jti_length,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
-):
+ init_kwargs = {}
+ auth_params = []
+ if pass_in_init:
+ init_kwargs['enterprise_id'] = enterprise_id
+ if pass_in_auth:
+ auth_params.append(enterprise_id)
+ with jwt_auth_init_and_auth_mocks(enterprise_id, 'enterprise', **init_kwargs) as oauth:
+ oauth.authenticate_instance(*auth_params)
+
+
+def test_refresh_app_user_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks):
# pylint:disable=redefined-outer-name
fake_user_id = 'fake_user_id'
- with jwt_auth_init_mocks(mock_network_layer, successful_token_response, jwt_algorithm, jwt_key_id, rsa_passphrase) as params:
- with jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, fake_user_id, 'user', *params) as oauth:
- oauth._user_id = fake_user_id # pylint:disable=protected-access
- oauth.refresh(None)
-
-
-def test_refresh_instance_sends_post_request_with_correct_params(
- mock_network_layer,
- successful_token_response,
- jti_length,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
-):
+ with jwt_auth_init_and_auth_mocks(fake_user_id, 'user', user=fake_user_id) as oauth:
+ oauth.refresh(None)
+
+
+def test_refresh_instance_sends_post_request_with_correct_params(jwt_auth_init_and_auth_mocks):
# pylint:disable=redefined-outer-name
enterprise_id = 'fake_enterprise_id'
- with jwt_auth_init_mocks(
- mock_network_layer,
- successful_token_response,
- jwt_algorithm,
- jwt_key_id,
- rsa_passphrase,
- enterprise_id,
- ) as params:
- with jwt_auth_auth_mocks(jti_length, jwt_algorithm, jwt_key_id, enterprise_id, 'enterprise', *params) as oauth:
- oauth.refresh(None)
+ with jwt_auth_init_and_auth_mocks(enterprise_id, 'enterprise', enterprise_id=enterprise_id) as oauth:
+ oauth.refresh(None)
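
A note on the fixture pattern in the hunks above: the refactor replaces flat helper functions with pytest fixtures that return @contextmanager factories, so one fixture (jwt_auth_init_and_auth_mocks) can nest the context manager produced by another (jwt_auth_init_mocks) while teardown-time assertions stay optional via assert_authed. A minimal, self-contained sketch of that composition pattern (every name below is illustrative, not taken from the box-python-sdk suite):

    from contextlib import contextmanager

    import pytest


    @pytest.fixture
    def init_mocks():
        # Factory fixture: instead of yielding a value, it returns a
        # context manager, so the test decides when setup/teardown run.
        @contextmanager
        def _init_mocks(check=True, **kwargs):
            state = dict(kwargs)
            yield state
            if check:  # teardown-time assertion, like assert_authed above
                assert state.get('sub') is not None

        return _init_mocks


    @pytest.fixture
    def init_and_auth_mocks(init_mocks):
        # Composition: nest the context manager produced by another
        # fixture, mirroring jwt_auth_init_and_auth_mocks above.
        @contextmanager
        def _init_and_auth_mocks(sub, **init_kwargs):
            with init_mocks(**init_kwargs) as state:
                state['sub'] = sub
                yield state

        return _init_and_auth_mocks


    def test_composed_fixtures(init_and_auth_mocks):
        with init_and_auth_mocks('fake_user_id') as state:
            assert state['sub'] == 'fake_user_id'

The payoff shows in the parametrized tests above: sub, sub_type, and assert_authed thread through one entry point instead of each test re-assembling half a dozen fixtures by hand.
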
diff --git a/test/unit/session/test_box_session.py b/test/unit/session/test_box_session.py
index 9b837af..ae6b24f 100644
--- a/test/unit/session/test_box_session.py
+++ b/test/unit/session/test_box_session.py
@@ -6,7 +6,7 @@
from io import IOBase
from numbers import Number
-from mock import MagicMock, Mock, call
+from mock import MagicMock, Mock, PropertyMock, call
import pytest
from boxsdk.auth.oauth2 import OAuth2
@@ -23,13 +23,26 @@ def translator(default_translator, request): # pylint:disable=unused-argument
@pytest.fixture
-def box_session(translator):
- mock_oauth = Mock(OAuth2)
- mock_oauth.access_token = 'fake_access_token'
+def initial_access_token():
+ return 'fake_access_token'
- mock_network_layer = Mock(DefaultNetwork)
- return BoxSession(mock_oauth, mock_network_layer, translator=translator)
[email protected]
+def mock_oauth(initial_access_token):
+ mock_oauth = MagicMock(OAuth2)
+ mock_oauth.access_token = initial_access_token
+ return mock_oauth
+
+
[email protected]
+def mock_network_layer():
+ return Mock(DefaultNetwork)
+
+
[email protected]
+def box_session(mock_oauth, mock_network_layer, translator):
+ # pylint:disable=redefined-outer-name
+ return BoxSession(oauth=mock_oauth, network_layer=mock_network_layer, translator=translator)
@pytest.mark.parametrize('test_method', [
@@ -42,18 +55,68 @@ def box_session(translator):
def test_box_session_handles_unauthorized_response(
test_method,
box_session,
+ mock_oauth,
+ mock_network_layer,
unauthorized_response,
generic_successful_response,
test_url,
):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
- mock_network_layer.request.side_effect = [unauthorized_response, generic_successful_response]
+ # pylint:disable=redefined-outer-name
+
+ def get_access_token_from_auth_object():
+ return mock_oauth.access_token
+
+ mock_network_layer.request.side_effect = mock_responses = [unauthorized_response, generic_successful_response]
+ for mock_response in mock_responses:
+ type(mock_response).access_token_used = PropertyMock(side_effect=get_access_token_from_auth_object)
+
+ def refresh(access_token_used):
+ assert access_token_used == mock_oauth.access_token
+ mock_oauth.access_token = 'fake_new_access_token'
+ return (mock_oauth.access_token, None)
+
+ mock_oauth.refresh.side_effect = refresh
box_response = test_method(box_session, url=test_url)
assert box_response.status_code == 200
[email protected]('test_method', [
+ BoxSession.get,
+ BoxSession.post,
+ BoxSession.put,
+ BoxSession.delete,
+ BoxSession.options,
+])
[email protected]('initial_access_token', [None])
+def test_box_session_gets_access_token_before_request(
+ test_method,
+ box_session,
+ mock_oauth,
+ mock_network_layer,
+ generic_successful_response,
+ test_url,
+):
+ # pylint:disable=redefined-outer-name
+
+ def get_access_token_from_auth_object():
+ return mock_oauth.access_token
+
+ mock_network_layer.request.side_effect = mock_responses = [generic_successful_response]
+ for mock_response in mock_responses:
+ type(mock_response).access_token_used = PropertyMock(side_effect=get_access_token_from_auth_object)
+
+ def refresh(access_token_used):
+ assert access_token_used == mock_oauth.access_token
+ mock_oauth.access_token = 'fake_new_access_token'
+ return (mock_oauth.access_token, None)
+
+ mock_oauth.refresh.side_effect = refresh
+
+ box_response = test_method(box_session, url=test_url, auto_session_renewal=True)
+ assert box_response.status_code == 200
+
+
@pytest.mark.parametrize('test_method', [
BoxSession.get,
BoxSession.post,
@@ -65,12 +128,12 @@ def test_box_session_handles_unauthorized_response(
def test_box_session_retries_response_after_retry_after(
test_method,
box_session,
+ mock_network_layer,
retry_after_response,
generic_successful_response,
test_url,
):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
+ # pylint:disable=redefined-outer-name
mock_network_layer.request.side_effect = [retry_after_response, generic_successful_response]
mock_network_layer.retry_after.side_effect = lambda delay, request, *args, **kwargs: request(*args, **kwargs)
@@ -92,12 +155,12 @@ def test_box_session_retries_response_after_retry_after(
def test_box_session_retries_request_after_server_error(
test_method,
box_session,
+ mock_network_layer,
server_error_response,
generic_successful_response,
test_url,
):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
+ # pylint:disable=redefined-outer-name
mock_network_layer.request.side_effect = [server_error_response, server_error_response, generic_successful_response]
mock_network_layer.retry_after.side_effect = lambda delay, request, *args, **kwargs: request(*args, **kwargs)
@@ -113,9 +176,8 @@ def test_box_session_retries_request_after_server_error(
assert mock_network_layer.retry_after.call_args_list[1][0][0] == 2
-def test_box_session_seeks_file_after_retry(box_session, server_error_response, generic_successful_response, test_url):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
+def test_box_session_seeks_file_after_retry(box_session, mock_network_layer, server_error_response, generic_successful_response, test_url):
+ # pylint:disable=redefined-outer-name
mock_network_layer.request.side_effect = [server_error_response, generic_successful_response]
mock_network_layer.retry_after.side_effect = lambda delay, request, *args, **kwargs: request(*args, **kwargs)
mock_file_1, mock_file_2 = MagicMock(IOBase), MagicMock(IOBase)
@@ -137,27 +199,24 @@ def test_box_session_seeks_file_after_retry(box_session, server_error_response,
assert mock_file_2.seek.has_calls(call(3) * 2)
-def test_box_session_raises_for_non_json_response(box_session, non_json_response, test_url):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
+def test_box_session_raises_for_non_json_response(box_session, mock_network_layer, non_json_response, test_url):
+ # pylint:disable=redefined-outer-name
mock_network_layer.request.side_effect = [non_json_response]
with pytest.raises(BoxAPIException):
box_session.get(url=test_url)
-def test_box_session_raises_for_failed_response(box_session, bad_network_response, test_url):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
+def test_box_session_raises_for_failed_response(box_session, mock_network_layer, bad_network_response, test_url):
+ # pylint:disable=redefined-outer-name
mock_network_layer.request.side_effect = [bad_network_response]
with pytest.raises(BoxAPIException):
box_session.get(url=test_url)
-def test_box_session_raises_for_failed_non_json_response(box_session, failed_non_json_response, test_url):
- # pylint:disable=redefined-outer-name, protected-access
- mock_network_layer = box_session._network_layer
+def test_box_session_raises_for_failed_non_json_response(box_session, mock_network_layer, failed_non_json_response, test_url):
+ # pylint:disable=redefined-outer-name
mock_network_layer.request.side_effect = [failed_non_json_response]
with pytest.raises(BoxAPIException):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-xdist",
"mock",
"sqlalchemy",
"bottle",
"jsonpatch"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | async-timeout==4.0.3
bottle==0.13.2
-e git+https://github.com/box/box-python-sdk.git@cc4921422bb070e5547c0c054c1d2c554380dba5#egg=boxsdk
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
cryptography==44.0.2
exceptiongroup==1.2.2
execnet==2.0.2
greenlet==3.1.1
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
jsonpatch==1.33
jsonpointer==3.0.0
mock==5.2.0
packaging==24.0
pluggy==1.2.0
pycparser==2.21
PyJWT==2.8.0
pytest==7.4.4
pytest-xdist==3.5.0
redis==5.0.8
requests==2.31.0
requests-toolbelt==1.0.0
six==1.17.0
SQLAlchemy==2.0.40
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: box-python-sdk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- async-timeout==4.0.3
- bottle==0.13.2
- cffi==1.15.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- execnet==2.0.2
- greenlet==3.1.1
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jsonpatch==1.33
- jsonpointer==3.0.0
- mock==5.2.0
- packaging==24.0
- pluggy==1.2.0
- pycparser==2.21
- pyjwt==2.8.0
- pytest==7.4.4
- pytest-xdist==3.5.0
- redis==5.0.8
- requests==2.31.0
- requests-toolbelt==1.0.0
- six==1.17.0
- sqlalchemy==2.0.40
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/box-python-sdk
| [
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS256-None-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS256-None-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS256-None-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS256-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS256-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS256-strong_password-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS512-None-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS512-None-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS512-None-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS512-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS512-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_authenticates_with_user_if_enterprise_id_and_user_both_passed_to_constructor[RS512-strong_password-128]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS256-None-authenticate_user]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS256-None-authenticate_instance]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS256-strong_password-authenticate_user]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS256-strong_password-authenticate_instance]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS512-None-authenticate_user]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS512-None-authenticate_instance]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS512-strong_password-authenticate_user]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_raises_value_error_if_sub_was_never_given[RS512-strong_password-authenticate_instance]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_raises_type_error_if_user_is_unsupported_type[RS256-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_raises_type_error_if_user_is_unsupported_type[RS256-strong_password]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_raises_type_error_if_user_is_unsupported_type[RS512-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_raises_type_error_if_user_is_unsupported_type[RS512-strong_password]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_raises_value_error_if_different_enterprise_id_is_given[RS256-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_raises_value_error_if_different_enterprise_id_is_given[RS256-strong_password]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_raises_value_error_if_different_enterprise_id_is_given[RS512-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_raises_value_error_if_different_enterprise_id_is_given[RS512-strong_password]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-16-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-16-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-16-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-16-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-16-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-16-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-32-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-32-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-32-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-32-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-32-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-32-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-128-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-128-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-128-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-128-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-128-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-None-128-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-16-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-16-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-16-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-16-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-16-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-16-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-32-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-32-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-32-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-32-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-32-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-32-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-128-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-128-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-128-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-128-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-128-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS256-strong_password-128-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-16-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-16-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-16-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-16-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-16-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-16-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-32-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-32-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-32-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-32-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-32-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-32-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-128-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-128-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-128-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-128-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-128-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-None-128-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-16-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-16-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-16-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-16-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-16-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-16-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-32-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-32-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-32-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-32-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-32-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-32-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-128-fake_user_id-False0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-128-fake_user_id-True0]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-128-fake_user_id-False1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-128-fake_user_id-True1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-128-user4-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_sends_post_request_with_correct_params[RS512-strong_password-128-user5-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-16-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-16-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-32-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-32-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-128-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-128-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-16-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-16-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-32-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-32-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-128-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-128-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-16-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-16-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-32-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-32-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-128-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-128-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-16-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-16-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-32-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-32-True-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-128-False-True]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-128-True-True]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS256-None-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS256-None-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS256-None-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS256-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS256-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS256-strong_password-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS512-None-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS512-None-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS512-None-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS512-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS512-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_app_user_sends_post_request_with_correct_params[RS512-strong_password-128]"
]
| [
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-None-16-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-None-16-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-None-32-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-None-32-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-None-128-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-None-128-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-strong_password-16-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-strong_password-16-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-strong_password-32-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-strong_password-32-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-strong_password-128-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS256-strong_password-128-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-None-16-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-None-16-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-None-32-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-None-32-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-None-128-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-None-128-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-strong_password-16-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-strong_password-16-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-strong_password-32-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-strong_password-32-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-strong_password-128-None]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_user_saves_user_id_for_future_calls[RS512-strong_password-128-fake_user_id_1]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS256-None-16]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS256-None-32]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS256-None-128]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS256-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS256-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS256-strong_password-128]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS512-None-16]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS512-None-32]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS512-None-128]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS512-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS512-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_saves_enterprise_id_for_future_calls[RS512-strong_password-128]",
"test/unit/session/test_box_session.py::test_box_session_seeks_file_after_retry[False-502]",
"test/unit/session/test_box_session.py::test_box_session_seeks_file_after_retry[False-503]",
"test/unit/session/test_box_session.py::test_box_session_seeks_file_after_retry[True-502]",
"test/unit/session/test_box_session.py::test_box_session_seeks_file_after_retry[True-503]"
]
| [
"test/unit/auth/test_jwt_auth.py::test_jwt_auth_constructor_raises_type_error_if_user_is_unsupported_type[RS256-None]",
"test/unit/auth/test_jwt_auth.py::test_jwt_auth_constructor_raises_type_error_if_user_is_unsupported_type[RS256-strong_password]",
"test/unit/auth/test_jwt_auth.py::test_jwt_auth_constructor_raises_type_error_if_user_is_unsupported_type[RS512-None]",
"test/unit/auth/test_jwt_auth.py::test_jwt_auth_constructor_raises_type_error_if_user_is_unsupported_type[RS512-strong_password]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-16-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-32-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-None-128-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-16-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-32-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS256-strong_password-128-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-16-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-32-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-None-128-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-16-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-32-True-False]",
"test/unit/auth/test_jwt_auth.py::test_authenticate_instance_sends_post_request_with_correct_params[RS512-strong_password-128-True-False]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS256-None-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS256-None-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS256-None-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS256-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS256-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS256-strong_password-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS512-None-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS512-None-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS512-None-128]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS512-strong_password-16]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS512-strong_password-32]",
"test/unit/auth/test_jwt_auth.py::test_refresh_instance_sends_post_request_with_correct_params[RS512-strong_password-128]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[False-get]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[False-post]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[False-put]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[False-delete]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[False-options]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[True-get]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[True-post]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[True-put]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[True-delete]",
"test/unit/session/test_box_session.py::test_box_session_handles_unauthorized_response[True-options]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[False-None-get]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[False-None-post]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[False-None-put]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[False-None-delete]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[False-None-options]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[True-None-get]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[True-None-post]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[True-None-put]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[True-None-delete]",
"test/unit/session/test_box_session.py::test_box_session_gets_access_token_before_request[True-None-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-False-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-False-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-False-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-False-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-False-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-False-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-True-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-True-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-True-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-True-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-True-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[202-True-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-False-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-False-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-False-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-False-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-False-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-False-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-True-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-True-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-True-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-True-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-True-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_response_after_retry_after[429-True-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-502-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-502-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-502-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-502-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-502-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-502-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-503-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-503-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-503-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-503-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-503-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[False-503-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-502-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-502-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-502-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-502-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-502-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-502-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-503-get]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-503-post]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-503-put]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-503-delete]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-503-options]",
"test/unit/session/test_box_session.py::test_box_session_retries_request_after_server_error[True-503-test_method5]",
"test/unit/session/test_box_session.py::test_box_session_raises_for_non_json_response[False]",
"test/unit/session/test_box_session.py::test_box_session_raises_for_non_json_response[True]",
"test/unit/session/test_box_session.py::test_box_session_raises_for_failed_response[False]",
"test/unit/session/test_box_session.py::test_box_session_raises_for_failed_response[True]",
"test/unit/session/test_box_session.py::test_box_session_raises_for_failed_non_json_response[False]",
"test/unit/session/test_box_session.py::test_box_session_raises_for_failed_non_json_response[True]",
"test/unit/session/test_box_session.py::test_box_response_properties_pass_through_to_network_response_properties",
"test/unit/session/test_box_session.py::test_translator[False]",
"test/unit/session/test_box_session.py::test_translator[True]"
]
| []
| Apache License 2.0 | 770 | [
"README.rst",
"boxsdk/auth/jwt_auth.py",
"requirements.txt",
"boxsdk/session/box_session.py"
]
| [
"README.rst",
"boxsdk/auth/jwt_auth.py",
"requirements.txt",
"boxsdk/session/box_session.py"
]
|
MediaMath__t1-python-98 | 21b85e3d1b11e51092514a212afcc8934b173a66 | 2016-09-22 15:46:37 | 21b85e3d1b11e51092514a212afcc8934b173a66 | diff --git a/terminalone/models/campaign.py b/terminalone/models/campaign.py
index a38d866..41e950c 100644
--- a/terminalone/models/campaign.py
+++ b/terminalone/models/campaign.py
@@ -18,9 +18,9 @@ class Campaign(Entity):
'advertiser', 'ad_server', 'currency', 'merit_pixel', 'time_zone',
}
_conv = t1types.enum({'every', 'one', 'variable'}, 'variable')
- _freq_ints = t1types.enum({'hour', 'day', 'week', 'month',
- 'not-applicable'}, 'not-applicable')
- _freq_types = t1types.enum({'even', 'asap', 'no-limit'}, 'no-limit')
+ _cap_ints = t1types.enum({'hour', 'day', 'week', 'month',
+ 'not-applicable'}, 'not-applicable')
+ _cap_types = t1types.enum({'even', 'asap', 'no-limit'}, 'no-limit')
_goal_cats = t1types.enum({'audience', 'engagement', 'response'}, None)
_goal_types = t1types.enum({'spend', 'reach', 'cpc', 'cpe', 'cpa', 'roi'},
None)
@@ -47,6 +47,9 @@ class Campaign(Entity):
'goal_value': float,
'has_custom_attribution': t1types.int_to_bool,
'id': int,
+ 'impression_cap_amount': int,
+ 'impression_cap_automatic': t1types.int_to_bool,
+ 'impression_cap_type': None,
'io_name': None,
'io_reference_num': None,
'initial_start_date': t1types.strpt,
@@ -58,12 +61,15 @@ class Campaign(Entity):
'pv_pct': float,
'pv_window_minutes': int,
'service_type': None,
+ 'source_campaign_id': int,
'spend_cap_amount': float,
'spend_cap_automatic': t1types.int_to_bool,
'spend_cap_enabled': t1types.int_to_bool,
+ 'spend_cap_type': None,
'start_date': t1types.strpt,
'status': t1types.int_to_bool,
'total_budget': float,
+ 'total_impression_budget': int,
'updated_on': t1types.strpt,
'use_default_ad_server': t1types.int_to_bool,
'use_mm_freq': t1types.int_to_bool,
@@ -75,15 +81,18 @@ class Campaign(Entity):
'conversion_type': _conv,
'dcs_data_is_campaign_level': int,
'end_date': t1types.strft,
- 'frequency_interval': _freq_ints,
- 'frequency_type': _freq_types,
+ 'frequency_interval': _cap_ints,
+ 'frequency_type': _cap_types,
'goal_category': _goal_cats,
'goal_type': _goal_types,
'has_custom_attribution': int,
+ 'impression_cap_automatic': int,
+ 'impression_cap_type': _cap_types,
'initial_start_date': t1types.strft,
'service_type': _serv_types,
'spend_cap_automatic': int,
'spend_cap_enabled': int,
+ 'spend_cap_type': _cap_types,
'start_date': t1types.strft,
'status': int,
'use_default_ad_server': int,
@@ -91,4 +100,51 @@ class Campaign(Entity):
})
def __init__(self, session, properties=None, **kwargs):
+ if properties is None:
+ # super(Entity) supers to grandparent
+ super(Entity, self).__setattr__('_init_sce', None)
+ super(Entity, self).__setattr__('_init_sct', None)
+ else:
+ super(Entity, self).__setattr__('_init_sce',
+ properties.get('spend_cap_enabled'))
+ super(Entity, self).__setattr__('_init_sct',
+ properties.get('spend_cap_type'))
super(Campaign, self).__init__(session, properties, **kwargs)
+
+ def _migration_asst(self):
+ """Helps migrate users to the new impression pacing features.
+
+ spend_cap_enabled is the old field. spend_cap_type is the new field.
+ If the user has changed:
+ - Nothing (final vals all equal): remove both fields
+ - Old (new vals equal): remove new fields, post old
+ - New (old vals equal): remove old fields, post new
+ - Both (no vals equal): UNDEFINED. remove old fields to prep.
+ """
+ i_sce, i_sct = self._init_sce, self._init_sct
+ f_sce, f_sct = (self.properties.get('spend_cap_enabled'),
+ self.properties.get('spend_cap_type'))
+
+ fields_to_remove = None
+ if i_sce == f_sce and i_sct == f_sct:
+ fields_to_remove = ['spend_cap_enabled', 'spend_cap_type']
+ elif i_sct == f_sct:
+ fields_to_remove = ['spend_cap_type']
+ else: # we don't need a second elif here because it's the same result
+ fields_to_remove = ['spend_cap_enabled']
+ return fields_to_remove
+
+ def save(self, data=None, url=None):
+ """Save object to T1 while accounting for old fields"""
+ if data is None:
+ data = self.properties.copy()
+
+ fields_to_remove = self._migration_asst()
+ for field in fields_to_remove:
+ data.pop(field, None)
+
+ super(Campaign, self).save(data=data, url=url)
+        # Re-set the fields so that if the same object gets saved, we
+        # compare against the re-initialized values
+ super(Entity, self).__setattr__('_init_sce', self.spend_cap_enabled)
+ super(Entity, self).__setattr__('_init_sct', self.spend_cap_type)
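
Two details in the campaign hunk above are worth a gloss. First, __init__ snapshots spend_cap_enabled/spend_cap_type through super(Entity, self).__setattr__ so the write bypasses Entity's attribute interception, as the inline comment notes. Second, _migration_asst is a before/after diff over those snapshots that picks which of the legacy/new spend-cap fields to strip from the POST body. A standalone sketch of just that decision table, with no T1 dependencies (the payload values below are made up for illustration):

    def fields_to_strip(init_old, init_new, final_old, final_new):
        # Mirrors the docstring above: nothing changed -> drop both;
        # only the old field changed -> drop the new one, post the old;
        # otherwise (new changed, or both changed) -> drop the old one.
        if init_old == final_old and init_new == final_new:
            return ['spend_cap_enabled', 'spend_cap_type']
        if init_new == final_new:
            return ['spend_cap_type']
        return ['spend_cap_enabled']


    # The user only touched the legacy field, so only it gets posted.
    payload = {'spend_cap_enabled': True, 'spend_cap_type': 'no-limit'}
    for key in fields_to_strip(False, 'no-limit', True, 'no-limit'):
        payload.pop(key, None)
    assert payload == {'spend_cap_enabled': True}
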
diff --git a/terminalone/models/strategy.py b/terminalone/models/strategy.py
index a9f8edf..0c356ed 100644
--- a/terminalone/models/strategy.py
+++ b/terminalone/models/strategy.py
@@ -20,10 +20,10 @@ class Strategy(Entity):
_relations = {
'campaign', 'currency', 'time_zone',
}
- _aud_seg_ops = t1types.enum({'AND', 'OR'}, 'OR')
- _freq_int = t1types.enum({'hour', 'day', 'week', 'month', 'campaign',
- 'not-applicable'}, 'not-applicable')
- _freq_type = t1types.enum({'even', 'asap', 'no-limit'}, 'no-limit')
+ _seg_incexc_ops = t1types.enum({'AND', 'OR'}, 'OR')
+ _pacing_ints = t1types.enum({'hour', 'day', 'week', 'month', 'campaign',
+ 'not-applicable'}, 'not-applicable')
+ _pacing_types = t1types.enum({'even', 'asap', 'no-limit'}, 'no-limit')
_goal_type = t1types.enum({'spend', 'reach', 'cpc', 'cpe', 'cpa', 'roi'},
'cpc')
_media_type = t1types.enum({'DISPLAY', 'VIDEO'}, 'DISPLAY')
@@ -42,18 +42,24 @@ class Strategy(Entity):
'budget': float,
'campaign_id': int,
'created_on': t1types.strpt,
+ 'currency_code': None,
'description': None,
'effective_goal_value': float,
'end_date': t1types.strpt,
'feature_compatibility': None,
'frequency_amount': int,
'frequency_interval': None,
+ 'frequency_optimization': t1types.int_to_bool,
'frequency_type': None,
'goal_type': None,
'goal_value': float,
'id': int,
'impression_cap': int,
+ 'impression_pacing_amount': int,
+ 'impression_pacing_interval': None,
+ 'impression_pacing_type': None,
'max_bid': float,
+ 'max_bid_wm': float,
'media_type': None,
'name': None,
'pacing_amount': float,
@@ -71,6 +77,8 @@ class Strategy(Entity):
'start_date': t1types.strpt,
'status': t1types.int_to_bool,
'supply_type': None,
+ 'targeting_segment_exclude_op': None,
+ 'targeting_segment_include_op': None,
'type': None,
'updated_on': t1types.strpt,
'use_campaign_end': t1types.int_to_bool,
@@ -82,13 +90,16 @@ class Strategy(Entity):
}
_push = _pull.copy()
_push.update({
- 'audience_segment_exclude_op': _aud_seg_ops,
- 'audience_segment_include_op': _aud_seg_ops,
+ 'audience_segment_exclude_op': _seg_incexc_ops,
+ 'audience_segment_include_op': _seg_incexc_ops,
'bid_price_is_media_only': int,
'end_date': partial(t1types.strft, null_on_none=True),
- 'frequency_interval': _freq_int,
- 'frequency_type': _freq_type,
+ 'frequency_interval': _pacing_ints,
+ 'frequency_optimization': int,
+ 'frequency_type': _pacing_types,
'goal_type': _goal_type,
+ 'impression_pacing_interval': _pacing_ints,
+ 'impression_pacing_type': _pacing_types,
'media_type': _media_type,
'pacing_interval': _pac_int,
'pacing_type': _pac_type,
@@ -102,6 +113,8 @@ class Strategy(Entity):
'start_date': partial(t1types.strft, null_on_none=True),
'status': int,
'supply_type': _supply_type,
+ 'targeting_segment_exclude_op': _seg_incexc_ops,
+ 'targeting_segment_include_op': _seg_incexc_ops,
'type': _type,
'use_campaign_end': int,
'use_campaign_start': int,
@@ -112,6 +125,17 @@ class Strategy(Entity):
_readonly = Entity._readonly | {'effective_goal_value', 'zone_name'}
def __init__(self, session, properties=None, **kwargs):
+ if properties is None:
+ # super(Entity) supers to grandparent
+ super(Entity, self).__setattr__('_init_impcap', None)
+ super(Entity, self).__setattr__('_init_imppac', None)
+ else:
+ super(Entity, self).__setattr__('_init_impcap',
+ properties.get('impression_cap'))
+ super(Entity, self).__setattr__('_init_imppac',
+ (properties.get('impression_pacing_type'),
+ properties.get('impression_pacing_amount'),
+ properties.get('impression_pacing_interval')))
super(Strategy, self).__init__(session, properties, **kwargs)
try:
self.pixel_target_expr
@@ -148,6 +172,35 @@ class Strategy(Entity):
},
}
+ def _migration_asst(self):
+ """Helps migrate users to the new impression pacing features.
+
+ impression_cap is the old field; the impression pacing fields comprise the new.
+ If the user has changed:
+ - Nothing (final vals all equal): remove both fields
+ - Old (new vals equal): remove new fields, post old
+ - New (old vals equal): remove old fields, post new
+ - Both (no vals equal): UNDEFINED. remove old fields to prep.
+ """
+ new_fields = ['impression_pacing_type',
+ 'impression_pacing_amount',
+ 'impression_pacing_interval']
+ i_cap, i_pac = self._init_impcap, self._init_imppac
+ f_cap, f_pac = (self.properties.get('impression_cap'),
+ (self.properties.get('impression_pacing_type'),
+ self.properties.get('impression_pacing_amount'),
+ self.properties.get('impression_pacing_interval')))
+
+ fields_to_remove = None
+ if i_cap == f_cap and i_pac == f_pac:
+ fields_to_remove = ['impression_cap']
+ fields_to_remove.extend(new_fields)
+ elif i_pac == f_pac:
+ fields_to_remove = new_fields
+ else: # we don't need a second elif here because it's the same result
+ fields_to_remove = ['impression_cap']
+ return fields_to_remove
+
def save_supplies(self, data):
url = self._construct_url(addl=['supplies', ])
entity, _ = super(Strategy, self)._post(PATHS['mgmt'], url, data)
@@ -189,12 +242,16 @@ class Strategy(Entity):
return include_string + exclude_string
def save(self, data=None, url=None):
-
+ """Save object to T1 accounting for fields an pixel target expr"""
if data is None:
data = self.properties.copy()
data['pixel_target_expr'] = self._serialize_target_expr()
+ fields_to_remove = self._migration_asst()
+ for field in fields_to_remove:
+ data.pop(field, None)
+
if getattr(self, 'use_campaign_start', False) and 'start_date' in data:
self.properties.pop('start_date', None)
data['start_date'] = None
@@ -204,7 +261,14 @@ class Strategy(Entity):
super(Strategy, self).save(data=data, url=url)
+ # Re-set the fields so that if the same object gets saved, we
+ # compare against the re-initialized values
self._deserialize_target_expr()
+ super(Entity, self).__setattr__('_init_impcap', self.impression_cap)
+ super(Entity, self).__setattr__('_init_imppac',
+ (self.impression_pacing_type,
+ self.impression_pacing_amount,
+ self.impression_pacing_interval))
@property
def pixel_target_expr_string(self):
diff --git a/terminalone/vendor/six.py b/terminalone/vendor/six.py
index 21b0e80..190c023 100644
--- a/terminalone/vendor/six.py
+++ b/terminalone/vendor/six.py
@@ -1,6 +1,6 @@
"""Utilities for writing code that runs on Python 2 and 3"""
-# Copyright (c) 2010-2014 Benjamin Peterson
+# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -23,17 +23,19 @@
from __future__ import absolute_import
import functools
+import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
-__version__ = "1.8.0"
+__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
@@ -56,6 +58,7 @@ else:
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
+
def __len__(self):
return 1 << 31
try:
@@ -87,9 +90,13 @@ class _LazyDescr(object):
def __get__(self, obj, tp):
result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- # This is a bit ugly, but it avoids running this again.
- delattr(obj.__class__, self.name)
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
return result
@@ -155,12 +162,14 @@ class MovedAttribute(_LazyDescr):
class _SixMetaPathImporter(object):
+
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
+
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
@@ -218,6 +227,7 @@ _importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
+
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
@@ -229,8 +239,10 @@ _moved_attributes = [
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
@@ -240,7 +252,6 @@ _moved_attributes = [
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
-
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
@@ -287,8 +298,13 @@ _moved_attributes = [
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
- MovedModule("winreg", "_winreg"),
]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
@@ -302,6 +318,7 @@ _importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_parse"""
@@ -341,6 +358,7 @@ _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_pa
class Module_six_moves_urllib_error(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_error"""
@@ -360,6 +378,7 @@ _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.er
class Module_six_moves_urllib_request(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_request"""
@@ -409,6 +428,7 @@ _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.
class Module_six_moves_urllib_response(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_response"""
@@ -429,6 +449,7 @@ _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib
class Module_six_moves_urllib_robotparser(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
@@ -446,6 +467,7 @@ _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.url
class Module_six_moves_urllib(types.ModuleType):
+
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
@@ -516,6 +538,9 @@ if PY3:
create_bound_method = types.MethodType
+ def create_unbound_method(func, cls):
+ return func
+
Iterator = object
else:
def get_unbound_function(unbound):
@@ -524,6 +549,9 @@ else:
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
class Iterator(object):
def next(self):
@@ -554,18 +582,30 @@ if PY3:
def iterlists(d, **kw):
return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
- return iter(d.iterkeys(**kw))
+ return d.iterkeys(**kw)
def itervalues(d, **kw):
- return iter(d.itervalues(**kw))
+ return d.itervalues(**kw)
def iteritems(d, **kw):
- return iter(d.iteritems(**kw))
+ return d.iteritems(**kw)
def iterlists(d, **kw):
- return iter(d.iterlists(**kw))
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
@@ -578,45 +618,66 @@ _add_doc(iterlists,
if PY3:
def b(s):
return s.encode("latin-1")
+
def u(s):
return s
unichr = chr
- if sys.version_info[1] <= 1:
- def int2byte(i):
- return bytes((i,))
- else:
- # This is about 2x faster than the implementation above on 3.2+
- int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
+
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
+
def byte2int(bs):
return ord(bs[0])
+
def indexbytes(buf, i):
return ord(buf[i])
- def iterbytes(buf):
- return (ord(byte) for byte in buf)
+ iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
if PY3:
exec_ = getattr(moves.builtins, "exec")
-
def reraise(tp, value, tb=None):
if value is None:
value = tp()
@@ -637,12 +698,26 @@ else:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
-
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
@@ -650,13 +725,14 @@ if print_ is None:
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
+
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
+ isinstance(data, unicode) and
+ fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
@@ -697,6 +773,15 @@ if print_ is None:
write(sep)
write(arg)
write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
_add_doc(reraise, """Reraise an exception.""")
@@ -704,19 +789,21 @@ if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
- f = functools.wraps(wrapped)(f)
+ f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
+
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
+
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
@@ -737,6 +824,25 @@ def add_metaclass(metaclass):
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
@@ -754,7 +860,7 @@ if sys.meta_path:
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
+ importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
| Impression Pacing New Fields
**Handling of Posts With ONLY Old Paradigm Fields**
We have introduced logic in our API such that new posts to Old Paradigm fields are correctly translated into their New Paradigm counterparts, provided that the posts do not also include New Paradigm fields. The table below outlines the translation.
**Handling of Posts With ONLY New Paradigm Fields**
Posts to the New Paradigm fields that do not include any Old Paradigm fields will be accepted normally.
**Handling of Posts With Both New AND Old Paradigm Fields**
We strongly advise against submitting posts that include both New and Old Paradigm fields because it will be possible to create a conflict between the two Paradigms that will be resolved in a way that may or may not align with the user’s intent.
Currently, the new impression pacing fields are not included in POST data because they are not in the Strategy model's `_pull` list.
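A standalone sketch of the stripping decision the client makes before POSTing, mirroring the `_migration_asst` helper added in this patch (the function name and literal values here are illustrative):

```python
def fields_to_strip(init_cap, init_pacing, final_cap, final_pacing):
    """Decide which impression fields to drop from the POST body."""
    new_fields = ['impression_pacing_type',
                  'impression_pacing_amount',
                  'impression_pacing_interval']
    if init_cap == final_cap and init_pacing == final_pacing:
        # Nothing changed: send neither paradigm.
        return ['impression_cap'] + new_fields
    if init_pacing == final_pacing:
        # Only the old field changed: keep it, drop the new fields.
        return new_fields
    # The new fields changed (or both did): drop the old field.
    return ['impression_cap']

# Only impression_cap was edited, so the new-paradigm fields are stripped:
assert fields_to_strip(10, ('no-limit', 10, 'day'),
                       20, ('no-limit', 10, 'day')) == [
    'impression_pacing_type',
    'impression_pacing_amount',
    'impression_pacing_interval',
]
```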
<img width="637" alt="screen shot 2016-09-21 at 4 55 24 pm" src="https://cloud.githubusercontent.com/assets/4910009/18729102/544200b0-801e-11e6-950f-f5a4eb7584c0.png">
| MediaMath/t1-python | diff --git a/tests/test_deprecated_fields.py b/tests/test_deprecated_fields.py
new file mode 100644
index 0000000..1f5123d
--- /dev/null
+++ b/tests/test_deprecated_fields.py
@@ -0,0 +1,85 @@
+from __future__ import absolute_import
+import unittest
+from terminalone.models import campaign
+from terminalone.models import strategy
+from terminalone.vendor import six
+
+
+class TestRemoveDeprecatedFields(unittest.TestCase):
+ """Tests for removing deprecated fields on save from strategy and campaign entities"""
+
+ def setUp(self):
+ mock_campaign_properties = {
+ "spend_cap_enabled": False,
+ "spend_cap_type": "no-limit",
+ "spend_cap_automatic": True
+
+ }
+ mock_strategy_properties = {
+ "impression_pacing_interval": "day",
+ "impression_pacing_type": "no-limit",
+ "impression_pacing_amount": 10,
+ "impression_cap": 10,
+
+ }
+ self.campaign = campaign.Campaign(None, mock_campaign_properties)
+ self.strategy = strategy.Strategy(None, mock_strategy_properties)
+
+ def test_campaign_remove_both_fields_when_no_changes(self):
+ fields_to_remove = self.campaign._migration_asst()
+ expected = ['spend_cap_enabled', 'spend_cap_type']
+ self.assertEqual(expected, fields_to_remove)
+
+ def test_campaign_remove_new_field_when_old_changed(self):
+ self.campaign.spend_cap_enabled = True
+
+ fields_to_remove = self.campaign._migration_asst()
+ expected = ['spend_cap_type']
+ self.assertEqual(expected, fields_to_remove)
+
+ def test_campaign_remove_old_field_when_new_changed(self):
+ self.campaign.spend_cap_type = 'derp'
+
+ fields_to_remove = self.campaign._migration_asst()
+ expected = ['spend_cap_enabled']
+ self.assertEqual(expected, fields_to_remove)
+
+ def test_campaign_remove_old_field_when_both_changed(self):
+ self.campaign.spend_cap_type = 'derp'
+ self.campaign.spend_cap_enabled = True
+
+ fields_to_remove = self.campaign._migration_asst()
+ expected = ['spend_cap_enabled']
+ self.assertEqual(expected, fields_to_remove)
+
+ def test_strategy_remove_all_fields_when_no_changes(self):
+ fields_to_remove = self.strategy._migration_asst()
+ expected = ['impression_pacing_interval', 'impression_pacing_type', 'impression_pacing_amount',
+ 'impression_cap']
+ six.assertCountEqual(self, expected, fields_to_remove)
+
+ def test_strategy_remove_new_fields_when_old_changed(self):
+ self.strategy.impression_cap = 1
+
+ fields_to_remove = self.strategy._migration_asst()
+ expected = ['impression_pacing_interval', 'impression_pacing_type', 'impression_pacing_amount']
+ six.assertCountEqual(self, expected, fields_to_remove)
+
+ def test_strategy_remove_old_fields_when_new_changed(self):
+ self.strategy.impression_pacing_interval = 'derp'
+ self.strategy.impression_pacing_type = 'derp'
+ self.strategy.impression_pacing_amount = 1
+
+ fields_to_remove = self.strategy._migration_asst()
+ expected = ['impression_cap']
+ six.assertCountEqual(self, expected, fields_to_remove)
+
+ def test_strategy_remove_old_fields_when_all_changed(self):
+ self.strategy.impression_pacing_interval = 'derp'
+ self.strategy.impression_pacing_type = 'derp'
+ self.strategy.impression_pacing_amount = 1
+ self.strategy.impression_cap = 1
+
+ fields_to_remove = self.strategy._migration_asst()
+ expected = ['impression_cap']
+ six.assertCountEqual(self, expected, fields_to_remove)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"responses",
"pep8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
nose==1.3.7
oauthlib==3.2.2
packaging==24.2
pep8==1.7.1
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
requests==2.32.3
requests-oauthlib==2.0.0
responses==0.25.7
six==1.8.0
-e git+https://github.com/MediaMath/t1-python.git@21b85e3d1b11e51092514a212afcc8934b173a66#egg=TerminalOne
tomli==2.2.1
urllib3==2.3.0
| name: t1-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- nose==1.3.7
- oauthlib==3.2.2
- packaging==24.2
- pep8==1.7.1
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.32.3
- requests-oauthlib==2.0.0
- responses==0.25.7
- six==1.8.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/t1-python
| [
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_campaign_remove_both_fields_when_no_changes",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_campaign_remove_new_field_when_old_changed",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_campaign_remove_old_field_when_both_changed",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_campaign_remove_old_field_when_new_changed",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_strategy_remove_all_fields_when_no_changes",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_strategy_remove_new_fields_when_old_changed",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_strategy_remove_old_fields_when_all_changed",
"tests/test_deprecated_fields.py::TestRemoveDeprecatedFields::test_strategy_remove_old_fields_when_new_changed"
]
| []
| []
| []
| Apache License 2.0 | 771 | [
"terminalone/vendor/six.py",
"terminalone/models/campaign.py",
"terminalone/models/strategy.py"
]
| [
"terminalone/vendor/six.py",
"terminalone/models/campaign.py",
"terminalone/models/strategy.py"
]
|
|
quentin7b__xee-sdk-python-28 | 01fe40c84a7cc3f558c8f7ba07b4b35d61b406ce | 2016-09-22 17:39:22 | b37ecd24b1e46f96baeee2592042487671f86293 | diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..b97b554
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,8 @@
+language: python
+python:
+ - "2.7"
+ - "3.5"
+# command to install dependencies
+install: make install
+# command to run tests
+script: make test
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..5b01bce
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,23 @@
+.PHONY: install lint test coverage clean
+
+install:
+ pip install -r requirements.txt
+
+lint:
+ pylint xee --output-format=html > lint.html || true
+ open lint.html
+
+test:
+ pip install -r test/requirements.txt
+ python -m unittest test.test_sdk
+
+coverage:
+ coverage run -m unittest test.test_sdk
+ coverage report -m
+ coverage html
+ open htmlcov/index.html
+
+clean:
+ coverage erase
+ rm lint.html || true
+ rm -rf **/**.pyc
diff --git a/README.md b/README.md
index 9cb3622..153b283 100644
--- a/README.md
+++ b/README.md
@@ -37,35 +37,40 @@ login_url = xee.get_authentication_url()
#### Getting a [token from an `authorization_code`](https://github.com/xee-lab/xee-api-docs/tree/master/api/api/v3/auth/access_token.md)
```python
-token , error = xee.get_token_from_code(authorization_code)
+token, error = xee.get_token_from_code(authorization_code)
```
#### Getting a [token from an `refresh_token`](https://github.com/xee-lab/xee-api-docs/tree/master/api/api/v3/auth/access_token.md)
```python
-token , error = xee.get_token_from_refresh_token(token.refresh_token)
+token, error = xee.get_token_from_refresh_token(token.refresh_token)
```
### Requests
As simple as
```python
-user , error = xee.get_user(token.access_token)
+user, error = xee.get_user(token.access_token)
print(user.id)
```
Other examples:
```python
-status , error = xee.get_status(carId,token.access_token)
+status, error = xee.get_status(carId, token.access_token)
print(status)
```
```python
-signal , error = xee.get_signals(carId,token.access_token,names=['Odometer', 'FuelLevel'])
+signal, error = xee.get_signals(carId, token.access_token,names=['Odometer', 'FuelLevel'])
print(signal)
```
+```python
+trip_duration, error = xee.get_trip_duration(tripId, token.access_token)
+print(trip_duration.value)
+```
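+
+The other trip-stat accessors added in this release follow the same pattern (`tripId` is a placeholder id):
+
+```python
+stats, error = xee.get_trip_stats(tripId, token.access_token)
+for stat in stats:
+    print(stat.type, stat.value)
+
+mileage, error = xee.get_trip_mileage(tripId, token.access_token)
+print(mileage.value)
+```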
+
See the [docs](https://github.com/quentin7b/xee-sdk-python/docs) for more about how to use it
## Contributing
diff --git a/xee/entities.py b/xee/entities.py
index 93606c7..7f6b06b 100644
--- a/xee/entities.py
+++ b/xee/entities.py
@@ -26,7 +26,7 @@ User = collections.namedtuple(
'nick_name',
'gender',
'birth_date',
- 'license_delivery_date',
+ 'licence_delivery_date',
'role',
'is_location_enabled'
])
@@ -90,8 +90,7 @@ UsedTimeStat = collections.namedtuple(
'end_date',
'type',
'value'
- ]
-)
+ ])
MileageStat = collections.namedtuple(
'MileageStat',
[
@@ -99,8 +98,13 @@ MileageStat = collections.namedtuple(
'end_date',
'type',
'value'
- ]
-)
+ ])
+TripStat = collections.namedtuple(
+ 'TripStat',
+ [
+ 'type',
+ 'value'
+ ])
# Parsers
@@ -161,9 +165,9 @@ def parse_user(user):
birth_date = None
if user['birthDate']:
birth_date = isodate.parse_datetime(user['birthDate'])
- license_delivery_date = None
+ licence_delivery_date = None
if user['licenseDeliveryDate']:
- license_delivery_date = isodate.parse_datetime(user['license_delivery_date'])
+ licence_delivery_date = isodate.parse_datetime(user['licenseDeliveryDate'])
try:
return User(
user['id'],
@@ -172,7 +176,7 @@ def parse_user(user):
user['nickName'],
user['gender'],
birth_date,
- license_delivery_date,
+ licence_delivery_date,
user['role'],
user['isLocationEnabled']
)
@@ -303,11 +307,21 @@ def parse_status(status):
"""
try:
- accelerometer = status['accelerometer']
+ accelerometer = None
+ if 'accelerometer' in status:
+ accelerometer_dict = status['accelerometer']
+ if accelerometer_dict:
+ accelerometer = Accelerometer(accelerometer_dict['x'], accelerometer_dict['y'],
+ accelerometer_dict['z'],
+ isodate.parse_datetime(accelerometer_dict['date']))
+ location = None
+ if 'location' in status:
+ location_dict = status['location']
+ if location_dict:
+ location = parse_location(location_dict)
return Status(
- parse_location(status['location']),
- Accelerometer(accelerometer['x'], accelerometer['y'], accelerometer['z'],
- isodate.parse_datetime(accelerometer['date'])),
+ location,
+ accelerometer,
[parse_signal(signal) for signal in status['signals']]
)
except ValueError as err:
@@ -390,7 +404,7 @@ def parse_trip(trip):
Returns
-------
tuple
- A namedtuple containing trip stat info.
+ A namedtuple containing trip info.
The error is None if everything went fine.
Raises
@@ -409,3 +423,33 @@ def parse_trip(trip):
)
except ValueError as err:
raise xee_exceptions.ParseException(err)
+
+
+def parse_trip_stat(trip_stat):
+ """
+ Parse a trip stat from a a dict representation.
+
+ Parameters
+ ----------
+ trip_stat : dict
+ The trip stat as a dict.
+
+ Returns
+ -------
+ tuple
+ A namedtuple containing trip stat info.
+ The error is None if everything went fine.
+
+ Raises
+ ------
+ ValueError
+ If the dict does not contains the correct data.
+
+ """
+ try:
+ return TripStat(
+ trip_stat['type'],
+ trip_stat['value']
+ )
+ except ValueError as err:
+ raise xee_exceptions.ParseException(err)
diff --git a/xee/sdk.py b/xee/sdk.py
index 3d9ae0d..824435b 100644
--- a/xee/sdk.py
+++ b/xee/sdk.py
@@ -2,7 +2,10 @@
# coding: utf8
"""This script contains the Xee python SDK"""
-import urllib.parse
+try:
+ import urllib.parse as url_parser
+except ImportError:
+ import urllib as url_parser
import isodate
import requests
@@ -57,9 +60,9 @@ class Xee(object):
"""
route = '{host}/auth/auth'.format(host=self.host)
if state is None:
- query_params = urllib.parse.urlencode({'client_id': self.client_id})
+ query_params = url_parser.urlencode({'client_id': self.client_id})
else:
- query_params = urllib.parse.urlencode({'client_id': self.client_id, 'state': state})
+ query_params = url_parser.urlencode({'client_id': self.client_id, 'state': state})
return '{route}?{params}'.format(route=route, params=query_params)
def get_token_from_code(self, code):
@@ -257,7 +260,7 @@ class Xee(object):
if options.get('names', None) is not None:
params['name'] = ','.join(options['names'])
if bool(params):
- route = '?'.join([route, urllib.parse.urlencode(params)])
+ route = '?'.join([route, url_parser.urlencode(params)])
try:
response = xee_utils.do_get_request(route, access_token)
return [xee_entities.parse_signal(signal) for signal in response], None
@@ -303,7 +306,7 @@ class Xee(object):
if options.get('end', None) is not None:
params['end'] = isodate.datetime_isoformat(options['end'])
if bool(params):
- route = '?'.join([route, urllib.parse.urlencode(params)])
+ route = '?'.join([route, url_parser.urlencode(params)])
try:
response = xee_utils.do_get_request(route, access_token)
return [xee_entities.parse_location(location) for location in response], None
@@ -344,7 +347,7 @@ class Xee(object):
if end is not None:
params['end'] = isodate.datetime_isoformat(end)
if bool(params):
- route = '?'.join([route, urllib.parse.urlencode(params)])
+ route = '?'.join([route, url_parser.urlencode(params)])
try:
response = xee_utils.do_get_request(route, access_token)
return [xee_entities.parse_trip(trip) for trip in response], None
@@ -390,7 +393,7 @@ class Xee(object):
if options.get('initial_value', None) is not None:
params['initialValue'] = int(options.get('initial_value'))
if bool(params):
- route = '?'.join([route, urllib.parse.urlencode(params)])
+ route = '?'.join([route, url_parser.urlencode(params)])
try:
response = xee_utils.do_get_request(route, access_token)
return xee_entities.parse_used_time(response), None
@@ -433,7 +436,7 @@ class Xee(object):
if options.get('initial_value', None) is not None:
params['initialValue'] = float(options.get('initial_value'))
if bool(params):
- route = '?'.join([route, urllib.parse.urlencode(params)])
+ route = '?'.join([route, url_parser.urlencode(params)])
try:
response = xee_utils.do_get_request(route, access_token)
return xee_entities.parse_mileage(response), None
@@ -492,7 +495,7 @@ class Xee(object):
if names is not None:
params['name'] = ','.join(names)
if bool(params):
- route = '{route}?{params}'.format(route=route, params=urllib.parse.urlencode(params))
+ route = '{route}?{params}'.format(route=route, params=url_parser.urlencode(params))
try:
response = xee_utils.do_get_request(route, access_token)
signals = [xee_entities.parse_signal(signal) for signal in response]
@@ -531,3 +534,84 @@ class Xee(object):
return [], None
except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
return None, err
+
+ def get_trip_stats(self, trip_id, access_token):
+ """
+ Fetch a list of stats for a specific trip.
+
+ Parameters
+ ----------
+ trip_id : str
+ the id of the trip whose stats you are looking for.
+ access_token : str
+ the access token of the user.
+
+ Returns
+ -------
+ tuple
+ A tuple containing [TripStat], Error.
+ The error is None if everything went fine.
+
+ """
+ route = '{host}/trips/{trip_id}/stats'.format(host=self.host, trip_id=trip_id)
+ try:
+ response = xee_utils.do_get_request(route, access_token)
+ stats = [xee_entities.parse_trip_stat(stat) for stat in response]
+ return stats, None
+ except ValueError:
+ # Happens when the stats list is empty
+ return [], None
+ except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
+ return None, err
+
+ def get_trip_mileage(self, trip_id, access_token):
+ """
+ Fetch trip mileage stat.
+
+ Parameters
+ ----------
+ trip_id : str
+ the id of the trip whose mileage you are looking for.
+ access_token : str
+ the access token of the user.
+
+ Returns
+ -------
+ tuple
+ A tuple containing TripStat, Error.
+ The error is None if everything went fine.
+
+ """
+ route = '{host}/trips/{trip_id}/stats/mileage'.format(host=self.host, trip_id=trip_id)
+ try:
+ response = xee_utils.do_get_request(route, access_token)
+ mileage = xee_entities.parse_trip_stat(response)
+ return mileage, None
+ except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
+ return None, err
+
+ def get_trip_duration(self, trip_id, access_token):
+ """
+ Fetch trip duration stat.
+
+ Parameters
+ ----------
+ trip_id : str
+ the id of the trip whose duration you are looking for.
+ access_token : str
+ the access token of the user.
+
+ Returns
+ -------
+ tuple
+ A tuple containing TripStat, Error.
+ The error is None if everything went fine.
+
+ """
+ route = '{host}/trips/{trip_id}/stats/usedtime'.format(host=self.host, trip_id=trip_id)
+ try:
+ response = xee_utils.do_get_request(route, access_token)
+ used_time = xee_entities.parse_trip_stat(response)
+ return used_time, None
+ except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
+ return None, err
diff --git a/xee/version.py b/xee/version.py
index bac0fcf..3032e4a 100644
--- a/xee/version.py
+++ b/xee/version.py
@@ -4,4 +4,4 @@
This package the version of the module
"""
-__version__ = "3.0.1"
+__version__ = "3.0.2"
| [User] Licence for driver but License in API
The API returns `licenseDeliveryDate` but this is a `licence`, and the parser reads the non-existent snake_case key `license_delivery_date`.
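A minimal repro sketch, assuming a truncated user payload (only the relevant key shown):

```python
import isodate

# Key exactly as the API returns it (hypothetical one-field payload).
user = {"licenseDeliveryDate": "2014-08-13T00:00:00+00:00"}

# Before the fix the parser read a snake_case key that never exists:
# isodate.parse_datetime(user['license_delivery_date'])  # KeyError

# The fix reads the API's camelCase key and exposes it as licence_delivery_date:
licence_delivery_date = isodate.parse_datetime(user["licenseDeliveryDate"])
```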
Parser fails with a `KeyError` on that lookup. | quentin7b/xee-sdk-python | diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..3e70622
--- /dev/null
+++ b/test/__init__.py
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
+# coding: utf8
+"""This package contains nothing !"""
+from test.test_sdk import *
\ No newline at end of file
diff --git a/test/requirements.txt b/test/requirements.txt
new file mode 100644
index 0000000..adc158d
--- /dev/null
+++ b/test/requirements.txt
@@ -0,0 +1,3 @@
+responses
+pytz
+isodate
\ No newline at end of file
diff --git a/test/test_sdk.py b/test/test_sdk.py
new file mode 100644
index 0000000..3790135
--- /dev/null
+++ b/test/test_sdk.py
@@ -0,0 +1,659 @@
+#!/usr/bin/env python
+# coding: utf8
+import unittest
+
+import responses
+import pytz
+
+from xee.exceptions import APIException
+from xee.sdk import Xee
+from datetime import datetime
+
+xee = Xee('toto', 'tata', 'tut')
+host = xee.host
+
+
+class TestAuthFromAuthorizationCode(unittest.TestCase):
+ @responses.activate
+ def test_access_token_from_authorization_code_ok(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/auth/access_token.md
+ responses.add(responses.POST, host + "/auth/access_token",
+ json={
+ "access_token": "22fe0c13e995da4a44a63a7ff549badb5d337a42bf80f17424482e35d4cca91a",
+ "expires_at": 1382962374,
+ "expires_in": 3600,
+ "refresh_token": "8eb667707535655f2d9e14fc6491a59f6e06f2e73170761259907d8de186b6a1",
+ "token_type": "bearer"
+ },
+ status=200)
+ token, err = xee.get_token_from_code("fake_code")
+ self.assertEqual(token.access_token,
+ '22fe0c13e995da4a44a63a7ff549badb5d337a42bf80f17424482e35d4cca91a')
+ self.assertEqual(token.refresh_token,
+ '8eb667707535655f2d9e14fc6491a59f6e06f2e73170761259907d8de186b6a1')
+
+
+class TestAuthFromRefreshToken(unittest.TestCase):
+ @responses.activate
+ def test_access_token_from_refresh_token_ok(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/auth/access_token.md
+ responses.add(responses.POST, host + "/auth/access_token",
+ json={
+ "access_token": "22fe0c13e995da4a44a63a7ff549badb5d337a42bf80f17424482e35d4cca91a",
+ "expires_at": 1382962374,
+ "expires_in": 3600,
+ "refresh_token": "8eb667707535655f2d9e14fc6491a59f6e06f2e73170761259907d8de186b6a1",
+ "token_type": "bearer"
+ },
+ status=200)
+ token, err = xee.get_token_from_refresh_token("fake_refresh_token")
+ self.assertEqual(token.access_token,
+ '22fe0c13e995da4a44a63a7ff549badb5d337a42bf80f17424482e35d4cca91a')
+ self.assertEqual(token.refresh_token,
+ '8eb667707535655f2d9e14fc6491a59f6e06f2e73170761259907d8de186b6a1')
+
+
+class TestUser(unittest.TestCase):
+ @responses.activate
+ def test_get_user_ok(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/users/me.md
+ responses.add(responses.GET, host + "/users/me",
+ json={
+ "id": 42,
+ "lastName": "Doe",
+ "firstName": "John",
+ "nickName": "Johny",
+ "gender": "MALE",
+ "birthDate": "2016-01-11T00:00:00+00:00",
+ "licenseDeliveryDate": "2014-08-13T00:00:00+00:00",
+ "role": "dev",
+ "isLocationEnabled": True,
+ "creationDate": "2014-08-13T15:20:58+00:00",
+ "lastUpdateDate": "2016-02-12T09:07:47+00:00",
+ },
+ status=200)
+ user, err = xee.get_user("fake_access_token")
+ self.assertEqual(user.id, 42)
+ self.assertEqual(user.last_name, 'Doe')
+ self.assertEqual(user.first_name, 'John')
+ self.assertEqual(user.nick_name, 'Johny')
+ self.assertEqual(user.gender, 'MALE')
+ self.assertEqual(user.birth_date, datetime(2016, 1, 11, 0, 0, 0, tzinfo=pytz.utc))
+ self.assertEqual(user.licence_delivery_date,
+ datetime(2014, 8, 13, 0, 0, 0, tzinfo=pytz.utc))
+ self.assertEqual(user.role, 'dev')
+ self.assertEqual(user.is_location_enabled, True)
+
+ @responses.activate
+ def test_get_user_403(self):
+ responses.add(responses.GET, host + "/users/me",
+ json=
+ [
+ {
+ 'type': 'AUTHORIZATION_ERROR',
+ 'message': "Token does not have the required scope",
+ 'tip': "Add the users_read scope to your app scopes and reconnect the user"
+ }
+ ],
+ status=403)
+ user, err = xee.get_user("oops")
+ self.assertIsNone(user)
+ self.assertEqual(err, APIException(
+ 'AUTHORIZATION_ERROR',
+ "Token does not have the required scope",
+ "Add the users_read scope to your app scopes and reconnect the user"))
+
+
+class TestCars(unittest.TestCase):
+ @responses.activate
+ def test_get_cars_list_ok(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/users/me.md
+ responses.add(responses.GET, host + "/users/me/cars",
+ json=[
+ {
+ "id": 1337,
+ "name": "Mark-42",
+ "make": "Mark",
+ "model": "42",
+ "year": 2014,
+ "numberPlate": "M-42-TS",
+ "deviceId": "E133742015",
+ "cardbId": 210,
+ "creationDate": "2014-09-23T12:49:48+00:00",
+ "lastUpdateDate": "2016-02-19T08:41:58+00:00"
+ }
+ ],
+ status=200)
+ cars, err = xee.get_cars("fake_access_token")
+ self.assertEqual(len(cars), 1)
+ self.assertEqual(cars[0].id, 1337)
+ self.assertEqual(cars[0].name, 'Mark-42')
+ self.assertEqual(cars[0].make, 'Mark')
+ self.assertEqual(cars[0].model, '42')
+ self.assertEqual(cars[0].year, 2014)
+ self.assertEqual(cars[0].number_plate, 'M-42-TS')
+ self.assertEqual(cars[0].device_id, 'E133742015')
+ self.assertEqual(cars[0].cardb_id, 210)
+
+ @responses.activate
+ def test_get_cars_list_empty(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/users/me/cars.md
+ responses.add(responses.GET, host + "/users/me/cars",
+ json=[],
+ status=200)
+ cars, err = xee.get_cars("fake_access_token")
+ expected = []
+ self.assertListEqual(cars, expected)
+
+ @responses.activate
+ def test_get_car(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/car_id.md
+ responses.add(responses.GET, host + "/cars/1337",
+ json={
+ "id": 1337,
+ "name": "Mark-42",
+ "make": "Mark",
+ "model": "42",
+ "year": 2014,
+ "numberPlate": "M-42-TS",
+ "deviceId": "E133742015",
+ "cardbId": 210,
+ "creationDate": "2014-09-23T12:49:48+00:00",
+ "lastUpdateDate": "2016-02-19T08:41:58+00:00"
+ },
+ status=200)
+ car, err = xee.get_car(1337, "fake_access_token")
+ self.assertEqual(car.id, 1337)
+ self.assertEqual(car.name, 'Mark-42')
+ self.assertEqual(car.make, 'Mark')
+ self.assertEqual(car.model, '42')
+ self.assertEqual(car.year, 2014)
+ self.assertEqual(car.number_plate, 'M-42-TS')
+ self.assertEqual(car.device_id, 'E133742015')
+ self.assertEqual(car.cardb_id, 210)
+
+ @responses.activate
+ def test_get_cars_scope_403(self):
+ responses.add(responses.GET, host + "/users/me/cars",
+ json=
+ [
+ {
+ 'type': 'AUTHORIZATION_ERROR',
+ 'message': "Token does not have the required scope",
+ 'tip': "Add the cars_read scope to your app scopes and reconnect the user"
+ }
+ ],
+ status=403)
+ cars, err = xee.get_cars("oops")
+ self.assertIsNone(cars)
+ self.assertEqual(err, APIException(
+ 'AUTHORIZATION_ERROR',
+ "Token does not have the required scope",
+ "Add the cars_read scope to your app scopes and reconnect the user"))
+
+ @responses.activate
+ def test_get_cars_access_403(self):
+ responses.add(responses.GET, host + "/users/me/cars",
+ json=
+ [
+ {
+ 'type': 'AUTHORIZATION_ERROR',
+ 'message': "Token can't access this user",
+ 'tip': "Make sure the trip belongs to the user you asked for"
+ }
+ ],
+ status=403)
+ cars, err = xee.get_cars("oops")
+ self.assertIsNone(cars)
+ self.assertEqual(err, APIException(
+ 'AUTHORIZATION_ERROR',
+ "Token can't access this user",
+ "Make sure the trip belongs to the user you asked for"))
+
+ @responses.activate
+ def test_get_cars_404(self):
+ responses.add(responses.GET, host + "/users/me/cars",
+ json=
+ [
+ {
+ 'type': 'PARAMETERS_ERROR',
+ 'message': "User not found",
+ 'tip': "Please check that the user exists, looks like it does not"
+ }
+ ],
+ status=404)
+ cars, err = xee.get_cars("oops")
+ self.assertIsNone(cars)
+ self.assertEqual(err, APIException(
+ 'PARAMETERS_ERROR',
+ "User not found",
+ "Please check that the user exists, looks like it does not"))
+
+
+class TestStats(unittest.TestCase):
+ @responses.activate
+ def test_get_used_time_no_params(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/stats/usedtime.md
+ responses.add(responses.GET, host + "/cars/1337/stats/usedtime",
+ json={
+ "beginDate": "2016-07-01T00:00:00Z",
+ "endDate": "2016-07-15T12:34:30.854Z",
+ "type": "USED_TIME",
+ "value": 4200
+ },
+ status=200)
+ stat, err = xee.get_used_time(1337, "fake_access_token")
+ self.assertEqual(stat.begin_date, datetime(2016, 7, 1, 0, 0, 0, 0, tzinfo=pytz.utc))
+ self.assertEqual(stat.end_date,
+ datetime(2016, 7, 15, 12, 34, 30, 854000, tzinfo=pytz.utc))
+ self.assertEqual(stat.type, "USED_TIME")
+ self.assertEqual(stat.value, 4200)
+
+ @responses.activate
+ def test_get_mileage_no_params(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/stats/mileage.md
+ responses.add(responses.GET, host + "/cars/1337/stats/mileage",
+ json={
+ "beginDate": "2016-07-01T00:00:00Z",
+ "endDate": "2016-07-15T12:34:30.854Z",
+ "type": "MILEAGE",
+ "value": 17.50
+ },
+ status=200)
+ stat, err = xee.get_mileage(1337, "fake_access_token")
+ self.assertEqual(stat.begin_date, datetime(2016, 7, 1, 0, 0, 0, 0, tzinfo=pytz.utc))
+ self.assertEqual(stat.end_date,
+ datetime(2016, 7, 15, 12, 34, 30, 854000, tzinfo=pytz.utc))
+ self.assertEqual(stat.type, "MILEAGE")
+ self.assertEqual(stat.value, 17.50)
+
+
+class TestSignals(unittest.TestCase):
+ @responses.activate
+ def test_get_signals_no_params(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/signals.md
+ responses.add(responses.GET, host + "/cars/1337/signals",
+ json=[
+ {
+ "name": "LockSts",
+ "value": 0,
+ "date": "2016-03-01T02:24:24.000000+00:00"
+ },
+ {
+ "name": "Odometer",
+ "value": 34512.1,
+ "date": "2016-03-01T02:24:27.116000+00:00"
+ }
+ ],
+ status=200)
+ signals, err = xee.get_signals(1337, "fake_access_token")
+ self.assertEqual(len(signals), 2)
+ self.assertEqual(signals[0].name, 'LockSts')
+ self.assertEqual(signals[0].value, 0.0)
+ self.assertEqual(signals[0].date, datetime(2016, 3, 1, 2, 24, 24, 0, tzinfo=pytz.utc))
+ self.assertEqual(signals[1].name, 'Odometer')
+ self.assertEqual(signals[1].value, 34512.1)
+ self.assertEqual(signals[1].date, datetime(2016, 3, 1, 2, 24, 27, 116000, tzinfo=pytz.utc))
+
+ @responses.activate
+ def test_get_signals_empty(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/signals.md
+ responses.add(responses.GET, host + "/cars/1337/signals",
+ json=[],
+ status=200)
+ signals, err = xee.get_signals(1337, "fake_access_token")
+ expected = []
+ self.assertListEqual(signals, expected)
+
+
+class TestLocations(unittest.TestCase):
+ @responses.activate
+ def test_get_locations_no_params(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/locations.md
+ responses.add(responses.GET, host + "/cars/1337/locations",
+ json=[
+ {
+ "latitude": 50.67815,
+ "longitude": 3.208155,
+ "altitude": 31.8,
+ "satellites": 4,
+ "heading": 167,
+ "date": "2016-03-01T02:24:20.000000+00:00"
+ }
+ ],
+ status=200)
+ locations, err = xee.get_locations(1337, "fake_access_token")
+ self.assertEqual(len(locations), 1)
+ self.assertEqual(locations[0].latitude, 50.67815)
+ self.assertEqual(locations[0].longitude, 3.208155)
+ self.assertEqual(locations[0].altitude, 31.8)
+ self.assertEqual(locations[0].satellites, 4)
+ self.assertEqual(locations[0].heading, 167)
+ self.assertEqual(locations[0].date, datetime(2016, 3, 1, 2, 24, 20, 0, tzinfo=pytz.utc))
+
+ @responses.activate
+ def test_get_locations_empty(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/locations.md
+ responses.add(responses.GET, host + "/cars/1337/locations",
+ json=[],
+ status=200)
+ locations, err = xee.get_locations(1337, "fake_access_token")
+ expected = []
+ self.assertListEqual(locations, expected)
+
+
+class TestTrips(unittest.TestCase):
+ @responses.activate
+ def test_get_trips_no_params(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/trips.md
+ responses.add(responses.GET, host + "/cars/1337/trips",
+ json=[
+ {
+ "id": "56b43a4f051f29071f14218d",
+ "beginLocation": {
+ "latitude": 50.6817,
+ "longitude": 3.08202,
+ "altitude": 2,
+ "heading": 0,
+ "satellites": 1,
+ "date": "2016-01-29T18:36:17Z"
+ },
+ "endLocation": {
+ "latitude": 50.6817,
+ "longitude": 3.08202,
+ "altitude": 2,
+ "heading": 0,
+ "satellites": 1,
+ "date": "2016-01-29T18:36:17Z"
+ },
+ "beginDate": "2016-01-29T18:39:17Z",
+ "endDate": "2016-01-29T19:15:15Z",
+ "creationDate": "2016-01-29T18:39:17Z",
+ "lastUpdateDate": "2016-01-29T19:15:15Z"
+ }
+ ],
+ status=200)
+ trips, err = xee.get_trips(1337, "fake_access_token")
+ self.assertEqual(len(trips), 1)
+ self.assertEqual(trips[0].id, '56b43a4f051f29071f14218d')
+ self.assertDictEqual(trips[0].begin_location._asdict(), {
+ "latitude": 50.6817,
+ "longitude": 3.08202,
+ "altitude": 2,
+ "heading": 0,
+ "satellites": 1,
+ "date": datetime(2016, 1, 29, 18, 36, 17, tzinfo=pytz.utc),
+ })
+ self.assertDictEqual(trips[0].end_location._asdict(), {
+ "latitude": 50.6817,
+ "longitude": 3.08202,
+ "altitude": 2,
+ "heading": 0,
+ "satellites": 1,
+ "date": datetime(2016, 1, 29, 18, 36, 17, tzinfo=pytz.utc),
+ })
+ self.assertEqual(trips[0].begin_date, datetime(2016, 1, 29, 18, 39, 17, tzinfo=pytz.utc))
+ self.assertEqual(trips[0].end_date, datetime(2016, 1, 29, 19, 15, 15, tzinfo=pytz.utc))
+
+ @responses.activate
+ def test_get_trips_empty(self):
+ # Mock in https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/trips.md
+ responses.add(responses.GET, host + "/cars/1337/trips",
+ json=[],
+ status=200)
+ trips, err = xee.get_trips(1337, "fake_access_token")
+ expected = []
+ self.assertListEqual(trips, expected)
+
+
+class TestTripLocations(unittest.TestCase):
+ @responses.activate
+ def test_get_locations(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/v3/trips
+ # /56b43a4f051f29071f14218d/locations.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/locations",
+ json=[
+ {
+ "latitude": 50.67815,
+ "longitude": 3.208155,
+ "altitude": 31.8,
+ "satellites": 4,
+ "heading": 167,
+ "date": "2016-03-01T02:24:20.000000+00:00"
+ }
+ ],
+ status=200)
+ locations, err = xee.get_trip_locations("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertEqual(len(locations), 1)
+ self.assertEqual(locations[0].latitude, 50.67815)
+ self.assertEqual(locations[0].longitude, 3.208155)
+ self.assertEqual(locations[0].altitude, 31.8)
+ self.assertEqual(locations[0].satellites, 4)
+ self.assertEqual(locations[0].heading, 167)
+ self.assertEqual(locations[0].date, datetime(2016, 3, 1, 2, 24, 20, 0, tzinfo=pytz.utc))
+
+ @responses.activate
+ def test_get_locations_empty(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/cars/locations.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/locations",
+ json=[],
+ status=200)
+ locations, err = xee.get_trip_locations("56b43a4f051f29071f14218d", "fake_access_token")
+ expected = []
+ self.assertListEqual(locations, expected)
+
+
+class TestTripSignals(unittest.TestCase):
+ @responses.activate
+ def test_get_signals(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/v3/trips
+ # /56b43a4f051f29071f14218d/signals.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/signals",
+ json=[
+ {
+ "name": "LockSts",
+ "value": 0,
+ "date": "2016-03-01T02:24:24.000000+00:00"
+ },
+ {
+ "name": "Odometer",
+ "value": 34512.1,
+ "date": "2016-03-01T02:24:27.116000+00:00"
+ }
+ ],
+ status=200)
+ signals, err = xee.get_trip_signals("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertEqual(len(signals), 2)
+ self.assertEqual(signals[0].name, 'LockSts')
+ self.assertEqual(signals[0].value, 0.0)
+ self.assertEqual(signals[0].date, datetime(2016, 3, 1, 2, 24, 24, 0, tzinfo=pytz.utc))
+ self.assertEqual(signals[1].name, 'Odometer')
+ self.assertEqual(signals[1].value, 34512.1)
+ self.assertEqual(signals[1].date, datetime(2016, 3, 1, 2, 24, 27, 116000, tzinfo=pytz.utc))
+
+ @responses.activate
+ def test_get_signals_empty(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/v3/trips
+ # /56b43a4f051f29071f14218d/signals.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/signals",
+ json=[],
+ status=200)
+ locations, err = xee.get_trip_signals("56b43a4f051f29071f14218d", "fake_access_token")
+ expected = []
+ self.assertListEqual(locations, expected)
+
+
+class TestTripStats(unittest.TestCase):
+ @responses.activate
+ def test_get_stats(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/trips/trip_id/stats.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats",
+ json=[
+ {
+ "type": "MILEAGE",
+ "value": 5.800642496450446
+ },
+ {
+ "type": "USED_TIME",
+ "value": 980
+ }
+ ],
+ status=200)
+ stats, err = xee.get_trip_stats("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertEqual(len(stats), 2)
+ self.assertEqual(stats[0].type, 'MILEAGE')
+ self.assertEqual(stats[0].value, 5.800642496450446)
+ self.assertEqual(stats[1].type, 'USED_TIME')
+ self.assertEqual(stats[1].value, 980)
+
+ @responses.activate
+ def test_get_stats_empty(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/v3/trips
+ # /56b43a4f051f29071f14218d/signals.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats",
+ json=[],
+ status=200)
+ stats, err = xee.get_trip_stats("56b43a4f051f29071f14218d", "fake_access_token")
+ expected = []
+ self.assertListEqual(stats, expected)
+
+ @responses.activate
+ def test_get_stats_trip_does_not_exists(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/v3/trips
+ # /56b43a4f051f29071f14218d/signals.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats",
+ json=[
+ {
+ "type": "PARAMETERS_ERROR",
+ "message": "Trip not found",
+ "tip": "Please check that the trip exists, looks like it does not"
+ }
+ ],
+ status=404)
+ stats, err = xee.get_trip_stats("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertIsNotNone(err)
+ self.assertEqual(err.type, 'PARAMETERS_ERROR')
+ self.assertEqual(err.message, "Trip not found")
+ self.assertEqual(err.tip, "Please check that the trip exists, looks like it does not")
+
+
+class TestTripMileage(unittest.TestCase):
+ @responses.activate
+ def test_get_mileage(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/trips/trip_id/stats
+ # /mileage.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats/mileage",
+ json={
+ "type": "MILEAGE",
+ "value": 5.800642496450446
+ },
+ status=200)
+ mileage, err = xee.get_trip_mileage("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertEqual(mileage.type, 'MILEAGE')
+ self.assertEqual(mileage.value, 5.800642496450446)
+
+ @responses.activate
+ def test_get_mileage_not_exists(self):
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats/mileage",
+ json=[
+ {
+ "type": "PARAMETERS_ERROR",
+ "message": "Statistics not found",
+ "tip": "Please check that the trip exists and data are present, " +
+ "looks like it does not"
+ }
+ ],
+ status=404)
+ mileage, err = xee.get_trip_mileage("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertIsNotNone(err)
+ self.assertEqual(err.type, 'PARAMETERS_ERROR')
+ self.assertEqual(err.message, "Statistics not found")
+ self.assertEqual(err.tip, "Please check that the trip exists and data are present, " +
+ "looks like it does not")
+
+ @responses.activate
+ def test_get_mileage_trip_does_not_exists(self):
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats/mileage",
+ json=[
+ {
+ "type": "PARAMETERS_ERROR",
+ "message": "Trip not found",
+ "tip": "Please check that the trip exists, looks like it does not"
+ }
+ ],
+ status=404)
+ stats, err = xee.get_trip_mileage("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertIsNotNone(err)
+ self.assertEqual(err.type, 'PARAMETERS_ERROR')
+ self.assertEqual(err.message, "Trip not found")
+ self.assertEqual(err.tip, "Please check that the trip exists, looks like it does not")
+
+
+class TestTripDuration(unittest.TestCase):
+ @responses.activate
+ def test_get_duration(self):
+ # Mock https://github.com/xee-lab/xee-api-docs/blob/master/api/api/v3/trips/trip_id/stats
+ # /usedtime.md
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats/usedtime",
+ json={
+ "type": "USED_TIME",
+ "value": 1271
+ },
+ status=200)
+ duration, err = xee.get_trip_duration("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertEqual(duration.type, 'USED_TIME')
+ self.assertEqual(duration.value, 1271)
+
+ @responses.activate
+ def test_get_duration_not_exists(self):
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats/usedtime",
+ json=[
+ {
+ "type": "PARAMETERS_ERROR",
+ "message": "Statistics not found",
+ "tip": "Please check that the trip exists and data are present, " +
+ "looks like it does not"
+ }
+ ],
+ status=404)
+ mileage, err = xee.get_trip_duration("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertIsNotNone(err)
+ self.assertEqual(err.type, 'PARAMETERS_ERROR')
+ self.assertEqual(err.message, "Statistics not found")
+ self.assertEqual(err.tip, "Please check that the trip exists and data are present, " +
+ "looks like it does not")
+
+ @responses.activate
+ def test_get_duration_trip_does_not_exists(self):
+ responses.add(responses.GET, host + "/trips/56b43a4f051f29071f14218d/stats/usedtime",
+ json=[
+ {
+ "type": "PARAMETERS_ERROR",
+ "message": "Trip not found",
+ "tip": "Please check that the trip exists, looks like it does not"
+ }
+ ],
+ status=404)
+ stats, err = xee.get_trip_duration("56b43a4f051f29071f14218d", "fake_access_token")
+ self.assertIsNotNone(err)
+ self.assertEqual(err.type, 'PARAMETERS_ERROR')
+ self.assertEqual(err.message, "Trip not found")
+ self.assertEqual(err.tip, "Please check that the trip exists, looks like it does not")
+
+
+class TestErrors(unittest.TestCase):
+ @responses.activate
+ def test_400(self):
+ return
+
+ def test_401(self):
+ return
+
+ def test_403(self):
+ return
+
+ def test_404(self):
+ return
+
+ def test_416(self):
+ return
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 4
} | 3.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"responses",
"pytz",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
isodate==0.7.2
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytz==2025.2
PyYAML==6.0.2
requests==2.32.3
responses==0.25.7
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/quentin7b/xee-sdk-python.git@01fe40c84a7cc3f558c8f7ba07b4b35d61b406ce#egg=xee
| name: xee-sdk-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- isodate==0.7.2
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytz==2025.2
- pyyaml==6.0.2
- requests==2.32.3
- responses==0.25.7
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/xee-sdk-python
| [
"test/__init__.py::TestUser::test_get_user_ok",
"test/__init__.py::TestTripStats::test_get_stats",
"test/__init__.py::TestTripStats::test_get_stats_empty",
"test/__init__.py::TestTripStats::test_get_stats_trip_does_not_exists",
"test/__init__.py::TestTripMileage::test_get_mileage",
"test/__init__.py::TestTripMileage::test_get_mileage_not_exists",
"test/__init__.py::TestTripMileage::test_get_mileage_trip_does_not_exists",
"test/__init__.py::TestTripDuration::test_get_duration",
"test/__init__.py::TestTripDuration::test_get_duration_not_exists",
"test/__init__.py::TestTripDuration::test_get_duration_trip_does_not_exists",
"test/test_sdk.py::TestUser::test_get_user_ok",
"test/test_sdk.py::TestTripStats::test_get_stats",
"test/test_sdk.py::TestTripStats::test_get_stats_empty",
"test/test_sdk.py::TestTripStats::test_get_stats_trip_does_not_exists",
"test/test_sdk.py::TestTripMileage::test_get_mileage",
"test/test_sdk.py::TestTripMileage::test_get_mileage_not_exists",
"test/test_sdk.py::TestTripMileage::test_get_mileage_trip_does_not_exists",
"test/test_sdk.py::TestTripDuration::test_get_duration",
"test/test_sdk.py::TestTripDuration::test_get_duration_not_exists",
"test/test_sdk.py::TestTripDuration::test_get_duration_trip_does_not_exists"
]
| []
| [
"test/__init__.py::TestAuthFromAuthorizationCode::test_access_token_from_authorization_code_ok",
"test/__init__.py::TestAuthFromRefreshToken::test_access_token_from_refresh_token_ok",
"test/__init__.py::TestUser::test_get_user_403",
"test/__init__.py::TestCars::test_get_car",
"test/__init__.py::TestCars::test_get_cars_404",
"test/__init__.py::TestCars::test_get_cars_access_403",
"test/__init__.py::TestCars::test_get_cars_list_empty",
"test/__init__.py::TestCars::test_get_cars_list_ok",
"test/__init__.py::TestCars::test_get_cars_scope_403",
"test/__init__.py::TestStats::test_get_mileage_no_params",
"test/__init__.py::TestStats::test_get_used_time_no_params",
"test/__init__.py::TestSignals::test_get_signals_empty",
"test/__init__.py::TestSignals::test_get_signals_no_params",
"test/__init__.py::TestLocations::test_get_locations_empty",
"test/__init__.py::TestLocations::test_get_locations_no_params",
"test/__init__.py::TestTrips::test_get_trips_empty",
"test/__init__.py::TestTrips::test_get_trips_no_params",
"test/__init__.py::TestTripLocations::test_get_locations",
"test/__init__.py::TestTripLocations::test_get_locations_empty",
"test/__init__.py::TestTripSignals::test_get_signals",
"test/__init__.py::TestTripSignals::test_get_signals_empty",
"test/__init__.py::TestErrors::test_400",
"test/__init__.py::TestErrors::test_401",
"test/__init__.py::TestErrors::test_403",
"test/__init__.py::TestErrors::test_404",
"test/__init__.py::TestErrors::test_416",
"test/test_sdk.py::TestAuthFromAuthorizationCode::test_access_token_from_authorization_code_ok",
"test/test_sdk.py::TestAuthFromRefreshToken::test_access_token_from_refresh_token_ok",
"test/test_sdk.py::TestUser::test_get_user_403",
"test/test_sdk.py::TestCars::test_get_car",
"test/test_sdk.py::TestCars::test_get_cars_404",
"test/test_sdk.py::TestCars::test_get_cars_access_403",
"test/test_sdk.py::TestCars::test_get_cars_list_empty",
"test/test_sdk.py::TestCars::test_get_cars_list_ok",
"test/test_sdk.py::TestCars::test_get_cars_scope_403",
"test/test_sdk.py::TestStats::test_get_mileage_no_params",
"test/test_sdk.py::TestStats::test_get_used_time_no_params",
"test/test_sdk.py::TestSignals::test_get_signals_empty",
"test/test_sdk.py::TestSignals::test_get_signals_no_params",
"test/test_sdk.py::TestLocations::test_get_locations_empty",
"test/test_sdk.py::TestLocations::test_get_locations_no_params",
"test/test_sdk.py::TestTrips::test_get_trips_empty",
"test/test_sdk.py::TestTrips::test_get_trips_no_params",
"test/test_sdk.py::TestTripLocations::test_get_locations",
"test/test_sdk.py::TestTripLocations::test_get_locations_empty",
"test/test_sdk.py::TestTripSignals::test_get_signals",
"test/test_sdk.py::TestTripSignals::test_get_signals_empty",
"test/test_sdk.py::TestErrors::test_400",
"test/test_sdk.py::TestErrors::test_401",
"test/test_sdk.py::TestErrors::test_403",
"test/test_sdk.py::TestErrors::test_404",
"test/test_sdk.py::TestErrors::test_416"
]
| []
| Apache License 2.0 | 772 | [
"Makefile",
"xee/entities.py",
"xee/version.py",
".travis.yml",
"README.md",
"xee/sdk.py"
]
| [
"Makefile",
"xee/entities.py",
"xee/version.py",
".travis.yml",
"README.md",
"xee/sdk.py"
]
|
|
falconry__falcon-909 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | 2016-09-22 22:46:33 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/909?src=pr) is 100% (diff: 100%)
> Merging [#909](https://codecov.io/gh/falconry/falcon/pull/909?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will not change coverage
```diff
@@ master #909 diff @@
====================================
Files 31 31
Lines 1969 1970 +1
Methods 0 0
Messages 0 0
Branches 320 320
====================================
+ Hits 1969 1970 +1
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [67d6102...bd67690](https://codecov.io/gh/falconry/falcon/compare/67d61029847cbf59e4053c8a424df4f9f87ad36f...bd6769013acdb0cb864dddeef8c65bac200ad089?src=pr) | diff --git a/falcon/response.py b/falcon/response.py
index 6e73436..eabced5 100644
--- a/falcon/response.py
+++ b/falcon/response.py
@@ -625,6 +625,24 @@ class Response(object):
""",
lambda v: ', '.join(v))
+ accept_ranges = header_property(
+ 'Accept-Ranges',
+ """Sets the Accept-Ranges header.
+
+ The Accept-Ranges header field indicates to the client which
+ range units are supported (e.g. "bytes") for the target
+ resource.
+
+ If range requests are not supported for the target resource,
+ the header may be set to "none" to advise the client not to
+ attempt any such requests.
+
+ Note:
+ "none" is the literal string, not Python's built-in ``None``
+ type.
+
+ """)
+
def _encode_header(self, name, value, py2=PY2):
if py2:
if isinstance(name, unicode):
| Add support to Response for the Accept-Ranges header
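Desired usage from a responder would be along these lines (a minimal sketch; `RangedResource` is a hypothetical handler, and `accept_ranges` is the attribute this issue proposes):

```python
import falcon


class RangedResource(object):
    def on_get(self, req, resp):
        # Advertise that byte-range requests are supported (RFC 7233, section 2.3).
        resp.accept_ranges = 'bytes'
        # To advise clients not to attempt range requests, set the literal
        # string "none" (not Python's built-in None):
        # resp.accept_ranges = 'none'
        resp.body = 'hello'
```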
Should be fairly straightforward (see also: https://tools.ietf.org/html/rfc7233#section-2.3) | falconry/falcon | diff --git a/tests/test_headers.py b/tests/test_headers.py
index 3244f22..387690f 100644
--- a/tests/test_headers.py
+++ b/tests/test_headers.py
@@ -50,6 +50,8 @@ class HeaderHelpersResource(object):
else:
resp.content_range = (0, 25, 100, req.range_unit)
+ resp.accept_ranges = 'bytes'
+
self.resp = resp
def on_head(self, req, resp):
@@ -322,6 +324,9 @@ class TestHeaders(testing.TestCase):
resp.content_range = (1, 499, 10 * 1024, 'bytes')
self.assertEqual(resp.content_range, 'bytes 1-499/10240')
+ self.assertEqual(resp.accept_ranges, 'bytes')
+ self.assertEqual(result.headers['Accept-Ranges'], 'bytes')
+
req_headers = {'Range': 'items=0-25'}
result = self.simulate_get(headers=req_headers)
self.assertEqual(result.headers['Content-Range'], 'items 0-25/100')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"ddt",
"pytest-randomly",
"pytest-cov",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
execnet==1.9.0
-e git+https://github.com/falconry/falcon.git@67d61029847cbf59e4053c8a424df4f9f87ad36f#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-randomly==3.10.3
pytest-xdist==3.0.2
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- execnet==1.9.0
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-randomly==3.10.3
- pytest-xdist==3.0.2
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_headers.py::TestHeaders::test_response_header_helpers_on_get"
]
| []
| [
"tests/test_headers.py::TestHeaders::test_add_link_with_title",
"tests/test_headers.py::TestHeaders::test_add_link_with_hreflang_multi",
"tests/test_headers.py::TestHeaders::test_unicode_headers",
"tests/test_headers.py::TestHeaders::test_required_header",
"tests/test_headers.py::TestHeaders::test_content_length",
"tests/test_headers.py::TestHeaders::test_add_link_complex",
"tests/test_headers.py::TestHeaders::test_custom_content_type",
"tests/test_headers.py::TestHeaders::test_vary_header_2____accept_encoding____x_auth_token_____accept_encoding__x_auth_token__",
"tests/test_headers.py::TestHeaders::test_headers_as_list",
"tests/test_headers.py::TestHeaders::test_override_default_media_type_missing_encoding",
"tests/test_headers.py::TestHeaders::test_add_link_single",
"tests/test_headers.py::TestHeaders::test_add_link_with_hreflang",
"tests/test_headers.py::TestHeaders::test_unicode_location_headers",
"tests/test_headers.py::TestHeaders::test_no_content_length_1_204_No_Content",
"tests/test_headers.py::TestHeaders::test_passthrough_request_headers",
"tests/test_headers.py::TestHeaders::test_content_type_no_body",
"tests/test_headers.py::TestHeaders::test_vary_star",
"tests/test_headers.py::TestHeaders::test_response_set_and_get_header",
"tests/test_headers.py::TestHeaders::test_default_media_type",
"tests/test_headers.py::TestHeaders::test_content_header_missing",
"tests/test_headers.py::TestHeaders::test_add_link_with_title_star",
"tests/test_headers.py::TestHeaders::test_no_content_type_2_304_Not_Modified",
"tests/test_headers.py::TestHeaders::test_content_length_options",
"tests/test_headers.py::TestHeaders::test_vary_header_3____accept_encoding____x_auth_token_____accept_encoding__x_auth_token__",
"tests/test_headers.py::TestHeaders::test_override_default_media_type_1___text_plain__charset_UTF_8____Hello_Unicode_____",
"tests/test_headers.py::TestHeaders::test_default_value",
"tests/test_headers.py::TestHeaders::test_override_default_media_type_2___text_plain____Hello_ISO_8859_1___",
"tests/test_headers.py::TestHeaders::test_add_link_with_type_hint",
"tests/test_headers.py::TestHeaders::test_add_link_multiple",
"tests/test_headers.py::TestHeaders::test_response_append_header",
"tests/test_headers.py::TestHeaders::test_vary_header_1____accept_encoding_____accept_encoding__",
"tests/test_headers.py::TestHeaders::test_add_link_with_anchor",
"tests/test_headers.py::TestHeaders::test_no_content_type_1_204_No_Content",
"tests/test_headers.py::TestHeaders::test_no_content_length_2_304_Not_Modified"
]
| []
| Apache License 2.0 | 773 | [
"falcon/response.py"
]
| [
"falcon/response.py"
]
|
treasure-data__td-client-python-21 | 59f47438514f128cadf945f54cf56d5f311c5338 | 2016-09-23 08:10:45 | 59f47438514f128cadf945f54cf56d5f311c5338 | diff --git a/tdclient/client.py b/tdclient/client.py
index 691b225..85c2258 100644
--- a/tdclient/client.py
+++ b/tdclient/client.py
@@ -527,10 +527,7 @@ class Client(object):
[:class:`tdclient.models.Schedule`]
"""
result = self.api.list_schedules()
- def schedule(m):
- name,cron,query,database,result_url,timezone,delay,next_time,priority,retry_limit,org_name = m
- return models.Schedule(self, name, cron, query, database, result_url, timezone, delay, next_time, priority, retry_limit, org_name)
- return [ schedule(m) for m in result ]
+ return [ models.Schedule(self, m.get("name"), m.get("cron"), m.get("query"), **m) for m in result ]
def update_schedule(self, name, params=None):
"""
diff --git a/tdclient/schedule_api.py b/tdclient/schedule_api.py
index 8d9ec3b..02e7106 100644
--- a/tdclient/schedule_api.py
+++ b/tdclient/schedule_api.py
@@ -50,17 +50,12 @@ class ScheduleAPI(object):
self.raise_error("List schedules failed", res, body)
js = self.checked_json(body, ["schedules"])
def schedule(m):
- name = m.get("name")
- cron = m.get("cron")
- query = m.get("query")
- database = m.get("database")
- result_url = m.get("result")
- timezone = m.get("timezone", "UTC")
- delay = m.get("delay")
- next_time = self._parsedate(self.get_or_else(m, "next_time", "1970-01-01T00:00:00Z"), "%Y-%m-%dT%H:%M:%SZ")
- priority = m.get("priority")
- retry_limit = m.get("retry_limit")
- return (name, cron, query, database, result_url, timezone, delay, next_time, priority, retry_limit, None) # same as database
+ m = dict(m)
+ if "timezone" not in m:
+ m["timezone"] = "UTC"
+ m["created_at"] = self._parsedate(self.get_or_else(m, "created_at", "1970-01-01T00:00:00Z"), "%Y-%m-%dT%H:%M:%SZ")
+ m["next_time"] = self._parsedate(self.get_or_else(m, "next_time", "1970-01-01T00:00:00Z"), "%Y-%m-%dT%H:%M:%SZ")
+ return m
return [ schedule(m) for m in js["schedules"] ]
def update_schedule(self, name, params=None):
diff --git a/tdclient/schedule_model.py b/tdclient/schedule_model.py
index 104550d..888ae08 100644
--- a/tdclient/schedule_model.py
+++ b/tdclient/schedule_model.py
@@ -24,19 +24,27 @@ class Schedule(Model):
"""Schedule on Treasure Data Service
"""
- def __init__(self, client, name, cron, query, database=None, result_url=None, timezone=None, delay=None, next_time=None, priority=None, retry_limit=None, org_name=None):
+ def __init__(self, client, name, cron, query, **kwargs):
super(Schedule, self).__init__(client)
self._name = name
self._cron = cron
+ self._timezone = kwargs.get("timezone")
+ self._delay = kwargs.get("delay")
+ self._created_at = kwargs.get("created_at")
+ self._type = kwargs.get("type")
self._query = query
- self._database = database
- self._result_url = result_url
- self._timezone = timezone
- self._delay = delay
- self._next_time = next_time
- self._priority = priority
- self._retry_limit = retry_limit
- self._org_name = org_name
+ self._database = kwargs.get("database")
+ self._user_name = kwargs.get("user_name")
+ self._priority = kwargs.get("priority")
+ self._retry_limit = kwargs.get("retry_limit")
+ if "result_url" in kwargs:
+ # backward compatibility for td-client-python < 0.6.0
+ # TODO: remove this code if not necessary with fixing test
+ self._result = kwargs.get("result_url")
+ else:
+ self._result = kwargs.get("result")
+ self._next_time = kwargs.get("next_time")
+ self._org_name = kwargs.get("org_name")
@property
def name(self):
@@ -68,7 +76,7 @@ class Schedule(Model):
def result_url(self):
"""The result output configuration in URL form of a scheduled job
"""
- return self._result_url
+ return self._result
@property
def timezone(self):
@@ -88,7 +96,10 @@ class Schedule(Model):
def priority(self):
"""The priority of a scheduled job
"""
- return self._priority
+ if self._priority in Job.JOB_PRIORITY:
+ return Job.JOB_PRIORITY[self._priority]
+ else:
+ return str(self._priority)
@property
def retry_limit(self):
@@ -111,6 +122,27 @@ class Schedule(Model):
"""
return self._next_time
+ @property
+ def created_at(self):
+ """
+ TODO: add docstring
+ """
+ return self._created_at
+
+ @property
+ def type(self):
+ """
+ TODO: add docstring
+ """
+ return self._type
+
+ @property
+ def user_name(self):
+ """
+ TODO: add docstring
+ """
+ return self._user_name
+
def run(self, time, num=None):
"""Run a scheduled job
"""
| Missing created_time and user_name in list_schedules api
The Schedule API returns the following for each scheduled job,
but `created_at` and `user_name` are missing from the parsed result.
```
$ curl -H "AUTHORIZATION: TD1 XXXXX" "http://api.treasuredata.com/v3/schedule/list"
...
{
"name":"xxx",
"cron":null,
"timezone":"UTC",
"delay":0,
"created_at":"2016-08-15T23:03:59Z",
"type":"presto",
"query":"xxxx",
"database":"api_production",
"user_name":"YYYY",
"priority":0,
"retry_limit":0,
"result":"",
"next_time":null
}
```
https://github.com/treasure-data/td-client-python/blob/master/tdclient/schedule_api.py#L52-L63
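Once those fields are passed through, they could be consumed like this (a minimal sketch; `list_schedules` is the existing `tdclient.Client` method, while the `user_name`/`created_at` accessors are what this issue asks for):

```python
import tdclient

client = tdclient.Client("YOUR_API_KEY")  # placeholder API key
for schedule in client.list_schedules():
    # user_name and created_at appear in the raw JSON above but are
    # currently dropped while parsing the /v3/schedule/list response.
    print(schedule.name, schedule.user_name, schedule.created_at)
```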
| treasure-data/td-client-python | diff --git a/tdclient/test/schedule_api_test.py b/tdclient/test/schedule_api_test.py
index b7f18bb..faca3d1 100644
--- a/tdclient/test/schedule_api_test.py
+++ b/tdclient/test/schedule_api_test.py
@@ -70,13 +70,54 @@ def test_delete_schedule_success():
def test_list_schedules_success():
td = api.API("APIKEY")
- # TODO: should be replaced by wire dump
body = b"""
{
"schedules":[
- {"name":"foo","cron":"* * * * *","query":"SELECT COUNT(1) FROM nasdaq;","database":"sample_datasets","result":"","timezone":"UTC","delay":"","next_time":"","priority":"","retry_limit":""},
- {"name":"bar","cron":"* * * * *","query":"SELECT COUNT(1) FROM nasdaq;","database":"sample_datasets","result":"","timezone":"UTC","delay":"","next_time":"","priority":"","retry_limit":""},
- {"name":"baz","cron":"* * * * *","query":"SELECT COUNT(1) FROM nasdaq;","database":"sample_datasets","result":"","timezone":"UTC","delay":"","next_time":"","priority":"","retry_limit":""}
+ {
+ "name": "foo",
+ "cron": null,
+ "timezone": "UTC",
+ "delay": 0,
+ "created_at": "2016-08-02T17:58:40Z",
+ "type": "presto",
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "database": "sample_datasets",
+ "user_name": "Yuu Yamashita",
+ "priority": 0,
+ "retry_limit": 0,
+ "result": "",
+ "next_time": null
+ },
+ {
+ "name": "bar",
+ "cron": "0 0 * * *",
+ "timezone": "UTC",
+ "delay": 0,
+ "created_at": "2016-08-02T18:01:04Z",
+ "type": "presto",
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "database": "sample_datasets",
+ "user_name": "Kazuki Ota",
+ "priority": 0,
+ "retry_limit": 0,
+ "result": "",
+ "next_time": "2016-09-24T00:00:00Z"
+ },
+ {
+ "name": "baz",
+ "cron": "* * * * *",
+ "timezone": "UTC",
+ "delay": 0,
+ "created_at": "2016-03-02T23:01:59Z",
+ "type": "hive",
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "database": "sample_datasets",
+ "user_name": "Yuu Yamashita",
+ "priority": 0,
+ "retry_limit": 0,
+ "result": "",
+ "next_time": "2016-07-06T00:00:00Z"
+ }
]
}
"""
@@ -84,6 +125,22 @@ def test_list_schedules_success():
schedules = td.list_schedules()
td.get.assert_called_with("/v3/schedule/list")
assert len(schedules) == 3
+ next_time = sorted([ schedule.get("next_time") for schedule in schedules if "next_time" in schedule ])
+ assert len(next_time) == 3
+ assert next_time[2].year == 2016
+ assert next_time[2].month == 9
+ assert next_time[2].day == 24
+ assert next_time[2].hour == 0
+ assert next_time[2].minute == 0
+ assert next_time[2].second == 0
+ created_at = sorted([ schedule.get("created_at") for schedule in schedules if "created_at" in schedule ])
+ assert len(created_at) == 3
+ assert created_at[2].year == 2016
+ assert created_at[2].month == 8
+ assert created_at[2].day == 2
+ assert created_at[2].hour == 18
+ assert created_at[2].minute == 1
+ assert created_at[2].second == 4
def test_list_schedules_failure():
td = api.API("APIKEY")
@@ -100,13 +157,59 @@ def test_update_schedule_success():
def test_history_success():
td = api.API("APIKEY")
- # TODO: should be replaced by wire dump
body = b"""
{
"history": [
- {"job_id":"12345"},
- {"job_id":"67890"}
- ]
+ {
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "type": "presto",
+ "priority": 0,
+ "retry_limit": 0,
+ "duration": 1,
+ "status": "success",
+ "cpu_time": null,
+ "result_size": 30,
+ "job_id": "12345",
+ "created_at": "2016-04-13 05:24:59 UTC",
+ "updated_at": "2016-04-13 05:25:02 UTC",
+ "start_at": "2016-04-13 05:25:00 UTC",
+ "end_at": "2016-04-13 05:25:01 UTC",
+ "num_records": 1,
+ "database": "sample_datasets",
+ "user_name": "Ryuta Kamizono",
+ "result": "",
+ "url": "https://console.treasuredata.com/jobs/12345",
+ "hive_result_schema": "[[\\"_col0\\", \\"bigint\\"]]",
+ "organization": null,
+ "scheduled_at": ""
+ },
+ {
+ "query": "SELECT COUNT(1) FROM nasdaq;",
+ "type": "presto",
+ "priority": 0,
+ "retry_limit": 0,
+ "duration": 1,
+ "status": "success",
+ "cpu_time": null,
+ "result_size": 30,
+ "job_id": "67890",
+ "created_at": "2016-04-13 05:24:59 UTC",
+ "updated_at": "2016-04-13 05:25:02 UTC",
+ "start_at": "2016-04-13 05:25:00 UTC",
+ "end_at": "2016-04-13 05:25:01 UTC",
+ "num_records": 1,
+ "database": "sample_datasets",
+ "user_name": "Ryuta Kamizono",
+ "result": "",
+ "url": "https://console.treasuredata.com/jobs/67890",
+ "hive_result_schema": "[[\\"_col0\\", \\"bigint\\"]]",
+ "organization": null,
+ "scheduled_at": ""
+ }
+ ],
+ "count": 2,
+ "from": 0,
+ "to": 20
}
"""
td.get = mock.MagicMock(return_value=make_response(200, body))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5.2",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "py.test --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
msgpack-python==0.4.8
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.4.2
six==1.17.0
-e git+https://github.com/treasure-data/td-client-python.git@59f47438514f128cadf945f54cf56d5f311c5338#egg=td_client
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: td-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.4.8
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.4.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/td-client-python
| [
"tdclient/test/schedule_api_test.py::test_list_schedules_success"
]
| []
| [
"tdclient/test/schedule_api_test.py::test_create_schedule_success",
"tdclient/test/schedule_api_test.py::test_create_schedule_without_cron_success",
"tdclient/test/schedule_api_test.py::test_delete_schedule_success",
"tdclient/test/schedule_api_test.py::test_list_schedules_failure",
"tdclient/test/schedule_api_test.py::test_update_schedule_success",
"tdclient/test/schedule_api_test.py::test_history_success",
"tdclient/test/schedule_api_test.py::test_run_schedule_success"
]
| []
| Apache License 2.0 | 774 | [
"tdclient/client.py",
"tdclient/schedule_api.py",
"tdclient/schedule_model.py"
]
| [
"tdclient/client.py",
"tdclient/schedule_api.py",
"tdclient/schedule_model.py"
]
|
|
tornadoweb__tornado-1844 | 53d23fbe1d04c4f664c4c3856025d3d0920b0240 | 2016-09-23 14:02:13 | ecd8968c5135b810cd607b5902dda2cd32122b39 | diff --git a/docs/releases.rst b/docs/releases.rst
index a9bfa1c5..f61d1ccb 100644
--- a/docs/releases.rst
+++ b/docs/releases.rst
@@ -4,7 +4,6 @@ Release notes
.. toctree::
:maxdepth: 2
- releases/v4.4.2
releases/v4.4.1
releases/v4.4.0
releases/v4.3.0
diff --git a/docs/releases/v4.4.2.rst b/docs/releases/v4.4.2.rst
deleted file mode 100644
index 66349a3f..00000000
--- a/docs/releases/v4.4.2.rst
+++ /dev/null
@@ -1,22 +0,0 @@
-What's new in Tornado 4.4.2
-===========================
-
-Oct 1, 2016
-------------
-
-Security fixes
-~~~~~~~~~~~~~~
-
-* A difference in cookie parsing between Tornado and web browsers
- (especially when combined with Google Analytics) could allow an
- attacker to set arbitrary cookies and bypass XSRF protection. The
- cookie parser has been rewritten to fix this attack.
-
-Backwards-compatibility notes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-* Cookies containing certain special characters (in particular semicolon
- and square brackets) are now parsed differently.
-* If the cookie header contains a combination of valid and invalid cookies,
- the valid ones will be returned (older versions of Tornado would reject the
- entire header for a single invalid cookie).
diff --git a/tornado/httpclient.py b/tornado/httpclient.py
index 13f81e2f..2b5d1fba 100644
--- a/tornado/httpclient.py
+++ b/tornado/httpclient.py
@@ -341,13 +341,15 @@ class HTTPRequest(object):
Allowed values are implementation-defined; ``curl_httpclient``
supports "basic" and "digest"; ``simple_httpclient`` only supports
"basic"
- :arg float connect_timeout: Timeout for initial connection in seconds
- :arg float request_timeout: Timeout for entire request in seconds
+ :arg float connect_timeout: Timeout for initial connection in seconds,
+ default 20 seconds
+ :arg float request_timeout: Timeout for entire request in seconds,
+ default 20 seconds
:arg if_modified_since: Timestamp for ``If-Modified-Since`` header
:type if_modified_since: `datetime` or `float`
:arg bool follow_redirects: Should redirects be followed automatically
- or return the 3xx response?
- :arg int max_redirects: Limit for ``follow_redirects``
+ or return the 3xx response? Default True.
+ :arg int max_redirects: Limit for ``follow_redirects``, default 5.
:arg string user_agent: String to send as ``User-Agent`` header
:arg bool decompress_response: Request a compressed response from
the server and decompress it after downloading. Default is True.
@@ -381,9 +383,9 @@ class HTTPRequest(object):
:arg string proxy_auth_mode: HTTP proxy Authentication mode;
default is "basic". supports "basic" and "digest"
:arg bool allow_nonstandard_methods: Allow unknown values for ``method``
- argument?
+ argument? Default is False.
:arg bool validate_cert: For HTTPS requests, validate the server's
- certificate?
+ certificate? Default is True.
:arg string ca_certs: filename of CA certificates in PEM format,
or None to use defaults. See note below when used with
``curl_httpclient``.
diff --git a/tornado/httputil.py b/tornado/httputil.py
index 21842caa..9ca840db 100644
--- a/tornado/httputil.py
+++ b/tornado/httputil.py
@@ -379,18 +379,10 @@ class HTTPServerRequest(object):
self._cookies = Cookie.SimpleCookie()
if "Cookie" in self.headers:
try:
- parsed = parse_cookie(self.headers["Cookie"])
+ self._cookies.load(
+ native_str(self.headers["Cookie"]))
except Exception:
- pass
- else:
- for k, v in parsed.items():
- try:
- self._cookies[k] = v
- except Exception:
- # SimpleCookie imposes some restrictions on keys;
- # parse_cookie does not. Discard any cookies
- # with disallowed keys.
- pass
+ self._cookies = {}
return self._cookies
def write(self, chunk, callback=None):
@@ -917,82 +909,3 @@ def split_host_and_port(netloc):
host = netloc
port = None
return (host, port)
-
-_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
-_QuotePatt = re.compile(r"[\\].")
-_nulljoin = ''.join
-
-def _unquote_cookie(str):
- """Handle double quotes and escaping in cookie values.
-
- This method is copied verbatim from the Python 3.5 standard
- library (http.cookies._unquote) so we don't have to depend on
- non-public interfaces.
- """
- # If there aren't any doublequotes,
- # then there can't be any special characters. See RFC 2109.
- if str is None or len(str) < 2:
- return str
- if str[0] != '"' or str[-1] != '"':
- return str
-
- # We have to assume that we must decode this string.
- # Down to work.
-
- # Remove the "s
- str = str[1:-1]
-
- # Check for special sequences. Examples:
- # \012 --> \n
- # \" --> "
- #
- i = 0
- n = len(str)
- res = []
- while 0 <= i < n:
- o_match = _OctalPatt.search(str, i)
- q_match = _QuotePatt.search(str, i)
- if not o_match and not q_match: # Neither matched
- res.append(str[i:])
- break
- # else:
- j = k = -1
- if o_match:
- j = o_match.start(0)
- if q_match:
- k = q_match.start(0)
- if q_match and (not o_match or k < j): # QuotePatt matched
- res.append(str[i:k])
- res.append(str[k+1])
- i = k + 2
- else: # OctalPatt matched
- res.append(str[i:j])
- res.append(chr(int(str[j+1:j+4], 8)))
- i = j + 4
- return _nulljoin(res)
-
-
-def parse_cookie(cookie):
- """Parse a ``Cookie`` HTTP header into a dict of name/value pairs.
-
- This function attempts to mimic browser cookie parsing behavior;
- it specifically does not follow any of the cookie-related RFCs
- (because browsers don't either).
-
- The algorithm used is identical to that used by Django version 1.9.10.
-
- .. versionadded:: 4.4.2
- """
- cookiedict = {}
- for chunk in cookie.split(str(';')):
- if str('=') in chunk:
- key, val = chunk.split(str('='), 1)
- else:
- # Assume an empty name per
- # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
- key, val = str(''), chunk
- key, val = key.strip(), val.strip()
- if key or val:
- # unquote using Python's algorithm.
- cookiedict[key] = _unquote_cookie(val)
- return cookiedict
| HTTPRequest has default request_timeout
Hi
While developing a reverse proxy, I have realised that HTTP requests time out when they should stay open indefinitely.
It seems the default is `request_timeout=20`.
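For now the defaults can only be avoided by overriding them explicitly; a minimal sketch, assuming a falsy timeout is treated as "no timeout" by `simple_httpclient` (the upstream host below is a placeholder):

```python
from tornado.httpclient import AsyncHTTPClient, HTTPRequest

request = HTTPRequest(
    "http://upstream.example/stream",
    connect_timeout=0,   # assumption: a falsy value disables the connect timeout
    request_timeout=0,   # assumption: a falsy value disables the 20 s default
)
AsyncHTTPClient().fetch(request)
```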
It would be better to have no internal default value, or the documentation should mention that there is one. | tornadoweb/tornado | diff --git a/tornado/test/httputil_test.py b/tornado/test/httputil_test.py
index 3eb104d1..62b8c6d7 100644
--- a/tornado/test/httputil_test.py
+++ b/tornado/test/httputil_test.py
@@ -1,9 +1,8 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, with_statement
-from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders, format_timestamp, HTTPServerRequest, parse_request_start_line, parse_cookie
+from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders, format_timestamp, HTTPServerRequest, parse_request_start_line
from tornado.escape import utf8, native_str
from tornado.log import gen_log
from tornado.testing import ExpectLog
@@ -379,53 +378,3 @@ class ParseRequestStartLineTest(unittest.TestCase):
self.assertEqual(parsed_start_line.method, self.METHOD)
self.assertEqual(parsed_start_line.path, self.PATH)
self.assertEqual(parsed_start_line.version, self.VERSION)
-
-
-class ParseCookieTest(unittest.TestCase):
- # These tests copied from Django:
- # https://github.com/django/django/pull/6277/commits/da810901ada1cae9fc1f018f879f11a7fb467b28
- def test_python_cookies(self):
- """
- Test cases copied from Python's Lib/test/test_http_cookies.py
- """
- self.assertEqual(parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'})
- # Here parse_cookie() differs from Python's cookie parsing in that it
- # treats all semicolons as delimiters, even within quotes.
- self.assertEqual(
- parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
- {'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'}
- )
- # Illegal cookies that have an '=' char in an unquoted value.
- self.assertEqual(parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'})
- # Cookies with ':' character in their name.
- self.assertEqual(parse_cookie('key:term=value:term'), {'key:term': 'value:term'})
- # Cookies with '[' and ']'.
- self.assertEqual(parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'})
-
- def test_cookie_edgecases(self):
- # Cookies that RFC6265 allows.
- self.assertEqual(parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'})
- # parse_cookie() has historically kept only the last cookie with the
- # same name.
- self.assertEqual(parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'})
-
- def test_invalid_cookies(self):
- """
- Cookie strings that go against RFC6265 but browsers will send if set
- via document.cookie.
- """
- # Chunks without an equals sign appear as unnamed values per
- # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
- self.assertIn('django_language', parse_cookie('abc=def; unnamed; django_language=en').keys())
- # Even a double quote may be an unamed value.
- self.assertEqual(parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'})
- # Spaces in names and values, and an equals sign in values.
- self.assertEqual(parse_cookie('a b c=d e = f; gh=i'), {'a b c': 'd e = f', 'gh': 'i'})
- # More characters the spec forbids.
- self.assertEqual(parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), {'a b,c<>@:/[]?{}': 'd " =e,f g'})
- # Unicode characters. The spec only allows ASCII.
- self.assertEqual(parse_cookie('saint=André Bessette'), {'saint': native_str('André Bessette')})
- # Browsers don't send extra whitespace or semicolons in Cookie headers,
- # but parse_cookie() should parse whitespace the same way
- # document.cookie parses whitespace.
- self.assertEqual(parse_cookie(' = b ; ; = ; c = ; '), {'': 'b', 'c': ''})
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index 14f6904a..fdd1797c 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -279,8 +279,8 @@ class CookieTest(WebTestCase):
data = [('foo=a=b', 'a=b'),
('foo="a=b"', 'a=b'),
- ('foo="a;b"', '"a'), # even quoted, ";" is a delimiter
- ('foo=a\\073b', 'a\\073b'), # escapes only decoded in quotes
+ ('foo="a;b"', 'a;b'),
+ # ('foo=a\\073b', 'a;b'), # even encoded, ";" is a delimiter
('foo="a\\073b"', 'a;b'),
('foo="a\\"b"', 'a"b'),
]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 3
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"sphinx",
"sphinx_rtd_theme",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@53d23fbe1d04c4f664c4c3856025d3d0920b0240#egg=tornado
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- jinja2==3.0.3
- markupsafe==2.0.1
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- urllib3==1.26.20
prefix: /opt/conda/envs/tornado
| [
"tornado/test/web_test.py::CookieTest::test_cookie_special_char"
]
| []
| [
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_encode_args",
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_mult_params",
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_no_params",
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_no_query_params",
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_q_with_no_trailing_amp",
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_trailing_amp",
"tornado/test/httputil_test.py::TestUrlConcat::test_url_concat_trailing_q",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_boundary_starts_and_ends_with_quotes",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_content_disposition_header_without_name_parameter",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_data_after_final_boundary",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_file_upload",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_invalid_content_disposition",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_line_does_not_end_with_correct_line_break",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_missing_headers",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_special_filenames",
"tornado/test/httputil_test.py::MultipartFormDataTest::test_unquoted_names",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_copy",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_multi_line",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_optional_cr",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_pickle_roundtrip",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_setdefault",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_string",
"tornado/test/httputil_test.py::HTTPHeadersTest::test_unicode_newlines",
"tornado/test/httputil_test.py::FormatTimestampTest::test_datetime",
"tornado/test/httputil_test.py::FormatTimestampTest::test_struct_time",
"tornado/test/httputil_test.py::FormatTimestampTest::test_time_tuple",
"tornado/test/httputil_test.py::FormatTimestampTest::test_unix_time_float",
"tornado/test/httputil_test.py::FormatTimestampTest::test_unix_time_int",
"tornado/test/httputil_test.py::HTTPServerRequestTest::test_body_is_a_byte_string",
"tornado/test/httputil_test.py::HTTPServerRequestTest::test_default_constructor",
"tornado/test/httputil_test.py::ParseRequestStartLineTest::test_parse_request_start_line",
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_error",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_root_static_path",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_compressed_files",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header204Test::test_204_headers",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::StatusReasonTest::test_status",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_argument_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip",
"tornado/test/web_test.py::HTTPErrorTest::test_copy",
"tornado/test/web_test.py::ApplicationTest::test_listen",
"tornado/test/web_test.py::URLSpecReverseTest::test_non_reversible",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse_arguments"
]
| []
| Apache License 2.0 | 775 | [
"docs/releases.rst",
"tornado/httpclient.py",
"docs/releases/v4.4.2.rst",
"tornado/httputil.py"
]
| [
"docs/releases.rst",
"tornado/httpclient.py",
"docs/releases/v4.4.2.rst",
"tornado/httputil.py"
]
|
|
conjure-up__conjure-up-441 | 8b2de20b459ab069b1b29db6e31f3e22fcbe6c9f | 2016-09-23 19:26:31 | 2c8b2b9a848e19ffc77cade081472db13b94004b | diff --git a/conjureup/app.py b/conjureup/app.py
index 926cd3c..7bd28d1 100644
--- a/conjureup/app.py
+++ b/conjureup/app.py
@@ -243,13 +243,6 @@ def main():
utils.warning("Could not find spell {}".format(opts.spell))
sys.exit(1)
- if not os.path.exists(os.path.join(opts.spell,
- "metadata.yaml")):
- utils.warning("'{}' does not appear to be a spell. "
- "{}/metadata.yaml was not found.".format(
- opts.spell, opts.spell))
- sys.exit(1)
-
spell_name = os.path.basename(os.path.abspath(spell))
utils.set_chosen_spell(spell_name,
path.join(opts.cache_dir, spell_name))
diff --git a/conjureup/controllers/clouds/gui.py b/conjureup/controllers/clouds/gui.py
index f36ff2d..9969194 100644
--- a/conjureup/controllers/clouds/gui.py
+++ b/conjureup/controllers/clouds/gui.py
@@ -16,9 +16,7 @@ class CloudsController:
app.ui.show_exception_message(exc)
def __add_model(self):
- juju.switch_controller(app.current_controller)
- juju.add_model(app.current_model)
- juju.switch_model(app.current_model)
+ juju.add_model(app.current_model, app.current_controller)
def finish(self, cloud):
""" Load the Model controller passing along the selected cloud.
diff --git a/conjureup/controllers/clouds/tui.py b/conjureup/controllers/clouds/tui.py
index edc2bc4..08cf4de 100644
--- a/conjureup/controllers/clouds/tui.py
+++ b/conjureup/controllers/clouds/tui.py
@@ -22,12 +22,10 @@ class CloudsController:
return controllers.use('newcloud').render(app.argv.cloud)
app.current_controller = existing_controller
- juju.switch_controller(app.current_controller)
app.current_model = petname.Name()
utils.info("Creating new juju model named '{}', "
"please wait.".format(app.current_model))
- juju.add_model(app.current_model)
- juju.switch_model(app.current_model)
+ juju.add_model(app.current_model, app.current_controller)
return controllers.use('deploy').render()
diff --git a/conjureup/controllers/controllerpicker/gui.py b/conjureup/controllers/controllerpicker/gui.py
index 5834d02..74085ec 100644
--- a/conjureup/controllers/controllerpicker/gui.py
+++ b/conjureup/controllers/controllerpicker/gui.py
@@ -15,9 +15,7 @@ class ControllerPicker:
app.ui.show_exception_message(exc)
def __add_model(self):
- juju.switch_controller(app.current_controller)
- juju.add_model(app.current_model)
- juju.switch_model(app.current_model)
+ juju.add_model(app.current_model, app.current_controller)
def finish(self, controller):
utils.pollinate(app.session_id, 'CS')
diff --git a/conjureup/controllers/deploy/gui.py b/conjureup/controllers/deploy/gui.py
index 62399c4..f8c66a2 100644
--- a/conjureup/controllers/deploy/gui.py
+++ b/conjureup/controllers/deploy/gui.py
@@ -27,7 +27,7 @@ class DeployController:
""" runs pre deploy script if exists
"""
app.env['JUJU_PROVIDERTYPE'] = model_info(
- juju.get_current_model())['provider-type']
+ app.current_model)['provider-type']
pre_deploy_sh = os.path.join(app.config['spell-dir'],
'steps/00_pre-deploy')
diff --git a/conjureup/controllers/deploy/tui.py b/conjureup/controllers/deploy/tui.py
index 9103fe9..0bfebb1 100644
--- a/conjureup/controllers/deploy/tui.py
+++ b/conjureup/controllers/deploy/tui.py
@@ -26,7 +26,8 @@ class DeployController:
""" runs pre deploy script if exists
"""
# Set provider type for post-bootstrap
- app.env['JUJU_PROVIDERTYPE'] = model_info('default')['provider-type']
+ app.env['JUJU_PROVIDERTYPE'] = model_info(
+ app.current_model)['provider-type']
pre_deploy_sh = os.path.join(app.config['spell-dir'],
'steps/00_pre-deploy')
diff --git a/conjureup/controllers/newcloud/gui.py b/conjureup/controllers/newcloud/gui.py
index 6f1d6b4..ef95e90 100644
--- a/conjureup/controllers/newcloud/gui.py
+++ b/conjureup/controllers/newcloud/gui.py
@@ -39,7 +39,6 @@ class NewCloudController:
utils.pollinate(app.session_id, 'J004')
EventLoop.remove_alarms()
app.ui.set_footer('Bootstrap complete...')
- juju.switch_controller(app.current_controller)
self.__post_bootstrap_exec()
def __do_bootstrap(self, cloud=None, credential=None):
@@ -66,7 +65,7 @@ class NewCloudController:
def __post_bootstrap_exec(self):
""" Executes post-bootstrap.sh if exists
"""
- info = model_info(juju.get_current_model())
+ info = model_info(app.current_model)
# Set our provider type environment var so that it is
# exposed in future processing tasks
app.env['JUJU_PROVIDERTYPE'] = info['provider-type']
@@ -106,9 +105,6 @@ class NewCloudController:
'bootstrap processing phase: {}.'.format(result)))
utils.pollinate(app.session_id, 'J002')
app.ui.set_footer('')
- app.log.debug("Switching to controller: {}".format(
- app.current_controller))
- juju.switch_controller(app.current_controller)
controllers.use('deploy').render()
def finish(self, credentials=None, back=False):
diff --git a/conjureup/hooklib/writer.py b/conjureup/hooklib/writer.py
index f3e5445..db8e935 100644
--- a/conjureup/hooklib/writer.py
+++ b/conjureup/hooklib/writer.py
@@ -9,7 +9,7 @@ CACHEDIR = os.getenv('CONJURE_UP_CACHEDIR',
SPELL_NAME = os.getenv('CONJURE_UP_SPELL', '_unspecified_spell')
LOGFILE = os.path.join(CACHEDIR, '{spell}.log'.format(spell=SPELL_NAME))
-log = setup_logging("conjure-up/{}".format(SPELL_NAME), LOGFILE, True)
+log = setup_logging(SPELL_NAME, LOGFILE, True)
def success(msg):
diff --git a/conjureup/juju.py b/conjureup/juju.py
index 7fcebba..b28369c 100644
--- a/conjureup/juju.py
+++ b/conjureup/juju.py
@@ -29,7 +29,7 @@ this.USER_TAG = None
def requires_login(f):
def _decorator(*args, **kwargs):
if not this.IS_AUTHENTICATED:
- login()
+ login(force=True)
return f(*args, **kwargs)
return wraps(f)(_decorator)
@@ -92,15 +92,15 @@ def login(force=False):
if this.IS_AUTHENTICATED is True and not force:
return
- if not get_current_controller():
+ if app.current_controller is None:
raise Exception("Unable to determine current controller")
- if not get_current_model():
+ if app.current_model is None:
raise Exception("Tried to login with no current model set.")
- env = get_controller(get_current_controller())
- account = get_account(get_current_controller())
- uuid = get_model(get_current_model())['model-uuid']
+ env = get_controller(app.current_controller)
+ account = get_account(app.current_controller)
+ uuid = get_model(app.current_controller, app.current_model)['model-uuid']
server = env['api-endpoints'][0]
this.USER_TAG = "user-{}".format(account['user'].split("@")[0])
url = os.path.join('wss://', server, 'model', uuid, 'api')
@@ -279,52 +279,6 @@ def get_cloud(name):
raise LookupError("Unable to locate cloud: {}".format(name))
-def _do_switch(target):
- try:
- app.log.debug('calling juju switch {}'.format(target))
- run('juju-2.0 switch {}'.format(target),
- shell=True, check=True, stdout=DEVNULL, stderr=DEVNULL)
- except CalledProcessError as e:
- raise LookupError("Unable to switch: {}".format(e))
-
-
-def switch_model(model):
- """Switch
-
- Arguments:
- model: Model to select
-
- Returns: Raises exception if model is not a model in the current
- controller, or if we otherwise failed to switch models.
- """
-
- if model not in [m['name'] for m in get_models()['models']]:
- raise Exception("model '{}' not found in controller '{}'.".format(
- model, get_current_controller()))
- _do_switch(model)
-
-
-def switch_controller(controller):
- """ switch controllers
-
- Arguments:
- controller: controller to switch to
-
- Returns None.
- Raises exception if failed to switch.
- """
- assert controller is not None
-
- cinfo = get_controllers()
- prev_controller = cinfo.get('current-controller', None)
- if prev_controller == controller:
- return
- if controller not in cinfo.get('controllers', {}).keys():
- raise Exception("Could not find controller '{}'".format(controller))
- _do_switch(controller)
- login(True)
-
-
def deploy(bundle):
""" Juju deploy bundle
@@ -559,35 +513,17 @@ def get_accounts():
raise Exception("Unable to find accounts")
-def model_by_owner(user):
- """ List model associated with user
-
- Arguments:
- user: username to query
-
- Returns:
- Dictionary containing model information for user
- """
- models = get_models()
- for m in models:
- if m['owner'] == user:
- return m
- raise LookupError(
- "Unable to find user: {}".format(
- user
- ))
-
-
-def get_model(name):
+def get_model(controller, name):
""" List information for model
Arguments:
name: model name
+ controller: name of controller to work in
Returns:
Dictionary of model information
"""
- models = get_models()['models']
+ models = get_models(controller)['models']
for m in models:
if m['name'] == name:
return m
@@ -595,23 +531,29 @@ def get_model(name):
"Unable to find model: {}".format(name))
-def add_model(name):
+def add_model(name, controller):
""" Adds a model to current controller
+
+ Arguments:
+ controller: controller to add model in
"""
- sh = run('juju-2.0 add-model {}'.format(name),
+ sh = run('juju-2.0 add-model {} -c {}'.format(name, controller),
shell=True, stdout=DEVNULL, stderr=PIPE)
if sh.returncode > 0:
raise Exception(
"Unable to create model: {}".format(sh.stderr.decode('utf8')))
-def get_models():
+def get_models(controller):
""" List available models
+ Arguments:
+ controller: existing controller to get models for
+
Returns:
List of known models
"""
- sh = run('juju-2.0 list-models --format yaml',
+ sh = run('juju-2.0 list-models --format yaml -c {}'.format(controller),
shell=True, stdout=PIPE, stderr=PIPE)
if sh.returncode > 0:
raise LookupError(
diff --git a/conjureup/log.py b/conjureup/log.py
index 974c02f..5384e92 100644
--- a/conjureup/log.py
+++ b/conjureup/log.py
@@ -4,25 +4,6 @@ import stat
from logging.handlers import SysLogHandler, TimedRotatingFileHandler
-class _log:
-
- def __init__(self, app, logger):
- self.app = app
- self.logger = logger
-
- def debug(self, msg):
- self.logger.debug("{}: {}".format(self.app, msg))
-
- def error(self, msg):
- self.logger.error("{}: {}".format(self.app, msg))
-
- def info(self, msg):
- self.logger.info("{}: {}".format(self.app, msg))
-
- def exception(self, msg):
- self.logger.exception("{}: {}".format(self.app, msg))
-
-
def setup_logging(app, logfile, debug=False):
cmdslog = TimedRotatingFileHandler(logfile,
when='D',
@@ -49,4 +30,4 @@ def setup_logging(app, logfile, debug=False):
syslog_h.set_name(app)
logger.addHandler(syslog_h)
- return _log(app, logger)
+ return logger
diff --git a/conjureup/ui/views/applicationconfigure.py b/conjureup/ui/views/applicationconfigure.py
index 3f3f3d1..9fa5b7c 100644
--- a/conjureup/ui/views/applicationconfigure.py
+++ b/conjureup/ui/views/applicationconfigure.py
@@ -3,15 +3,13 @@
"""
import logging
-from functools import partial
-from urwid import Columns, Filler, Frame, Pile, Text, WidgetWrap
+from urwid import Filler, Pile, Text, WidgetWrap
from conjureup import utils
from conjureup.ui.widgets.option_widget import OptionWidget
-from ubuntui.ev import EventLoop
-from ubuntui.utils import Color, Padding
-from ubuntui.widgets.buttons import PlainButton, menu_btn
+from ubuntui.utils import Padding
+from ubuntui.widgets.buttons import PlainButton
from ubuntui.widgets.hr import HR
log = logging.getLogger('conjure')
@@ -25,77 +23,12 @@ class ApplicationConfigureView(WidgetWrap):
self.options_copy = self.application.options.copy()
self.metadata_controller = metadata_controller
self.widgets = self.build_widgets()
- self.description_w = Text("")
- self.showing_all = False
- self.buttons_selected = False
- self.frame = Frame(body=self.build_widgets(),
- footer=self.build_footer())
- super().__init__(self.frame)
-
- self.metadata_controller.get_readme(
- self.application.csid.as_seriesname(),
- partial(self._handle_readme_load))
-
- def _handle_readme_load(self, readme_f):
- EventLoop.loop.event_loop._loop.call_soon_threadsafe(
- partial(self._update_readme_on_main_thread,
- readme_f.result()))
-
- def _update_readme_on_main_thread(self, readme):
- rt = self._trim_readme(readme)
- self.description_w.set_text(rt)
-
- def _trim_readme(self, readme):
- rls = readme.splitlines()
- rls = [l for l in rls if not l.startswith("#")]
- nrls = []
- for i in range(len(rls)):
- if i + 1 == len(rls):
- break
- if len(rls[i]) > 0:
- if rls[i][0] in ['-', '#', '=']:
- continue
- if len(rls[i + 1]) > 0:
- if rls[i + 1][0] in ['-', '=']:
- continue
- nrls.append(rls[i])
-
- if len(nrls) == 0:
- return
-
- if nrls[0] == '':
- nrls = nrls[1:]
- # split after two paragraphs:
- if '' in nrls:
- firstparidx = nrls.index('')
- else:
- firstparidx = 1
- try:
- splitidx = nrls.index('', firstparidx + 1)
- except:
- splitidx = firstparidx
- nrls = nrls[:splitidx]
- return "\n".join(nrls)
+ super().__init__(self.widgets)
+ self.pile.focus_position = 1
def selectable(self):
return True
- def keypress(self, size, key):
- # handle keypress first, then get new focus widget
- rv = super().keypress(size, key)
- if key in ['tab', 'shift tab']:
- self._swap_focus()
- return rv
-
- def _swap_focus(self):
- if not self.buttons_selected:
- self.buttons_selected = True
- self.frame.focus_position = 'footer'
- self.buttons.focus_position = 3
- else:
- self.buttons_selected = False
- self.frame.focus_position = 'body'
-
def build_widgets(self):
ws = [Text("Configure {}".format(
self.application.service_name))]
@@ -105,47 +38,14 @@ class ApplicationConfigureView(WidgetWrap):
current_value=self.application.num_units,
value_changed_callback=self.handle_scale)
ws.append(num_unit_ow)
- ws += self.get_whitelisted_option_widgets()
- self.toggle_show_all_button_index = len(ws) + 1
- self.toggle_show_all_button = PlainButton(
- "Show Advanced Configuration",
- self.do_toggle_show_all_config)
- ws += [HR(),
- Columns([('weight', 1, Text(" ")),
- (36, Color.button_secondary(
- self.toggle_show_all_button))])]
+ ws += self.get_option_widgets()
+ ws += [HR(), PlainButton("Cancel", self.do_cancel),
+ PlainButton("Accept Changes", self.do_commit)]
self.pile = Pile(ws)
return Padding.center_90(Filler(self.pile, valign="top"))
- def build_footer(self):
- cancel = menu_btn(on_press=self.do_cancel,
- label="\n BACK\n")
- confirm = menu_btn(on_press=self.do_commit,
- label="\n APPLY CHANGES\n")
- self.buttons = Columns([
- ('fixed', 2, Text("")),
- ('fixed', 13, Color.menu_button(
- cancel,
- focus_map='button_primary focus')),
- Text(""),
- ('fixed', 20, Color.menu_button(
- confirm,
- focus_map='button_primary focus')),
- ('fixed', 2, Text(""))
- ])
-
- footer = Pile([
- HR(top=0),
- Padding.center_90(self.description_w),
- Padding.line_break(""),
- Color.frame_footer(Pile([
- Padding.line_break(""),
- self.buttons]))
- ])
-
- return footer
-
- def get_whitelisted_option_widgets(self):
+ def get_option_widgets(self):
+ ws = []
service_id = self.application.csid.as_str_without_rev()
options = self.metadata_controller.get_options(service_id)
@@ -153,21 +53,7 @@ class ApplicationConfigureView(WidgetWrap):
self.application.service_name)
hidden = [n for n in options.keys() if n not in svc_opts_whitelist]
log.info("Hiding options not in the whitelist: {}".format(hidden))
-
- return self._get_option_widgets(svc_opts_whitelist, options)
-
- def get_non_whitelisted_option_widgets(self):
- service_id = self.application.csid.as_str_without_rev()
- options = self.metadata_controller.get_options(service_id)
-
- svc_opts_whitelist = utils.get_options_whitelist(
- self.application.service_name)
- hidden = [n for n in options.keys() if n not in svc_opts_whitelist]
- return self._get_option_widgets(hidden, options)
-
- def _get_option_widgets(self, opnames, options):
- ws = []
- for opname in opnames:
+ for opname in svc_opts_whitelist:
opdict = options[opname]
cv = self.application.options.get(opname, None)
ow = OptionWidget(opname,
@@ -179,24 +65,6 @@ class ApplicationConfigureView(WidgetWrap):
ws.append(ow)
return ws
- def do_toggle_show_all_config(self, sender):
- if not self.showing_all:
- new_ows = self.get_non_whitelisted_option_widgets()
- header = Text("Advanced Configuration Options")
- opts = self.pile.options()
- self.pile.contents.append((header, opts))
- for ow in new_ows:
- self.pile.contents.append((ow, opts))
- self.toggle_show_all_button.set_label(
- "Hide Advanced Configuration")
- self.showing_all = True
- else:
- i = self.toggle_show_all_button_index
- self.pile.contents = self.pile.contents[:i + 1]
- self.toggle_show_all_button.set_label(
- "Show Advanced Configuration")
- self.showing_all = False
-
def handle_edit(self, opname, value):
self.options_copy[opname] = value
| make model explicit when running juju-cli commands or API calls
Utilize `juju -m [controller:]model` when executing juju CLI commands. Currently we switch to the model we want and then run juju. This has the effect of killing any other existing conjure-up session that may be running in a separate model.
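For illustration, a minimal sketch (the helper name is hypothetical) of scoping a single invocation to an explicit target instead of switching globally:

```python
from subprocess import DEVNULL, PIPE, run

def run_juju(cmd, controller, model):
    """Run a juju command against an explicit controller:model target."""
    # `-m controller:model` scopes only this invocation, so any other
    # conjure-up session working in a different model is untouched.
    target = '{}:{}'.format(controller, model)
    return run('juju-2.0 {} -m {}'.format(cmd, target),
               shell=True, stdout=DEVNULL, stderr=PIPE)
```

The patch above takes the same approach for `add-model`, passing the controller explicitly via its `-c` flag.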
As for the API, we always switch to the known current model and log in to the API; we need to log in to the API without switching models. | conjure-up/conjure-up | diff --git a/test/test_controllers_clouds_gui.py b/test/test_controllers_clouds_gui.py
index 9f60952..0b96672 100644
--- a/test/test_controllers_clouds_gui.py
+++ b/test/test_controllers_clouds_gui.py
@@ -78,6 +78,10 @@ class CloudsGUIFinishTestCase(unittest.TestCase):
'conjureup.controllers.clouds.gui.juju.get_controller_in_cloud')
self.mock_gcc = self.gcc_patcher.start()
+ self.petname_patcher = patch(
+ 'conjureup.controllers.clouds.gui.petname')
+ self.mock_petname = self.petname_patcher.start()
+
def tearDown(self):
self.controllers_patcher.stop()
self.utils_patcher.stop()
@@ -85,13 +89,15 @@ class CloudsGUIFinishTestCase(unittest.TestCase):
self.app_patcher.stop()
self.juju_patcher.stop()
self.gcc_patcher.stop()
+ self.mock_petname.stop()
def test_finish_w_controller(self):
"clouds.finish with an existing controller"
self.mock_gcc.return_value = 'testcontroller'
+ self.mock_petname.Name.return_value = 'moo'
self.controller.finish('testcloud')
- self.mock_juju.assert_has_calls([
- call.switch_controller('testcontroller')])
+ self.mock_juju.add_model.assert_called_once_with('moo',
+ 'testcontroller')
def test_finish_no_controller(self):
"clouds.finish without existing controller"
diff --git a/test/test_controllers_clouds_tui.py b/test/test_controllers_clouds_tui.py
index 30d5ca9..42e1299 100644
--- a/test/test_controllers_clouds_tui.py
+++ b/test/test_controllers_clouds_tui.py
@@ -6,7 +6,7 @@
import unittest
-from unittest.mock import MagicMock, call, patch
+from unittest.mock import ANY, MagicMock, call, patch
from conjureup.controllers.clouds.tui import CloudsController
@@ -94,7 +94,7 @@ class CloudsTUIFinishTestCase(unittest.TestCase):
self.mock_gcc.return_value = 'testcontroller'
self.controller.finish()
self.mock_juju.assert_has_calls([
- call.switch_controller('testcontroller')])
+ call.add_model(ANY, 'testcontroller')])
def test_finish_no_controller(self):
"clouds.finish without existing controller"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 11
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"tox",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
bson==0.5.10
certifi==2021.5.30
charset-normalizer==2.0.12
configobj==5.0.8
-e git+https://github.com/conjure-up/conjure-up.git@8b2de20b459ab069b1b29db6e31f3e22fcbe6c9f#egg=conjure_up
distlib==0.3.9
filelock==3.4.1
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
petname==2.6
platformdirs==2.4.0
pluggy==1.0.0
prettytable==2.5.0
progressbar2==3.55.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
python-utils==3.5.2
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
urwid==2.1.2
virtualenv==20.17.1
wcwidth==0.2.13
ws4py==0.3.4
zipp==3.6.0
| name: conjure-up
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- bson==0.5.10
- charset-normalizer==2.0.12
- configobj==5.0.8
- distlib==0.3.9
- filelock==3.4.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- petname==2.6
- platformdirs==2.4.0
- pluggy==1.0.0
- prettytable==2.5.0
- progressbar2==3.55.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- python-utils==3.5.2
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- urwid==2.1.2
- virtualenv==20.17.1
- wcwidth==0.2.13
- ws4py==0.3.4
- zipp==3.6.0
prefix: /opt/conda/envs/conjure-up
| [
"test/test_controllers_clouds_gui.py::CloudsGUIFinishTestCase::test_finish_w_controller",
"test/test_controllers_clouds_tui.py::CloudsTUIFinishTestCase::test_finish_w_controller"
]
| []
| [
"test/test_controllers_clouds_gui.py::CloudsGUIRenderTestCase::test_render",
"test/test_controllers_clouds_gui.py::CloudsGUIFinishTestCase::test_finish_no_controller",
"test/test_controllers_clouds_tui.py::CloudsTUIRenderTestCase::test_render",
"test/test_controllers_clouds_tui.py::CloudsTUIRenderTestCase::test_render_unknown",
"test/test_controllers_clouds_tui.py::CloudsTUIFinishTestCase::test_finish_no_controller"
]
| []
| MIT License | 777 | [
"conjureup/juju.py",
"conjureup/controllers/clouds/tui.py",
"conjureup/ui/views/applicationconfigure.py",
"conjureup/controllers/deploy/gui.py",
"conjureup/app.py",
"conjureup/hooklib/writer.py",
"conjureup/controllers/deploy/tui.py",
"conjureup/controllers/newcloud/gui.py",
"conjureup/log.py",
"conjureup/controllers/controllerpicker/gui.py",
"conjureup/controllers/clouds/gui.py"
]
| [
"conjureup/juju.py",
"conjureup/controllers/clouds/tui.py",
"conjureup/ui/views/applicationconfigure.py",
"conjureup/controllers/deploy/gui.py",
"conjureup/app.py",
"conjureup/hooklib/writer.py",
"conjureup/controllers/deploy/tui.py",
"conjureup/controllers/newcloud/gui.py",
"conjureup/log.py",
"conjureup/controllers/controllerpicker/gui.py",
"conjureup/controllers/clouds/gui.py"
]
|
|
alecthomas__voluptuous-215 | 987fd98f88b7bfd5c34f4923056b62097db75deb | 2016-09-24 11:28:27 | 99fb7bd43b2cbd8e53cb9e50765c93339b2f7843 | tusharmakkar08: Hey @tuukkamustonen ,
Travis build is failing. Please update the PR.
Thanks
coveralls:
[](https://coveralls.io/builds/8039012)
Coverage decreased (-3.1%) to 91.593% when pulling **5928c7c122b43764eac4874f63805ab955f225b4 on tuukkamustonen:empty-dict-handling** into **987fd98f88b7bfd5c34f4923056b62097db75deb on alecthomas:master**.
coveralls:
[](https://coveralls.io/builds/8039043)
Coverage increased (+0.1%) to 94.801% when pulling **dcb757f523211d01f6898e7d43602fa52c9dadbe on tuukkamustonen:empty-dict-handling** into **987fd98f88b7bfd5c34f4923056b62097db75deb on alecthomas:master**.
tuukkamustonen: @tusharmakkar08 Fixed, but I set `+IGNORE_EXCEPTION_DETAIL` doctest flag in `setup.cfg`.
Note that this is a backwards-incompatible change: `Schema({}, extra=ALLOW_EXTRA)` does not work anymore; one should use `Schema(dict)` instead.
I see there's no `CHANGELOG` in this project, have you thought about adding one?
tusharmakkar08: Hey @tuukkamustonen
It is preferred not to set `+IGNORE_EXCEPTION_DETAIL` since the content of the exception is also important for `voluptuous`. We even have specialised modules like `humanize.py` to make errors more reader-friendly. I would suggest using `try-catch` instead of setting the flag.
Currently, there's no `CHANGELOG` in this project. It would be great if you could add it via this PR itself.
Thanks.
tuukkamustonen: @tusharmakkar08 Alright, sounds reasonable. I've been a bit busy but I'll update the PR tomorrow! | diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index d978679..0000000
--- a/.coveragerc
+++ /dev/null
@@ -1,2 +0,0 @@
-[run]
-omit = *tests*
diff --git a/.travis.yml b/.travis.yml
index 725c58d..14ed583 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,11 +9,6 @@ python:
- "3.5"
- "pypy"
# command to install dependencies
-install:
- - pip install coveralls
- # Need to do this since coverage is broken in travis https://github.com/travis-ci/travis-ci/issues/4866
- - pip install 'coverage<4'
+#install: "pip install -r requirements.txt --use-mirrors"
# command to run tests
-script: nosetests --with-coverage --cover-package=voluptuous
-after_success:
- - coveralls
\ No newline at end of file
+script: nosetests
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..5112703
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,18 @@
+# Changelog
+
+## [Unreleased]
+
+**Changes**:
+
+- [#198](https://github.com/alecthomas/voluptuous/issues/198):
+ `{}` and `[]` now always evaluate as is, instead of as any dict or any list.
+ To specify a free-form list, use `list` instead of `[]`. To specify a
+ free-form dict, use `dict` instead of `Schema({}, extra=ALLOW_EXTRA)`.
+
+**New**:
+
+**Fixes**:
+
+## 0.9.3 (2016-08-03)
+
+Changelog not kept for 0.9.3 and earlier releases.
diff --git a/README.md b/README.md
index fc26f40..f85f838 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,6 @@
# Voluptuous is a Python data validation library
[](https://travis-ci.org/alecthomas/voluptuous) [](https://waffle.io/alecthomas/voluptuous)
-[](https://coveralls.io/github/alecthomas/voluptuous?branch=master)
Voluptuous, *despite* the name, is a Python data validation library. It
is primarily intended for validating data coming into Python as JSON,
@@ -28,6 +27,10 @@ To file a bug, create a [new issue](https://github.com/alecthomas/voluptuous/iss
The documentation is provided [here] (http://alecthomas.github.io/voluptuous/).
+## Changelog
+
+See [CHANGELOG.md](CHANGELOG.md).
+
## Show me an example
Twitter's [user search API](https://dev.twitter.com/docs/api/1/get/users/search) accepts
@@ -225,10 +228,13 @@ contain anything, specify it as `list`:
```pycon
>>> schema = Schema([])
->>> schema([1]) # doctest: +IGNORE_EXCEPTION_DETAIL
-Traceback (most recent call last):
- ...
-MultipleInvalid: not a valid value
+>>> try:
+... schema([1])
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value"
+True
>>> schema([])
[]
>>> schema = Schema(list)
@@ -361,6 +367,28 @@ token `extra` as a key:
```
+However, an empty dict (`{}`) is treated as is. If you want to specify a list that can
+contain anything, specify it as `dict`:
+
+```pycon
+>>> schema = Schema({}, extra=ALLOW_EXTRA) # don't do this
+>>> try:
+... schema({'extra': 1})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value"
+True
+>>> schema({})
+{}
+>>> schema = Schema(dict) # do this instead
+>>> schema({})
+{}
+>>> schema({'extra': 1})
+{'extra': 1}
+
+```
+
#### Required dictionary keys
By default, keys in the schema are not required to be in the data:
diff --git a/voluptuous/schema_builder.py b/voluptuous/schema_builder.py
index 042a713..04fc86b 100644
--- a/voluptuous/schema_builder.py
+++ b/voluptuous/schema_builder.py
@@ -153,17 +153,6 @@ class Schema(object):
Nodes can be values, in which case a direct comparison is used, types,
in which case an isinstance() check is performed, or callables, which will
validate and optionally convert the value.
-
- We can equate schemas also.
-
- For Example:
-
- >>> v = Schema({Required('a'): unicode})
- >>> v1 = Schema({Required('a'): unicode})
- >>> v2 = Schema({Required('b'): unicode})
- >>> assert v == v1
- >>> assert v != v2
-
"""
_extra_to_name = {
@@ -192,15 +181,6 @@ class Schema(object):
self.extra = int(extra) # ensure the value is an integer
self._compiled = self._compile(schema)
- def __eq__(self, other):
- if str(other) == str(self.schema):
- # Because repr is combination mixture of object and schema
- return True
- return False
-
- def __str__(self):
- return str(self.schema)
-
def __repr__(self):
return "<Schema(%s, extra=%s, required=%s) object at 0x%x>" % (
self.schema, self._extra_to_name.get(self.extra, '??'),
@@ -221,7 +201,7 @@ class Schema(object):
return lambda _, v: v
if isinstance(schema, Object):
return self._compile_object(schema)
- if isinstance(schema, collections.Mapping):
+ if isinstance(schema, collections.Mapping) and len(schema):
return self._compile_dict(schema)
elif isinstance(schema, list) and len(schema):
return self._compile_list(schema)
@@ -386,7 +366,7 @@ class Schema(object):
A dictionary schema will only validate a dictionary:
- >>> validate = Schema({})
+ >>> validate = Schema({'prop': str})
>>> with raises(er.MultipleInvalid, 'expected a dictionary'):
... validate([])
@@ -401,7 +381,6 @@ class Schema(object):
>>> with raises(er.MultipleInvalid, "extra keys not allowed @ data['two']"):
... validate({'two': 'three'})
-
Validation function, in this case the "int" type:
>>> validate = Schema({'one': 'two', 'three': 'four', int: str})
@@ -411,10 +390,17 @@ class Schema(object):
>>> validate({10: 'twenty'})
{10: 'twenty'}
+ An empty dictionary is matched as value:
+
+ >>> validate = Schema({})
+ >>> with raises(er.MultipleInvalid, 'not a valid value'):
+ ... validate([])
+
By default, a "type" in the schema (in this case "int") will be used
purely to validate that the corresponding value is of that type. It
will not Coerce the value:
+ >>> validate = Schema({'one': 'two', 'three': 'four', int: str})
>>> with raises(er.MultipleInvalid, "extra keys not allowed @ data['10']"):
... validate({'10': 'twenty'})
@@ -1073,7 +1059,7 @@ def validate(*a, **kw):
Set restriction for returned value:
>>> @validate(arg=int, __return__=int)
- ... def bar(arg1):
+ ... def foo(arg1):
... return arg1 * 2
"""
| [] should evaluate to an empty list, not any list
```python
>>> from voluptuous import Schema
>>> Schema({})({})
{}
>>> Schema({})({'foo': 1})
Traceback (most recent call last):
...
voluptuous.error.MultipleInvalid: extra keys not allowed @ data['foo']
>>> Schema(int)(5)
5
>>> Schema(2)(5)
Traceback (most recent call last):
...
voluptuous.error.MultipleInvalid: not a valid value
>>> Schema([])([])
[]
>>> Schema([])([1]) # shouldn't this throw error?
[1]
```
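The proposed semantics would look like this — a hypothetical session (matching what the eventual patch in this record implements): `list` for a free-form list, `[]` for the exact empty list.

```python
>>> from voluptuous import Schema
>>> Schema(list)([1])  # free-form: any list passes
[1]
>>> Schema([])([])     # exact: only the empty list passes
[]
>>> Schema([])([1])    # non-empty input is rejected
Traceback (most recent call last):
  ...
voluptuous.error.MultipleInvalid: not a valid value
```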
Wouldn't it be logical to evaluate `list` for any list and `[]` for an exact value (an empty list)? | alecthomas/voluptuous | diff --git a/voluptuous/tests/tests.py b/voluptuous/tests/tests.py
index 922d62b..debd09a 100644
--- a/voluptuous/tests/tests.py
+++ b/voluptuous/tests/tests.py
@@ -423,6 +423,7 @@ def test_fix_157():
assert_raises(MultipleInvalid, s, ['four'])
+
def test_range_exlcudes_nan():
s = Schema(Range(min=0, max=10))
assert_raises(MultipleInvalid, s, float('nan'))
@@ -478,6 +479,40 @@ def test_empty_list_as_exact():
s([])
+def test_empty_dict_as_exact():
+ # {} always evaluates as {}
+ s = Schema({})
+ assert_raises(Invalid, s, {'extra': 1})
+ s = Schema({}, extra=ALLOW_EXTRA) # this should not be used
+ assert_raises(Invalid, s, {'extra': 1})
+
+ # {...} evaluates as Schema({...})
+ s = Schema({'foo': int})
+ assert_raises(Invalid, s, {'foo': 1, 'extra': 1})
+ s = Schema({'foo': int}, extra=ALLOW_EXTRA)
+ s({'foo': 1, 'extra': 1})
+
+ # dict matches {} or {...}
+ s = Schema(dict)
+ s({'extra': 1})
+ s({})
+ s = Schema(dict, extra=PREVENT_EXTRA)
+ s({'extra': 1})
+ s({})
+
+ # nested {} evaluate as {}
+ s = Schema({
+ 'inner': {}
+ }, extra=ALLOW_EXTRA)
+ assert_raises(Invalid, s, {'inner': {'extra': 1}})
+ s({})
+ s = Schema({
+ 'inner': Schema({}, extra=ALLOW_EXTRA)
+ })
+ assert_raises(Invalid, s, {'inner': {'extra': 1}})
+ s({})
+
+
def test_schema_decorator_match_with_args():
@validate(int)
def fn(arg):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/alecthomas/voluptuous.git@987fd98f88b7bfd5c34f4923056b62097db75deb#egg=voluptuous
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: voluptuous
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- nose==1.3.7
prefix: /opt/conda/envs/voluptuous
| [
"voluptuous/tests/tests.py::test_empty_dict_as_exact"
]
| []
| [
"voluptuous/tests/tests.py::test_exact_sequence",
"voluptuous/tests/tests.py::test_required",
"voluptuous/tests/tests.py::test_extra_with_required",
"voluptuous/tests/tests.py::test_iterate_candidates",
"voluptuous/tests/tests.py::test_in",
"voluptuous/tests/tests.py::test_not_in",
"voluptuous/tests/tests.py::test_remove",
"voluptuous/tests/tests.py::test_extra_empty_errors",
"voluptuous/tests/tests.py::test_literal",
"voluptuous/tests/tests.py::test_email_validation",
"voluptuous/tests/tests.py::test_email_validation_with_none",
"voluptuous/tests/tests.py::test_email_validation_with_empty_string",
"voluptuous/tests/tests.py::test_email_validation_without_host",
"voluptuous/tests/tests.py::test_fqdn_url_validation",
"voluptuous/tests/tests.py::test_fqdn_url_without_domain_name",
"voluptuous/tests/tests.py::test_fqdnurl_validation_with_none",
"voluptuous/tests/tests.py::test_fqdnurl_validation_with_empty_string",
"voluptuous/tests/tests.py::test_fqdnurl_validation_without_host",
"voluptuous/tests/tests.py::test_url_validation",
"voluptuous/tests/tests.py::test_url_validation_with_none",
"voluptuous/tests/tests.py::test_url_validation_with_empty_string",
"voluptuous/tests/tests.py::test_url_validation_without_host",
"voluptuous/tests/tests.py::test_copy_dict_undefined",
"voluptuous/tests/tests.py::test_sorting",
"voluptuous/tests/tests.py::test_schema_extend",
"voluptuous/tests/tests.py::test_schema_extend_overrides",
"voluptuous/tests/tests.py::test_repr",
"voluptuous/tests/tests.py::test_list_validation_messages",
"voluptuous/tests/tests.py::test_nested_multiple_validation_errors",
"voluptuous/tests/tests.py::test_humanize_error",
"voluptuous/tests/tests.py::test_fix_157",
"voluptuous/tests/tests.py::test_range_exlcudes_nan",
"voluptuous/tests/tests.py::test_equal",
"voluptuous/tests/tests.py::test_unordered",
"voluptuous/tests/tests.py::test_empty_list_as_exact",
"voluptuous/tests/tests.py::test_schema_decorator_match_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_match_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_match_return_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_return_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_match_return_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_return_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_return_only_match",
"voluptuous/tests/tests.py::test_schema_decorator_return_only_unmatch"
]
| []
| BSD 3-Clause "New" or "Revised" License | 778 | [
"voluptuous/schema_builder.py",
"CHANGELOG.md",
".travis.yml",
"README.md",
".coveragerc"
]
| [
"voluptuous/schema_builder.py",
"CHANGELOG.md",
".travis.yml",
"README.md",
".coveragerc"
]
|
OnroerendErfgoed__pyramid_urireferencer-12 | 067293d191dc9dd4f7c2554f71bf0c730786a872 | 2016-09-27 08:28:45 | 067293d191dc9dd4f7c2554f71bf0c730786a872 | diff --git a/CHANGES.rst b/CHANGES.rst
index 72b550b..195a2b5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -5,6 +5,7 @@
- Some minor doc fixes
- Changed an edge case where `items` or `applications` response attributes could
be `None` so that they are now always empty lists. (#6)
+- Updated error message and added JSON error message when a backend application can't be reached (#9) and when a resource can't be deleted (#10)
0.4.0 (2015-07-10)
------------------
diff --git a/pyramid_urireferencer/models.py b/pyramid_urireferencer/models.py
index 067dd57..8ce334b 100644
--- a/pyramid_urireferencer/models.py
+++ b/pyramid_urireferencer/models.py
@@ -94,7 +94,7 @@ class ApplicationResponse:
"success": self.success,
"has_references": self.has_references,
"count": self.count,
- "items": [item.to_json() for item in self.items]
+ "items": [item.to_json() for item in self.items] if self.items else []
}
diff --git a/pyramid_urireferencer/protected_resources.py b/pyramid_urireferencer/protected_resources.py
index db4556a..d54efaa 100644
--- a/pyramid_urireferencer/protected_resources.py
+++ b/pyramid_urireferencer/protected_resources.py
@@ -50,8 +50,8 @@ def protected_operation(fn):
app_response.count,
', '.join([i.uri for i in app_response.items]))
response_json["errors"].append(error_string)
- response.json_body = response_json
- response.content_type = 'application/json'
+ response.json_body = response_json
+ response.content_type = 'application/json'
return response
else:
raise HTTPConflict(
@@ -60,8 +60,26 @@ def protected_operation(fn):
', '.join([app_response.title for app_response in registery_response.applications
if app_response.has_references])))
elif not registery_response.success:
- raise HTTPInternalServerError(
- detail="Urireferencer: Something went wrong while retrieving references of the uri {0}".format(uri))
+ if parent_object.request.headers.get("Accept", None) == "application/json":
+ response = Response()
+ response.status_code = 500
+ response_json = {
+ "message": "Unable to verify the uri {0} is no longer being used.".format(uri),
+ "errors": [],
+ "registry_response": registery_response.to_json()
+ }
+ for app_response in registery_response.applications:
+ if not app_response.success:
+ response_json["errors"].append(
+ "{}: Could not verify the uri is no longer being used.".format(app_response.uri))
+ response.json_body = response_json
+ response.content_type = 'application/json'
+ return response
+ else:
+ raise HTTPInternalServerError(
+ detail="Urireferencer: Unable to verify the uri {0} is no longer being used. Could not verify with {1}".
+ format(uri, ', '.join([app_response.uri for app_response
+ in registery_response.applications if not app_response.success])))
return fn(parent_object, *args, **kw)
return advice
| Error message when a backend application can't be reached
When a backend service is not reachable by the uriregistry:
AS IS:
HTTPInternalServerError: Urireferencer: Something went wrong while retrieving references of the uri https://id.erfgoed.net/besluiten/1
TO BE:
HTTP 503 Service Unavailable: Urireferencer: Something went wrong while retrieving references of the uri https://id.erfgoed.net/test/1. Application A not reachable.
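A minimal sketch (names assumed) of composing such a message from the registry response, listing every backend application that could not be verified:

```python
from pyramid.httpexceptions import HTTPInternalServerError

def raise_unverified(uri, registry_response):
    # Collect the URI of every application whose reference check failed.
    failing = [a.uri for a in registry_response.applications
               if not a.success]
    raise HTTPInternalServerError(
        detail="Urireferencer: Unable to verify the uri {0} is no longer "
               "being used. Could not verify with {1}".format(
                   uri, ', '.join(failing)))
```

Note that the merged patch keeps a 500 status (plus a JSON body for `Accept: application/json` clients) rather than the 503 suggested here.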
| OnroerendErfgoed/pyramid_urireferencer | diff --git a/tests/test_protected_resources.py b/tests/test_protected_resources.py
index 1989235..81972ee 100644
--- a/tests/test_protected_resources.py
+++ b/tests/test_protected_resources.py
@@ -29,6 +29,18 @@ def get_app(nr):
return a
+def get_app_500():
+ return ApplicationResponse(
+ title='App',
+ uri="https://dev-app.onroerenderfgoed.be/",
+ service_url="https://dev-app.onroerenderfgoed.be/references",
+ success=False,
+ has_references=None,
+ count=None,
+ items=None
+ )
+
+
class DummyParent(object):
def __init__(self):
self.request = testing.DummyRequest()
@@ -96,7 +108,25 @@ class ProtectedTests(unittest.TestCase):
def test_protected_operation_500(self, is_referenced_mock):
dummy = DummyParent()
is_referenced_mock.return_value = RegistryResponse('https://id.erfgoed.net/resources/1', False, None, None,
- None)
+ [get_app_500()])
self.assertRaises(HTTPInternalServerError, dummy.protected_dummy)
is_referenced_call = is_referenced_mock.mock_calls[0]
self.assertEqual('https://id.erfgoed.net/resources/1', is_referenced_call[1][0])
+
+ @patch('pyramid_urireferencer.protected_resources.pyramid_urireferencer.Referencer.is_referenced')
+ def test_protected_operation_500_json(self, is_referenced_mock):
+ dummy = DummyParent()
+ dummy.request.headers = {"Accept": "application/json"}
+ is_referenced_mock.return_value = RegistryResponse('https://id.erfgoed.net/resources/1', False, None, None,
+ [get_app_500()])
+ res = dummy.protected_dummy()
+ self.assertEqual(500, res.status_code)
+ self.assertEqual(res.json_body["message"],
+ "Unable to verify the uri https://id.erfgoed.net/resources/1 is no longer being used.")
+ self.assertListEqual(res.json_body["errors"],
+ ["https://dev-app.onroerenderfgoed.be/: Could not verify the uri is no longer being used."])
+ self.assertEqual("application/json", res.content_type)
+
+ is_referenced_call = is_referenced_mock.mock_calls[0]
+ self.assertEqual('https://id.erfgoed.net/resources/1', is_referenced_call[1][0])
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"webtest",
"httpretty",
"coveralls",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "py.test --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
beautifulsoup4==4.12.3
certifi==2021.5.30
coverage==6.2
coveralls==3.3.1
docopt==0.6.2
httpretty==1.1.4
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==5.2.0
packaging==21.3
PasteDeploy==2.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pyramid==1.6.1
-e git+https://github.com/OnroerendErfgoed/pyramid_urireferencer.git@067293d191dc9dd4f7c2554f71bf0c730786a872#egg=pyramid_urireferencer
pytest==7.0.1
pytest-cov==4.0.0
repoze.lru==0.7
requests==2.9.1
soupsieve==2.3.2.post1
tomli==1.2.3
translationstring==1.4
typing_extensions==4.1.1
venusian==3.0.0
waitress==2.0.0
WebOb==1.8.9
WebTest==3.0.0
zipp==3.6.0
zope.deprecation==4.4.0
zope.interface==5.5.2
| name: pyramid_urireferencer
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- beautifulsoup4==4.12.3
- coverage==6.2
- coveralls==3.3.1
- docopt==0.6.2
- httpretty==1.1.4
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==5.2.0
- packaging==21.3
- pastedeploy==2.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pyramid==1.6.1
- pytest==7.0.1
- pytest-cov==4.0.0
- repoze-lru==0.7
- requests==2.9.1
- soupsieve==2.3.2.post1
- tomli==1.2.3
- translationstring==1.4
- typing-extensions==4.1.1
- venusian==3.0.0
- waitress==2.0.0
- webob==1.8.9
- webtest==3.0.0
- zipp==3.6.0
- zope-deprecation==4.4.0
- zope-interface==5.5.2
prefix: /opt/conda/envs/pyramid_urireferencer
| [
"tests/test_protected_resources.py::ProtectedTests::test_protected_operation_500_json"
]
| []
| [
"tests/test_protected_resources.py::ProtectedTests::test_protected_operation",
"tests/test_protected_resources.py::ProtectedTests::test_protected_operation_409",
"tests/test_protected_resources.py::ProtectedTests::test_protected_operation_409_2",
"tests/test_protected_resources.py::ProtectedTests::test_protected_operation_409_json",
"tests/test_protected_resources.py::ProtectedTests::test_protected_operation_500"
]
| []
| MIT License | 779 | [
"pyramid_urireferencer/protected_resources.py",
"pyramid_urireferencer/models.py",
"CHANGES.rst"
]
| [
"pyramid_urireferencer/protected_resources.py",
"pyramid_urireferencer/models.py",
"CHANGES.rst"
]
|
|
morepath__dectate-40 | 704615da9f6bf57d57e4f42d3a710475de777b08 | 2016-09-27 13:37:39 | 704615da9f6bf57d57e4f42d3a710475de777b08 | diff --git a/CHANGES.txt b/CHANGES.txt
index 37a51e8..75646cf 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -4,6 +4,38 @@ CHANGES
0.12 (unreleased)
=================
+- **Breaking changes**: previously you defined new directives using the
+ ``App.directive`` directive. This would lead to import confusion: you
+ *have* to import the modules that define directives before you can actually
+ use them, even though you've already imported your app class.
+
+ In this version of Dectate we've changed the way you define directives.
+ Instead of::
+
+ class MyApp(dectate.App):
+ pass
+
+ @MyApp.directive('foo')
+ class FooAction(dectate.Action):
+ ...
+
+ You now write this::
+
+ class FooAction(directive.Action)
+ ...
+
+ class MyApp(dectate.App):
+ foo = directive(FooAction)
+
+ So, you define the directives directly on the app class that needs
+ them.
+
+ Uses of ``private_action_class`` should be replaced by an underscored
+ directive definition::
+
+ class MyApp(dectate.App):
+ _my_private_thing = directive(PrivateAction)
+
- Use the same Git ignore file used in other Morepath projects.
- If you set the ``app_class_arg`` class attribute to ``True`` on an
diff --git a/dectate/__init__.py b/dectate/__init__.py
index 153be1a..1ffc9b2 100644
--- a/dectate/__init__.py
+++ b/dectate/__init__.py
@@ -1,5 +1,5 @@
# flake8: noqa
-from .app import App
+from .app import App, directive
from .config import commit, Action, Composite, CodeInfo, NOT_FOUND
from .error import (ConfigError, DirectiveError, TopologicalSortError,
DirectiveReportError, ConflictError, QueryError)
diff --git a/dectate/app.py b/dectate/app.py
index de93c83..ceace6e 100644
--- a/dectate/app.py
+++ b/dectate/app.py
@@ -1,4 +1,3 @@
-import logging
import sys
from .config import Configurable, Directive, commit, create_code_info
from .compat import with_metaclass
@@ -65,51 +64,14 @@ class App(with_metaclass(AppMeta)):
"""
@classmethod
- def directive(cls, name):
- """Decorator to register a new directive with this application class.
-
- You use this as a class decorator for a
- :class:`dectate.Action` or a :class:`dectate.Composite`
- subclass::
-
- @MyApp.directive('my_directive')
- class FooAction(dectate.Action):
- ...
-
- This needs to be executed *before* the directive is used and
- thus might introduce import dependency issues unlike normal
- Dectate configuration, so beware! An easy way to make sure
- that all directives are installed before you use them is to
- make sure you define them in the same module as where you
- define the :class:`App` subclass that has them.
-
- :param name: the name of the directive to register.
- :return: a directive that when called installs the directive
- method on the class.
- """
- return DirectiveDirective(cls, name)
-
- @classmethod
- def private_action_class(cls, action_class):
- """Register a private action class.
-
- In some cases action classes can be an implementation detail,
- for instance in the implementation of a Composite action.
-
- In this case you don't want the action class to be known
- but not have a directive.
-
- This function may be used as a decorator like this::
-
- @App.private_action_class
- class MyActionClass(dectate.Action):
- ...
-
- :param action_class: the :class:`dectate.Action` subclass to register.
- :return: the :class`dectate.Action` class that was registered.
- """
- cls.dectate.register_action_class(action_class)
- return action_class
+ def get_directive_methods(cls):
+ for name in dir(cls):
+ attr = getattr(cls, name)
+ im_func = getattr(attr, '__func__', None)
+ if im_func is None:
+ continue
+ if hasattr(im_func, 'action_factory'):
+ yield name, attr
@classmethod
def commit(cls):
@@ -146,42 +108,36 @@ class App(with_metaclass(AppMeta)):
pass
-class DirectiveDirective(object):
- """Implementation of the ``directive`` directive.
+def directive(action_factory):
+ """Create a classmethod to hook action to application class.
- :param cls: the class that this directive is registered on.
- :param name: the name of the directive.
- """
- def __init__(self, cls, name):
- self.cls = cls
- self.name = name
+ You pass in a :class:`dectate.Action` or a
+ :class:`dectate.Composite` subclass and can attach the result as a
+ class method to an :class:`dectate.App` subclass::
- def __call__(self, action_factory):
- """Register the directive with app class.
+ class FooAction(dectate.Action):
+ ...
- Creates a class method on the app class for the directive.
+ class MyApp(dectate.App):
+ my_directive = dectate.directive(MyAction)
- :param action_factory: the :class:`dectate.Action` or
- :class:`dectate.Composite` subclass to register.
- :return: the action or composite subclass that was registered.
- """
- directive_name = self.name
-
- def method(cls, *args, **kw):
- frame = sys._getframe(1)
- code_info = create_code_info(frame)
- logger = logging.getLogger('%s.%s' %
- (cls.logger_name, directive_name))
- return Directive(cls, action_factory, args, kw,
- code_info, directive_name, logger)
- method.action_factory = action_factory # to help sphinxext
- setattr(self.cls, self.name, classmethod(method))
- method.__name__ = self.name
- # As of Python 3.5, the repr of bound methods uses __qualname__ instead
- # of __name__. See http://bugs.python.org/issue21389#msg217566
- if hasattr(method, '__qualname__'):
- method.__qualname__ = type(self.cls).__name__ + '.' + self.name
- method.__doc__ = action_factory.__doc__
- method.__module__ = action_factory.__module__
- self.cls.dectate.register_action_class(action_factory)
- return action_factory
+ Alternatively you can also define the direction inline using
+ this as a decorator::
+
+ class MyApp(dectate.App):
+ @directive
+ class my_directive(dectate.Action):
+ ...
+
+ :param action_factory: an action class to use as the directive.
+ :return: a class method that represents the directive.
+ """
+ def method(cls, *args, **kw):
+ frame = sys._getframe(1)
+ code_info = create_code_info(frame)
+ return Directive(action_factory, code_info, cls, args, kw)
+ # sphinxext and App.get_action_classes need to recognize this
+ method.action_factory = action_factory
+ method.__doc__ = action_factory.__doc__
+ method.__module__ = action_factory.__module__
+ return classmethod(method)
diff --git a/dectate/config.py b/dectate/config.py
index 084e601..f37b0ee 100644
--- a/dectate/config.py
+++ b/dectate/config.py
@@ -1,4 +1,5 @@
import abc
+import logging
import sys
import inspect
from .error import (
@@ -47,22 +48,12 @@ class Configurable(object):
self.extends = extends
self.config = config
# all action classes known
- self._action_classes = set()
+ self._action_classes = {}
# directives used with configurable
self._directives = []
# have we ever been committed
self.committed = False
- def register_action_class(self, action_class):
- """Register an action class with this configurable.
-
- Called during import time when the :meth:`App.directive` directive
- is executed.
-
- :param action_class: the :class:`dectate.Action` subclass to register.
- """
- self._action_classes.add(action_class)
-
def register_directive(self, directive, obj):
"""Register a directive with this configurable.
@@ -75,6 +66,38 @@ class Configurable(object):
"""
self._directives.append((directive, obj))
+ def _fixup_directive_names(self):
+ """Set up correct name for directives.
+ """
+ app_class = self.app_class
+ for name, method in app_class.get_directive_methods():
+ func = method.__func__
+ func.__name__ = name
+ # As of Python 3.5, the repr of bound methods uses
+ # __qualname__ instead of __name__.
+ # See http://bugs.python.org/issue21389#msg217566
+ if hasattr(func, '__qualname__'):
+ func.__qualname__ = type(app_class).__name__ + '.' + name
+
+ def get_action_classes(self):
+ """Get all action classes registered for this app.
+
+ This includes action classes registered for its base class.
+
+ :return: a dict with action class keys and name values.
+ """
+ result = {}
+ app_class = self.app_class
+ for name, method in app_class.get_directive_methods():
+ result[method.__func__.action_factory] = name
+
+ # add any action classes defined by base classes
+ for configurable in self.extends:
+ for action_class, name in configurable._action_classes.items():
+ if action_class not in result:
+ result[action_class] = name
+ return result
+
def setup(self):
"""Set up config object and action groups.
@@ -82,42 +105,14 @@ class Configurable(object):
Takes inheritance of apps into account.
"""
- # add any action classes defined by base classes
- s = self._action_classes
- for configurable in self.extends:
- for action_class in configurable._action_classes:
- if action_class not in s:
- s.add(action_class)
-
- # we want to have use group_class for each true Action class
- action_classes = set()
- for action_class in s:
- if not issubclass(action_class, Action):
- continue
- group_class = action_class.group_class
- if group_class is None:
- group_class = action_class
- else:
- if group_class.group_class is not None:
- raise ConfigError(
- "Cannot use group_class on another action class "
- "that uses group_class: %r" % action_class)
- if 'config' in action_class.__dict__:
- raise ConfigError(
- "Cannot use config class attribute when you use "
- "group_class: %r" % action_class)
- if 'before' in action_class.__dict__:
- raise ConfigError(
- "Cannot define before method when you use "
- "group_class: %r" % action_class)
- if 'after' in action_class.__dict__:
- raise ConfigError(
- "Cannot define after method when you use "
- "group_class: %r" % action_class)
- action_classes.add(group_class)
+ self._fixup_directive_names()
+ self._action_classes = self.get_action_classes()
+
+ grouped_action_classes = sort_action_classes(
+ group_action_classes(self._action_classes.keys()))
# delete any old configuration in case we run this a second time
- for action_class in sort_action_classes(action_classes):
+ for action_class in grouped_action_classes:
self.delete_config(action_class)
# now we create ActionGroup objects for each action class group
@@ -125,7 +120,7 @@ class Configurable(object):
# and we track what config factories we've seen for consistency
# checking
self._factories_seen = {}
- for action_class in sort_action_classes(action_classes):
+ for action_class in grouped_action_classes:
self.setup_config(action_class)
d[action_class] = ActionGroup(action_class,
self.action_extends(action_class))
@@ -678,28 +673,23 @@ class Directive(object):
When used as a decorator this tracks where in the source code
the directive was used for the purposes of error reporting.
"""
- def __init__(self, app_class, action_factory, args, kw,
- code_info, directive_name, logger):
+ def __init__(self, action_factory, code_info, app_class, args, kw):
"""
+ :param action_factory: function that constructs an action instance.
+ :code_info: a :class:`CodeInfo` instance describing where this
+ directive was invoked.
:param app_class: the :class:`dectate.App` subclass that this
directive is used on.
- :param action_factory: function that constructs an action instance.
:args: the positional arguments passed into the directive.
:kw: the keyword arguments passed into the directive.
- :code_info: a :class:`CodeInfo` instance describing where this
- directive was invoked.
- :directive_name: the name of this directive.
- :logger: the logger object to use.
"""
+ self.action_factory = action_factory
+ self.code_info = code_info
self.app_class = app_class
self.configurable = app_class.dectate
- self.action_factory = action_factory
self.args = args
self.kw = kw
- self.code_info = code_info
- self.directive_name = directive_name
self.argument_info = (args, kw)
- self.logger = logger
def action(self):
"""Get the :class:`Action` instance represented by this directive.
@@ -739,8 +729,10 @@ class Directive(object):
:obj: the function or class object to that this directive is used
on.
"""
- if self.logger is None:
- return
+ directive_name = configurable._action_classes[self.action_factory]
+ logger = logging.getLogger('%s.%s' % (
+ configurable.app_class.logger_name,
+ directive_name))
target_dotted_name = dotted_name(configurable.app_class)
is_same = self.app_class is configurable.app_class
@@ -761,13 +753,13 @@ class Directive(object):
sorted(kw.items())])
message = '@%s.%s(%s) on %s' % (
- target_dotted_name, self.directive_name, arguments,
+ target_dotted_name, directive_name, arguments,
func_dotted_name)
if not is_same:
message += ' (from %s)' % dotted_name(self.app_class)
- self.logger.debug(message)
+ logger.debug(message)
class DirectiveAbbreviation(object):
@@ -787,13 +779,11 @@ class DirectiveAbbreviation(object):
combined_kw = directive.kw.copy()
combined_kw.update(kw)
return Directive(
- app_class=directive.app_class,
action_factory=directive.action_factory,
- args=combined_args,
- kw=combined_kw,
code_info=code_info,
- directive_name=directive.directive_name,
- logger=directive.logger)
+ app_class=directive.app_class,
+ args=combined_args,
+ kw=combined_kw)
def commit(*apps):
@@ -840,6 +830,41 @@ def sort_action_classes(action_classes):
return topological_sort(action_classes, lambda c: c.depends)
+def group_action_classes(action_classes):
+ """Group action classes by ``group_class``.
+
+ :param action_classes: iterable of action classes
+ :return: set of action classes grouped together.
+ """
+ # we want to use group_class for each true Action class
+ result = set()
+ for action_class in action_classes:
+ if not issubclass(action_class, Action):
+ continue
+ group_class = action_class.group_class
+ if group_class is None:
+ group_class = action_class
+ else:
+ if group_class.group_class is not None:
+ raise ConfigError(
+ "Cannot use group_class on another action class "
+ "that uses group_class: %r" % action_class)
+ if 'config' in action_class.__dict__:
+ raise ConfigError(
+ "Cannot use config class attribute when you use "
+ "group_class: %r" % action_class)
+ if 'before' in action_class.__dict__:
+ raise ConfigError(
+ "Cannot define before method when you use "
+ "group_class: %r" % action_class)
+ if 'after' in action_class.__dict__:
+ raise ConfigError(
+ "Cannot define after method when you use "
+ "group_class: %r" % action_class)
+ result.add(group_class)
+ return result
+
+
def expand_actions(actions):
"""Expand any :class:`Composite` instances into :class:`Action` instances.
diff --git a/doc/api.rst b/doc/api.rst
index 510c7bc..ffcb4c4 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -20,6 +20,8 @@ API
:inherited-members:
:members:
+.. autofunction:: directive
+
.. autofunction:: query_tool
.. autofunction:: query_app
diff --git a/doc/usage.rst b/doc/usage.rst
index 29ecb71..e1773ca 100644
--- a/doc/usage.rst
+++ b/doc/usage.rst
@@ -100,29 +100,15 @@ Here are some features of Dectate:
provides the infrastructure to easily construct command-line tools
for querying configuration.
-App classes
------------
+Actions
+-------
-Configuration in Dectate is associated with special *classes* which
-derive from :class:`dectate.App`:
+In Dectate, the simple `plugins` example above looks like this:
.. testcode::
import dectate
- class MyApp(dectate.App):
- pass
-
-Creating a directive
---------------------
-
-We can now use the :meth:`dectate.App.directive` decorator to declare
-a *directive* which executes a special configuration action. Let's
-replicate the simple `plugins` example above using Dectate:
-
-.. testcode::
-
- @MyApp.directive('plugin')
class PluginAction(dectate.Action):
config = {
'plugins': dict
@@ -136,38 +122,53 @@ replicate the simple `plugins` example above using Dectate:
def perform(self, obj, plugins):
plugins[self.name] = obj
+We have formulated a configuration action that affects a ``plugins``
+dict.
+
+App classes
+-----------
+
+Configuration in Dectate is associated with special *classes* which
+derive from :class:`dectate.App`. We also associate the action with
+it as a directive:
+
+.. testcode::
+
+ class PluginApp(dectate.App):
+ plugin = dectate.directive(PluginAction)
+
Let's use it now:
.. testcode::
- @MyApp.plugin('a')
+ @PluginApp.plugin('a')
def f():
pass # do something interesting
- @MyApp.plugin('b')
+ @PluginApp.plugin('b')
def g():
pass # something else interesting
-We have registered the function ``f`` on ``MyApp``. The ``name``
+We have registered the function ``f`` on ``PluginApp``. The ``name``
argument is ``'a'``. We've registered ``g`` under ``'b'``.
-We can now commit the configuration for ``MyApp``:
+We can now commit the configuration for ``PluginApp``:
.. testcode::
- dectate.commit(MyApp)
+ dectate.commit(PluginApp)
Once the commit has successfully completed, we can take a look at the
configuration:
.. doctest::
- >>> sorted(MyApp.config.plugins.items())
+ >>> sorted(PluginApp.config.plugins.items())
[('a', <function f at ...>), ('b', <function g at ...>)]
What are the changes between this and the simple plugins example?
-The main difference is that ``plugin`` decorator is associated with a
+The main difference is that the ``plugin`` decorator is associated with a
class and so is the resulting configuration, which gets stored as the
``plugins`` attribute of :attr:`dectate.App.config`. The other
difference is that we provide an ``identifier`` method in the action
@@ -177,20 +178,20 @@ definition. These differences support configuration *reuse*,
Reuse
~~~~~
-You can reuse configuration by simply subclassing ``MyApp``:
+You can reuse configuration by simply subclassing ``PluginApp``:
.. testcode::
- class SubApp(MyApp):
+ class SubApp(PluginApp):
pass
We commit both classes:
.. testcode::
- dectate.commit(MyApp, SubApp)
+ dectate.commit(PluginApp, SubApp)
-``SubClass`` now contains all the configuration declared for ``MyApp``:
+``SubApp`` now contains all the configuration declared for ``PluginApp``:
>>> sorted(SubApp.config.plugins.items())
[('a', <function f at ...>), ('b', <function g at ...>)]
@@ -205,7 +206,7 @@ Consider this example:
.. testcode::
- class ConflictingApp(MyApp):
+ class ConflictingApp(PluginApp):
pass
@ConflictingApp.plugin('foo')
@@ -256,7 +257,7 @@ additional configuration actions:
def h():
pass # do something interesting
- dectate.commit(MyApp, SubApp)
+ dectate.commit(PluginApp, SubApp)
``SubApp`` now has the additional plugin ``c``:
@@ -265,11 +266,11 @@ additional configuration actions:
>>> sorted(SubApp.config.plugins.items())
[('a', <function f at ...>), ('b', <function g at ...>), ('c', <function h at ...>)]
-But ``MyApp`` is unaffected:
+But ``PluginApp`` is unaffected:
.. doctest::
- >>> sorted(MyApp.config.plugins.items())
+ >>> sorted(PluginApp.config.plugins.items())
[('a', <function f at ...>), ('b', <function g at ...>)]
Overrides
@@ -284,7 +285,7 @@ this in ``SubApp`` by simply reusing the same ``name``:
def x():
pass
- dectate.commit(MyApp, SubApp)
+ dectate.commit(PluginApp, SubApp)
In ``SubApp`` we now have changed the configuration for ``a`` to
register the function ``x`` instead of ``f``. If we had done this for
@@ -296,16 +297,16 @@ lets you override configuration instead:
>>> sorted(SubApp.config.plugins.items())
[('a', <function x at ...>), ('b', <function g at ...>), ('c', <function h at ...>)]
-But ``MyApp`` still uses ``f``:
+But ``PluginApp`` still uses ``f``:
- >>> sorted(MyApp.config.plugins.items())
+ >>> sorted(PluginApp.config.plugins.items())
[('a', <function f at ...>), ('b', <function g at ...>)]
Isolation
~~~~~~~~~
We have already seen in the inheritance and override examples that
-``MyApp`` is isolated from configuration extension and overrides done
+``PluginApp`` is isolated from configuration extension and overrides done
for ``SubApp``. We can in fact entirely isolate configuration from
each other.
@@ -314,10 +315,6 @@ from everything before:
.. testcode::
- class BaseApp(dectate.App):
- pass
-
- @BaseApp.directive('plugin')
class PluginAction2(dectate.Action):
config = {
'plugins': dict
@@ -331,6 +328,9 @@ from everything before:
def perform(self, obj, plugins):
plugins[self.name] = obj
+ class BaseApp(dectate.App):
+ plugin = dectate.directive(PluginAction2)
+
We don't set up any configuration for ``BaseApp``; it's intended to be
part of our framework. Now we create two subclasses:
@@ -369,12 +369,11 @@ This won't affect ``TwoApp`` in any way:
``OneApp`` and ``TwoApp`` are isolated, so configurations are
independent, and cannot conflict or override.
-The Anatomy of a Directive
---------------------------
+The Anatomy of an Action
+------------------------
-Let's consider the directive registration again in detail::
+Let's consider the plugin action in detail::
- @MyApp.directive('plugin')
class PluginAction(dectate.Action):
config = {
'plugins': dict
@@ -390,11 +389,8 @@ Let's consider the directive registration again in detail::
What is going on here?
-* We create a new directive called ``plugin`` on ``MyApp``. It also
- exists for its subclasses.
-
-* The directive is implemented with a custom class called
- ``PluginAction`` that inherits from :class:`dectate.Action`.
+* We implement a custom class called ``PluginAction`` that inherits
+ from :class:`dectate.Action`.
* ``config`` (:attr:`dectate.Action.config`) specifies that this
directive has a configuration effect on ``plugins``. We declare that
@@ -421,14 +417,20 @@ What is going on here?
this case we store ``obj`` under the key ``self.name`` in the
``plugins`` dict.
+We then associate the action with a class as a directive::
+
+ class PluginApp(dectate.App):
+ plugin = dectate.directive(PluginAction)
+
Once we have declared the directive for our framework we can tell
programmers to use it.
Directives have absolutely no effect until *commit* is called, which
we do with ``dectate.commit``. This performs the actions and we can
-then find the result ``MyApp.config`` (:attr:`dectate.App.config`).
+then find the result ``PluginApp.config``
+(:attr:`dectate.App.config`).
-The results are in ``MyApp.config.plugins`` as we set this up with
+The results are in ``PluginApp.config.plugins`` as we set this up with
``config`` in our ``PluginAction``.
Depends
@@ -440,15 +442,11 @@ of directive depends on the former. You can make sure this happens by
using the ``depends`` (:attr:`dectate.Action.depends`) class
attribute.
-First we set up a ``foo`` directive that registers into a ``foos``
+First we set up a ``FooAction`` that registers into a ``foos``
dict:
.. testcode::
- class DependsApp(dectate.App):
- pass
-
- @DependsApp.directive('foo')
class FooAction(dectate.Action):
config = {
'foos': dict
@@ -462,13 +460,12 @@ dict:
def perform(self, obj, foos):
foos[self.name] = obj
-Now we create a ``bar`` directive that depends on ``FooDirective`` and
-uses information in the ``foos`` dict:
+Now we create a ``BarAction`` directive that depends on ``FooAction``
+and uses information in the ``foos`` dict:
.. testcode::
- @DependsApp.directive('bar')
- class BarAction(dectate.Action):
+ class BarAction(dectate.Action):
depends = [FooAction]
config = {
@@ -485,8 +482,19 @@ uses information in the ``foos`` dict:
in_foo = self.name in foos
bars.append((self.name, obj, in_foo))
-We have now ensured that ``BarAction`` actions are performed after
-``FooAction`` action, no matter what order we use them:
+In order to use them, we need to hook up the actions as directives
+onto an app class:
+
+.. testcode::
+
+ class DependsApp(dectate.App):
+ foo = dectate.directive(FooAction)
+ bar = dectate.directive(BarAction)
+
+
+Using ``depends``, we have ensured that ``BarAction`` actions are
+performed after ``FooAction`` actions, no matter in what order we use
+them:
.. testcode::
@@ -515,11 +523,12 @@ We expect ``in_foo`` to be ``True`` for ``a`` but to be ``False`` for
config dependencies
-------------------
-In the example above, the items in ``bars`` depend on the items in ``foos``
-and we've implemented this dependency in the ``perform`` of ``BarDirective``.
+In the example above, the items in ``bars`` depend on the items in
+``foos`` and we've implemented this dependency in the ``perform`` of
+``BarAction``.
-We can instead make the configuration object for the ``BarDirective``
-depend on ``foos``. This way ``BarDirective`` does not need to know
+We can instead make the configuration object for the ``BarAction``
+depend on ``foos``. This way ``BarAction`` does not need to know
about ``foos``. You can declare a dependency between config objects
with the ``factory_arguments`` attribute of the config factory. Any
config object that is created in earlier dependencies of this action,
@@ -527,15 +536,11 @@ or in the action itself, can be listed in ``factory_arguments``. The
key and value in ``factory_arguments`` have to match the key and value
in ``config`` of that earlier action.
-First we create an app with a ``FooAction`` that sets up a ``foos``
-config item as before:
+First we create a ``FooAction`` that sets up a ``foos`` config item as
+before:
.. testcode::
- class ConfigDependsApp(dectate.App):
- pass
-
- @ConfigDependsApp.directive('foo')
class FooAction(dectate.Action):
config = {
'foos': dict
@@ -572,7 +577,6 @@ We create a ``BarAction`` that depends on the ``FooAction`` (so that
.. testcode::
- @ConfigDependsApp.directive('bar')
class BarAction(dectate.Action):
depends = [FooAction]
@@ -589,6 +593,15 @@ We create a ``BarAction`` that depends on the ``FooAction`` (so that
def perform(self, obj, bar):
bar.add(self.name, obj)
+
+And we set them up as directives:
+
+.. testcode::
+
+ class ConfigDependsApp(dectate.App):
+ foo = dectate.directive(FooAction)
+ bar = dectate.directive(BarAction)
+
When we use our directives:
.. testcode::
@@ -625,7 +638,6 @@ so on by setting the special ``app_class_arg`` class attribute:
.. testcode::
- @MyApp.directive('plugin_with_app_class')
class PluginAction(dectate.Action):
config = {
'plugins': dict
@@ -642,6 +654,9 @@ so on by setting the special ``app_class_arg`` class attribute:
plugins[self.name] = obj
app_class.touched = True
+ class MyApp(dectate.App):
+ plugin_with_app_class = dectate.directive(PluginAction)
+
When we now perform this directive:
.. testcode::
@@ -672,10 +687,6 @@ using ``before`` (:meth:`dectate.Action.before`) and ``after``
.. testcode::
- class BeforeAfterApp(dectate.App):
- pass
-
- @BeforeAfterApp.directive('foo')
class FooAction(dectate.Action):
config = {
'foos': list
@@ -697,6 +708,9 @@ using ``before`` (:meth:`dectate.Action.before`) and ``after``
def perform(self, obj, foos):
foos.append((self.name, obj))
+ class BeforeAfterApp(dectate.App):
+ foo = dectate.directive(FooAction)
+
@BeforeAfterApp.foo('a')
def f():
pass
@@ -725,10 +739,6 @@ share their ``config`` and their ``before`` and ``after`` methods.
.. testcode::
- class GroupApp(dectate.App):
- pass
-
- @GroupApp.directive('foo')
class FooAction(dectate.Action):
config = {
'foos': list
@@ -742,49 +752,49 @@ share their ``config`` and their ``before`` and ``after`` methods.
def perform(self, obj, foos):
foos.append((self.name, obj))
-We now create a ``BarDirective`` that groups with ``FooAction``:
+We now create a ``BarAction`` that groups with ``FooAction``:
.. testcode::
- @GroupApp.directive('bar')
class BarAction(dectate.Action):
- group_class = FooAction
+ group_class = FooAction
- def __init__(self, name):
- self.name = name
+ def __init__(self, name):
+ self.name = name
- def identifier(self, foos):
- return self.name
+ def identifier(self, foos):
+ return self.name
- def perform(self, obj, foos):
- foos.append((self.name, obj))
+ def perform(self, obj, foos):
+ foos.append((self.name, obj))
+
+ class GroupApp(dectate.App):
+ foo = dectate.directive(FooAction)
+ bar = dectate.directive(BarAction)
It reuses the ``config`` from ``FooAction``. This means that ``foo``
and ``bar`` can be in conflict:
.. testcode::
- class GroupConflictApp(GroupApp):
- pass
-
- @GroupConflictApp.foo('a')
+ @GroupApp.foo('a')
def f():
pass
- @GroupConflictApp.bar('a')
+ @GroupApp.bar('a')
def g():
pass
.. doctest::
- >>> dectate.commit(GroupConflictApp)
+ >>> dectate.commit(GroupApp)
Traceback (most recent call last):
...
ConflictError: Conflict between:
File "...", line 4
- @GroupConflictApp.foo('a')
+ @GroupApp.foo('a')
File "...", line 8
- @GroupConflictApp.bar('a')
+ @GroupApp.bar('a')
Additional discriminators
-------------------------
@@ -795,10 +805,6 @@ all at once. You can take care of this with the ``discriminators``
.. testcode::
- class DiscriminatorsApp(dectate.App):
- pass
-
- @DiscriminatorsApp.directive('foo')
class FooAction(dectate.Action):
config = {
'foos': dict
@@ -816,6 +822,10 @@ all at once. You can take care of this with the ``discriminators``
def perform(self, obj, foos):
foos[self.name] = obj
+
+ class DiscriminatorsApp(dectate.App):
+ foo = dectate.directive(FooAction)
+
An action now conflicts with an action of the same name *and* with
any action that is in the ``extra`` list:
@@ -849,15 +859,11 @@ Composite actions
When you can define an action entirely in terms of other actions, you
can subclass :class:`dectate.Composite`.
-First we define a normal ``sub`` directive to use in the composite action
+First we define a normal ``SubAction`` to use in the composite action
later:
.. testcode::
- class CompositeApp(dectate.App):
- pass
-
- @CompositeApp.directive('sub')
class SubAction(dectate.Action):
config = {
'my': list
@@ -878,7 +884,6 @@ uses ``SubAction`` in an ``actions``
.. testcode::
- @CompositeApp.directive('composite')
class CompositeAction(dectate.Composite):
def __init__(self, names):
self.names = names
@@ -886,6 +891,14 @@ uses ``SubAction`` in an ``actions``
def actions(self, obj):
return [(SubAction(name), obj) for name in self.names]
+ class CompositeApp(dectate.App):
+ _sub = dectate.directive(SubAction)
+ composite = dectate.directive(CompositeAction)
+
+Note that even though ``_sub`` is not intended to be a public part of
+the API, we still need to include it in our :class:`dectate.App`
+subclass, as Dectate does need to know it exists.
+
We can now use it:
.. testcode::
@@ -911,11 +924,7 @@ use the ``with`` statement to do so with less repetition:
.. testcode::
- class WithApp(dectate.App):
- pass
-
- @WithApp.directive('foo')
- class SubAction(dectate.Action):
+ class FooAction(dectate.Action):
config = {
'my': list
}
@@ -930,6 +939,10 @@ use the ``with`` statement to do so with less repetition:
def perform(self, obj, my):
my.append((self.a, self.b, obj))
+
+ class WithApp(dectate.App):
+ foo = dectate.directive(FooAction)
+
Instead of this:
.. testcode::
@@ -1020,7 +1033,7 @@ querying
Dectate keeps a database of committed actions that can be queried by
using :class:`dectate.Query`.
-Here is an example of a query for all the plugin actions on ``MyApp``:
+Here is an example of a query for all the plugin actions on ``PluginApp``:
.. testcode::
@@ -1031,7 +1044,7 @@ We can now run the query:
.. doctest::
:options: +NORMALIZE_WHITESPACE
- >>> list(q(MyApp))
+ >>> list(q(PluginApp))
[(<PluginAction ...>, <function f ...>),
(<PluginAction ...>, <function g ...>)]
@@ -1039,7 +1052,7 @@ We can also filter the query for attributes of the action:
.. doctest::
- >>> list(q.filter(name='a')(MyApp))
+ >>> list(q.filter(name='a')(PluginApp))
[(<PluginAction object ...>, <function f ...>)]
Sometimes the attribute on the action is not the same as the name you
| directive directive confusing
When you use the ``directive`` directive, you need to make sure that the module it lives in is imported before you scan any other code. This can be surprising, as you might expect it to work like any other Dectate directive, where import order is immaterial.
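For context, this is what the current style looks like (adapted from the ``anapp.py`` test fixture in this repository); note that the directive only comes into existence once this module is imported:

```
import dectate


class AnApp(dectate.App):
    pass


# Registering the 'foo' directive is a side effect of importing this
# module, so it must be imported before any code that uses @AnApp.foo.
@AnApp.directive('foo')
class FooAction(dectate.Action):
    def __init__(self, name):
        self.name = name

    def identifier(self):
        return self.name

    def perform(self, obj):
        pass
```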
Perhaps this implies the ``directive`` directive shouldn't be a directive at all. Directives are really like methods. Perhaps we should install them as methods on the ``App`` class, like this:
```
class App(dectate.App):
foo = dectate.directive(FooAction)
```
If we make this the way to install directives, we would have no more import issues, as it's impossible to use a new directive without importing its app class first.
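A minimal standalone sketch of how such a class-attribute ``directive`` could work as a descriptor follows; the ``_actions`` list and the simplified ``App`` base here are assumptions made for the sketch, not the actual Dectate implementation:

```
class directive(object):
    """Sketch: install an action class as a directive by assigning
    it to a class attribute on an App subclass."""

    def __init__(self, action_factory):
        self.action_factory = action_factory

    def __get__(self, instance, app_class):
        # Accessing App.foo returns a decorator bound to the app class;
        # applying the decorator only records the action, which would
        # be performed later during commit.
        def make_directive(*args, **kw):
            def register(wrapped):
                app_class._actions.append(
                    (self.action_factory, args, kw, wrapped))
                return wrapped
            return register
        return make_directive


class App(object):
    _actions = []  # simplified; real Dectate isolates config per app class


class FooAction(object):  # stand-in for a dectate.Action subclass
    def __init__(self, name):
        self.name = name


class MyApp(App):
    foo = directive(FooAction)


@MyApp.foo('hello')
def f():
    pass

assert MyApp._actions == [(FooAction, ('hello',), {}, f)]
```

Because using ``MyApp.foo`` requires ``MyApp`` to be imported first, the module defining the action is always imported before the directive is used.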
What do you think, @href, @taschini?
| morepath/dectate | diff --git a/dectate/tests/fixtures/anapp.py b/dectate/tests/fixtures/anapp.py
index 5d71b05..30566cc 100644
--- a/dectate/tests/fixtures/anapp.py
+++ b/dectate/tests/fixtures/anapp.py
@@ -1,9 +1,22 @@
import dectate
+class FooAction(dectate.Action):
+ def __init__(self, name):
+ self.name = name
+
+ def identifier(self):
+ return self.name
+
+ def perform(self, obj):
+ pass
+
+
class AnApp(dectate.App):
known = "definitely not a directive"
+ foo = dectate.directive(FooAction)
+
def other():
pass
@@ -11,15 +24,3 @@ def other():
class OtherClass(object):
pass
-
-
[email protected]('foo')
-class FooAction(dectate.Action):
- def __init__(self, name):
- self.name = name
-
- def identifier(self):
- return self.name
-
- def perform(self, obj):
- pass
diff --git a/dectate/tests/test_directive.py b/dectate/tests/test_directive.py
index e538179..9004d2e 100644
--- a/dectate/tests/test_directive.py
+++ b/dectate/tests/test_directive.py
@@ -1,4 +1,4 @@
-from dectate.app import App
+from dectate.app import App, directive
from dectate.config import commit, Action, Composite
from dectate.error import ConflictError, ConfigError
@@ -6,10 +6,6 @@ import pytest
def test_simple():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -24,6 +20,9 @@ def test_simple():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -33,11 +32,34 @@ def test_simple():
assert MyApp.config.my == [('hello', f)]
-def test_commit_method():
+def test_decorator():
+
class MyApp(App):
+ @directive
+ class foo(Action):
+ config = {
+ 'my': list
+ }
+
+ def __init__(self, message):
+ self.message = message
+
+ def identifier(self, my):
+ return self.message
+
+ def perform(self, obj, my):
+ my.append((self.message, obj))
+
+ @MyApp.foo('hello')
+ def f():
pass
- @MyApp.directive('foo')
+ commit(MyApp)
+
+ assert MyApp.config.my == [('hello', f)]
+
+
+def test_commit_method():
class MyDirective(Action):
config = {
'my': list
@@ -52,6 +74,9 @@ def test_commit_method():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -63,10 +88,6 @@ def test_commit_method():
def test_conflict_same_directive():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -81,6 +102,9 @@ def test_conflict_same_directive():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -97,13 +121,6 @@ def test_app_inherit():
class Registry(object):
pass
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': Registry
@@ -119,6 +136,12 @@ def test_app_inherit():
my.message = self.message
my.obj = obj
+ class MyApp(App):
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ pass
+
@MyApp.foo('hello')
def f():
pass
@@ -135,13 +158,6 @@ def test_app_override():
class Registry(object):
pass
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': Registry
@@ -157,6 +173,12 @@ def test_app_override():
my.message = self.message
my.obj = obj
+ class MyApp(App):
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ pass
+
@MyApp.foo('hello')
def f():
pass
@@ -174,10 +196,6 @@ def test_app_override():
def test_different_group_no_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -192,7 +210,6 @@ def test_different_group_no_conflict():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
config = {
'bar': list
@@ -207,6 +224,10 @@ def test_different_group_no_conflict():
def perform(self, obj, bar):
bar.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -222,10 +243,6 @@ def test_different_group_no_conflict():
def test_same_group_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -240,7 +257,6 @@ def test_same_group_conflict():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
# should now conflict
group_class = FooDirective
@@ -254,6 +270,10 @@ def test_same_group_conflict():
def perform(self, obj, foo):
foo.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -267,10 +287,6 @@ def test_same_group_conflict():
def test_discriminator_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -289,6 +305,9 @@ def test_discriminator_conflict():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo('f', ['a'])
def f():
pass
@@ -302,10 +321,6 @@ def test_discriminator_conflict():
def test_discriminator_same_group_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -324,10 +339,13 @@ def test_discriminator_same_group_conflict():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(FooDirective):
group_class = FooDirective
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.foo('f', ['a'])
def f():
pass
@@ -341,10 +359,6 @@ def test_discriminator_same_group_conflict():
def test_discriminator_no_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -363,6 +377,9 @@ def test_discriminator_no_conflict():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo('f', ['a'])
def f():
pass
@@ -377,10 +394,6 @@ def test_discriminator_no_conflict():
def test_discriminator_different_group_no_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -399,11 +412,14 @@ def test_discriminator_different_group_no_conflict():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(FooDirective):
# will have its own group key so in a different group
depends = [FooDirective]
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.foo('f', ['a'])
def f():
pass
@@ -418,10 +434,6 @@ def test_discriminator_different_group_no_conflict():
def test_depends():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -436,7 +448,6 @@ def test_depends():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
depends = [FooDirective]
@@ -453,6 +464,10 @@ def test_depends():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.bar('a')
def g():
pass
@@ -468,10 +483,6 @@ def test_depends():
def test_composite():
- class MyApp(App):
- pass
-
- @MyApp.directive('sub')
class SubDirective(Action):
config = {
'my': list
@@ -486,7 +497,6 @@ def test_composite():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('composite')
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
@@ -494,6 +504,10 @@ def test_composite():
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
+ class MyApp(App):
+ _sub = directive(SubDirective)
+ composite = directive(CompositeDirective)
+
@MyApp.composite(['a', 'b', 'c'])
def f():
pass
@@ -504,10 +518,6 @@ def test_composite():
def test_composite_change_object():
- class MyApp(App):
- pass
-
- @MyApp.directive('sub')
class SubDirective(Action):
config = {
'my': list
@@ -525,7 +535,6 @@ def test_composite_change_object():
def other():
pass
- @MyApp.directive('composite')
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
@@ -534,6 +543,10 @@ def test_composite_change_object():
return [(SubDirective(message),
other) for message in self.messages]
+ class MyApp(App):
+ _sub = directive(SubDirective)
+ composite = directive(CompositeDirective)
+
@MyApp.composite(['a', 'b', 'c'])
def f():
pass
@@ -544,10 +557,6 @@ def test_composite_change_object():
def test_composite_private_sub():
- class MyApp(App):
- pass
-
- @MyApp.private_action_class
class SubDirective(Action):
config = {
'my': list
@@ -562,7 +571,6 @@ def test_composite_private_sub():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('composite')
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
@@ -570,6 +578,11 @@ def test_composite_private_sub():
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
+ class MyApp(App):
+ # mark sub as private by using the underscore
+ _sub = directive(SubDirective)
+ composite = directive(CompositeDirective)
+
@MyApp.composite(['a', 'b', 'c'])
def f():
pass
@@ -580,10 +593,6 @@ def test_composite_private_sub():
def test_composite_private_composite():
- class MyApp(App):
- pass
-
- @MyApp.directive('sub')
class SubDirective(Action):
config = {
'my': list
@@ -598,7 +607,6 @@ def test_composite_private_composite():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.private_action_class
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
@@ -606,6 +614,10 @@ def test_composite_private_composite():
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
+ class MyApp(App):
+ sub = directive(SubDirective)
+ _composite = directive(CompositeDirective)
+
@MyApp.sub('a')
def f():
pass
@@ -616,10 +628,6 @@ def test_composite_private_composite():
def test_nested_composite():
- class MyApp(App):
- pass
-
- @MyApp.directive('sub')
class SubDirective(Action):
config = {
'my': list
@@ -634,7 +642,6 @@ def test_nested_composite():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('subcomposite')
class SubCompositeDirective(Composite):
def __init__(self, message):
self.message = message
@@ -643,7 +650,6 @@ def test_nested_composite():
yield SubDirective(self.message + '_0'), obj
yield SubDirective(self.message + '_1'), obj
- @MyApp.directive('composite')
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
@@ -652,6 +658,11 @@ def test_nested_composite():
return [(SubCompositeDirective(message), obj)
for message in self.messages]
+ class MyApp(App):
+ sub = directive(SubDirective)
+ subcomposite = directive(SubCompositeDirective)
+ composite = directive(CompositeDirective)
+
@MyApp.composite(['a', 'b', 'c'])
def f():
pass
@@ -666,10 +677,6 @@ def test_nested_composite():
def test_with_statement_kw():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -688,6 +695,9 @@ def test_with_statement_kw():
class Dummy(object):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+
with MyApp.foo(model=Dummy) as foo:
@foo(name='a')
@@ -707,10 +717,6 @@ def test_with_statement_kw():
def test_with_statement_args():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -726,6 +732,9 @@ def test_with_statement_args():
def perform(self, obj, my):
my.append((self.model, self.name, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+
class Dummy(object):
pass
@@ -757,10 +766,6 @@ def test_before():
assert self.before
self.l.append((name, obj))
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': Registry
@@ -779,6 +784,9 @@ def test_before():
def before(my):
my.before = True
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo(name='hello')
def f():
pass
@@ -801,10 +809,6 @@ def test_before_without_use():
assert self.before
self.l.append((name, obj))
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': Registry
@@ -823,6 +827,9 @@ def test_before_without_use():
def before(my):
my.before = True
+ class MyApp(App):
+ foo = directive(FooDirective)
+
commit(MyApp)
assert MyApp.config.my.before
@@ -839,10 +846,6 @@ def test_before_group():
assert self.before
self.l.append((name, obj))
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': Registry
@@ -861,7 +864,6 @@ def test_before_group():
def before(my):
my.before = True
- @MyApp.directive('bar')
class BarDirective(Action):
group_class = FooDirective
@@ -874,6 +876,10 @@ def test_before_group():
def perform(self, obj, my):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.bar(name='bye')
def f():
pass
@@ -891,10 +897,6 @@ def test_before_group():
def test_config_group():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -909,7 +911,6 @@ def test_config_group():
def perform(self, obj, my):
my.append((self.name, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
group_class = FooDirective
@@ -922,6 +923,10 @@ def test_config_group():
def perform(self, obj, my):
my.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.bar(name='bye')
def f():
pass
@@ -947,10 +952,6 @@ def test_before_group_without_use():
assert self.before
self.l.append((name, obj))
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': Registry
@@ -969,7 +970,6 @@ def test_before_group_without_use():
def before(my):
my.before = True
- @MyApp.directive('bar')
class BarDirective(Action):
group_class = FooDirective
@@ -982,6 +982,10 @@ def test_before_group_without_use():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
commit(MyApp)
assert MyApp.config.my.before
@@ -998,10 +1002,6 @@ def test_after():
assert not self.after
self.l.append((name, obj))
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': Registry
@@ -1020,6 +1020,9 @@ def test_after():
def after(my):
my.after = True
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo(name='hello')
def f():
pass
@@ -1042,10 +1045,6 @@ def test_after_without_use():
assert not self.after
self.l.append((name, obj))
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': Registry
@@ -1064,6 +1063,9 @@ def test_after_without_use():
def after(my):
my.after = True
+ class MyApp(App):
+ foo = directive(FooDirective)
+
commit(MyApp)
assert MyApp.config.my.after
@@ -1071,10 +1073,6 @@ def test_after_without_use():
def test_action_loop_should_conflict():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1089,6 +1087,9 @@ def test_action_loop_should_conflict():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
for i in range(2):
@MyApp.foo('hello')
def f():
@@ -1099,12 +1100,8 @@ def test_action_loop_should_conflict():
def test_action_init_only_during_commit():
- class MyApp(App):
- pass
-
init_called = []
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1120,6 +1117,9 @@ def test_action_init_only_during_commit():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1132,10 +1132,6 @@ def test_action_init_only_during_commit():
def test_registry_should_exist_even_without_directive_use():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1150,19 +1146,15 @@ def test_registry_should_exist_even_without_directive_use():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
commit(MyApp)
assert MyApp.config.my == []
def test_registry_should_exist_even_without_directive_use_subclass():
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1177,6 +1169,12 @@ def test_registry_should_exist_even_without_directive_use_subclass():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ pass
+
commit(MyApp, SubApp)
assert MyApp.config.my == []
@@ -1184,10 +1182,6 @@ def test_registry_should_exist_even_without_directive_use_subclass():
def test_rerun_commit():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1202,6 +1196,9 @@ def test_rerun_commit():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1215,10 +1212,6 @@ def test_rerun_commit():
def test_rerun_commit_add_directive():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1233,6 +1226,9 @@ def test_rerun_commit_add_directive():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1250,13 +1246,6 @@ def test_rerun_commit_add_directive():
def test_order_subclass():
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1271,6 +1260,12 @@ def test_order_subclass():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ pass
+
@SubApp.foo('c')
def h():
pass
@@ -1289,9 +1284,6 @@ def test_order_subclass():
def test_registry_single_factory_argument():
- class MyApp(App):
- pass
-
class Other(object):
factory_arguments = {
'my': list
@@ -1300,7 +1292,6 @@ def test_registry_single_factory_argument():
def __init__(self, my):
self.my = my
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list,
@@ -1316,6 +1307,9 @@ def test_registry_single_factory_argument():
def perform(self, obj, my, other):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1326,9 +1320,6 @@ def test_registry_single_factory_argument():
def test_registry_factory_argument_introduces_new_registry():
- class MyApp(App):
- pass
-
class Other(object):
factory_arguments = {
'my': list
@@ -1337,7 +1328,6 @@ def test_registry_factory_argument_introduces_new_registry():
def __init__(self, my):
self.my = my
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'other': Other
@@ -1352,6 +1342,9 @@ def test_registry_factory_argument_introduces_new_registry():
def perform(self, obj, other):
other.my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1363,12 +1356,6 @@ def test_registry_factory_argument_introduces_new_registry():
def test_registry_factory_argument_introduces_new_registry_subclass():
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
class IsUsedElsewhere(object):
poked = False
@@ -1380,7 +1367,6 @@ def test_registry_factory_argument_introduces_new_registry_subclass():
def __init__(self, my):
self.my = my
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'other': Other
@@ -1396,6 +1382,12 @@ def test_registry_factory_argument_introduces_new_registry_subclass():
assert not other.my.poked
other.my.poked = True
+ class MyApp(App):
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ pass
+
@MyApp.foo('hello')
def f():
pass
@@ -1409,9 +1401,6 @@ def test_registry_factory_argument_introduces_new_registry_subclass():
def test_registry_multiple_factory_arguments():
- class MyApp(App):
- pass
-
class Other(object):
factory_arguments = {
'my': list,
@@ -1422,7 +1411,6 @@ def test_registry_multiple_factory_arguments():
self.my = my
self.my2 = my2
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list,
@@ -1440,6 +1428,9 @@ def test_registry_multiple_factory_arguments():
my.append((self.message, obj))
my2.append('blah')
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1451,9 +1442,6 @@ def test_registry_multiple_factory_arguments():
def test_registry_factory_arguments_depends():
- class MyApp(App):
- pass
-
class Other(object):
factory_arguments = {
'my': list
@@ -1462,7 +1450,6 @@ def test_registry_factory_arguments_depends():
def __init__(self, my):
self.my = my
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'my': list
@@ -1477,7 +1464,6 @@ def test_registry_factory_arguments_depends():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
config = {
'other': Other
@@ -1494,6 +1480,10 @@ def test_registry_factory_arguments_depends():
def perform(self, obj, other):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -1504,9 +1494,6 @@ def test_registry_factory_arguments_depends():
def test_registry_factory_arguments_depends_complex():
- class MyApp(App):
- pass
-
class Registry(object):
pass
@@ -1518,22 +1505,24 @@ def test_registry_factory_arguments_depends_complex():
def __init__(self, registry):
self.registry = registry
- @MyApp.directive('setting')
class SettingAction(Action):
config = {'registry': Registry}
- @MyApp.directive('predicate')
class PredicateAction(Action):
config = {'predicate_registry': PredicateRegistry}
depends = [SettingAction]
- @MyApp.directive('view')
class ViewAction(Action):
config = {'registry': Registry}
depends = [PredicateAction]
+ class MyApp(App):
+ setting = directive(SettingAction)
+ predicate = directive(PredicateAction)
+ view = directive(ViewAction)
+
commit(MyApp)
assert MyApp.config.registry is MyApp.config.predicate_registry.registry
@@ -1551,11 +1540,7 @@ def test_is_committed():
def test_registry_config_inconsistent():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
- class MyDirective(Action):
+ class FooDirective(Action):
config = {
'my': list
}
@@ -1569,8 +1554,7 @@ def test_registry_config_inconsistent():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('bar')
- class MyDirective(Action): # flake8: noqa
+ class BarDirective(Action):
config = {
'my': dict
}
@@ -1584,14 +1568,15 @@ def test_registry_config_inconsistent():
def perform(self, obj, my):
my[self.message] = obj
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
def test_registry_factory_argument_inconsistent():
- class MyApp(App):
- pass
-
class Other(object):
factory_arguments = {
'my': list
@@ -1608,7 +1593,6 @@ def test_registry_factory_argument_inconsistent():
def __init__(self, my):
self.my = my
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'other': Other,
@@ -1624,14 +1608,14 @@ def test_registry_factory_argument_inconsistent():
def perform(self, obj, other, yetanother):
pass
+ class MyApp(App):
+ foo = directive(MyDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
def test_registry_factory_argument_and_config_inconsistent():
- class MyApp(App):
- pass
-
class Other(object):
factory_arguments = {
'my': dict
@@ -1640,7 +1624,6 @@ def test_registry_factory_argument_and_config_inconsistent():
def __init__(self, my):
self.my = my
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list,
@@ -1656,33 +1639,37 @@ def test_registry_factory_argument_and_config_inconsistent():
def perform(self, obj, my, other):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
-# Due to PEP 3155, having this class defined at the module top-level ensures
-# that its repr is the same in both Python 2 and 3.
-class MyAppForRepr(App):
- pass
+# Defining this class at module level ensures its repr is the same
+# across Python versions (see PEP 3155).
+class ReprDirective(Action):
+ """Doc"""
+ config = {
+ 'my': list
+ }
+ def __init__(self, message):
+ self.message = message
-def test_directive_repr():
+ def identifier(self, my):
+ return self.message
- @MyAppForRepr.directive('foo')
- class MyDirective(Action):
- """Doc"""
- config = {
- 'my': list
- }
+ def perform(self, obj, my):
+ my.append((self.message, obj))
- def __init__(self, message):
- self.message = message
- def identifier(self, my):
- return self.message
+class MyAppForRepr(App):
+ foo = directive(ReprDirective)
- def perform(self, obj, my):
- my.append((self.message, obj))
+
+def test_directive_repr():
+ MyAppForRepr.commit()
assert repr(MyAppForRepr.foo) == (
"<bound method AppMeta.foo of "
@@ -1690,13 +1677,6 @@ def test_directive_repr():
def test_app_class_passed_into_action():
- class MyApp(App):
- touched = []
-
- class SubApp(MyApp):
- touched = []
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -1714,6 +1694,14 @@ def test_app_class_passed_into_action():
app_class.touched.append(None)
my.append((self.message, obj))
+ class MyApp(App):
+ touched = []
+
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ touched = []
+
@MyApp.foo('hello')
def f():
pass
@@ -1732,11 +1720,7 @@ def test_app_class_passed_into_action():
assert SubApp.touched == [None]
-
def test_app_class_passed_into_factory():
- class MyApp(App):
- touched = False
-
class Other(object):
factory_arguments = {
'my': list
@@ -1751,7 +1735,6 @@ def test_app_class_passed_into_factory():
def touch(self):
self.app_class.touched = True
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'other': Other
@@ -1766,6 +1749,11 @@ def test_app_class_passed_into_factory():
def perform(self, obj, other):
other.touch()
+ class MyApp(App):
+ touched = False
+
+ foo = directive(MyDirective)
+
@MyApp.foo()
def f():
pass
@@ -1778,9 +1766,6 @@ def test_app_class_passed_into_factory():
def test_app_class_passed_into_factory_no_factory_arguments():
- class MyApp(App):
- touched = False
-
class Other(object):
app_class_arg = True
@@ -1790,7 +1775,6 @@ def test_app_class_passed_into_factory_no_factory_arguments():
def touch(self):
self.app_class.touched = True
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'other': Other
@@ -1805,6 +1789,11 @@ def test_app_class_passed_into_factory_no_factory_arguments():
def perform(self, obj, other):
other.touch()
+ class MyApp(App):
+ touched = False
+
+ foo = directive(MyDirective)
+
@MyApp.foo()
def f():
pass
@@ -1817,12 +1806,6 @@ def test_app_class_passed_into_factory_no_factory_arguments():
def test_app_class_passed_into_factory_separation():
- class MyApp(App):
- touched = False
-
- class SubApp(MyApp):
- touched = False
-
class Other(object):
factory_arguments = {
'my': list
@@ -1837,7 +1820,6 @@ def test_app_class_passed_into_factory_separation():
def touch(self):
self.app_class.touched = True
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'other': Other
@@ -1852,6 +1834,13 @@ def test_app_class_passed_into_factory_separation():
def perform(self, obj, other):
other.touch()
+ class MyApp(App):
+ touched = False
+ foo = directive(MyDirective)
+
+ class SubApp(MyApp):
+ touched = False
+
@MyApp.foo()
def f():
pass
@@ -1869,16 +1858,7 @@ def test_app_class_passed_into_factory_separation():
assert SubApp.touched
-
def test_app_class_cleanup():
- class MyApp(App):
- touched = []
-
- @classmethod
- def clean(cls):
- cls.touched = []
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
}
@@ -1894,6 +1874,15 @@ def test_app_class_cleanup():
def perform(self, obj, app_class):
app_class.touched.append(None)
+ class MyApp(App):
+ touched = []
+
+ @classmethod
+ def clean(cls):
+ cls.touched = []
+
+ foo = directive(MyDirective)
+
@MyApp.foo()
def f():
pass
diff --git a/dectate/tests/test_error.py b/dectate/tests/test_error.py
index b166fba..7b3ec15 100644
--- a/dectate/tests/test_error.py
+++ b/dectate/tests/test_error.py
@@ -1,4 +1,4 @@
-from dectate.app import App
+from dectate.app import App, directive
from dectate.config import commit, Action, Composite
from dectate.error import (ConflictError, ConfigError, DirectiveError,
@@ -9,10 +9,6 @@ import pytest
def test_directive_error_in_action():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
def __init__(self, name):
self.name = name
@@ -23,6 +19,9 @@ def test_directive_error_in_action():
def perform(self, obj):
raise DirectiveError("A real problem")
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -37,10 +36,6 @@ def test_directive_error_in_action():
def test_directive_error_in_composite():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Composite):
def __init__(self, name):
self.name = name
@@ -48,6 +43,9 @@ def test_directive_error_in_composite():
def actions(self, obj):
raise DirectiveError("Something went wrong")
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -62,10 +60,6 @@ def test_directive_error_in_composite():
def test_conflict_error():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
def __init__(self, name):
self.name = name
@@ -76,6 +70,9 @@ def test_conflict_error():
def perform(self, obj):
raise DirectiveError("A real problem")
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -95,10 +92,6 @@ def test_conflict_error():
def test_with_statement_error():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
def __init__(self, model, name):
self.model = model
@@ -110,6 +103,9 @@ def test_with_statement_error():
def perform(self, obj):
raise DirectiveError("A real problem")
+ class MyApp(App):
+ foo = directive(FooDirective)
+
class Dummy(object):
pass
@@ -133,10 +129,6 @@ def test_with_statement_error():
def test_composite_codeinfo_propagation():
- class MyApp(App):
- pass
-
- @MyApp.directive('sub')
class SubDirective(Action):
config = {
'my': list
@@ -151,7 +143,6 @@ def test_composite_codeinfo_propagation():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('composite')
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
@@ -159,6 +150,10 @@ def test_composite_codeinfo_propagation():
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
+ class MyApp(App):
+ _sub = directive(SubDirective)
+ composite = directive(CompositeDirective)
+
@MyApp.composite(['a'])
def f():
pass
@@ -177,10 +172,6 @@ def test_composite_codeinfo_propagation():
def test_type_error_not_enough_arguments():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -195,6 +186,9 @@ def test_type_error_not_enough_arguments():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
# not enough arguments
@MyApp.foo()
def f():
@@ -208,10 +202,6 @@ def test_type_error_not_enough_arguments():
def test_type_error_too_many_arguments():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -226,7 +216,10 @@ def test_type_error_too_many_arguments():
def perform(self, obj, my):
my.append((self.message, obj))
- # not enough arguments
+ class MyApp(App):
+ foo = directive(MyDirective)
+
+ # too many arguments
@MyApp.foo('a', 'b')
def f():
pass
@@ -239,10 +232,6 @@ def test_type_error_too_many_arguments():
def test_cannot_group_class_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -257,29 +246,28 @@ def test_cannot_group_class_group_class():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
group_class = FooDirective
def __init__(self, message):
pass
- @MyApp.directive('qux')
class QuxDirective(Action):
group_class = BarDirective # should go to FooDirective instead
def __init__(self, message):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+ qux = directive(QuxDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
def test_cannot_use_config_with_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -294,7 +282,6 @@ def test_cannot_use_config_with_group_class():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
config = {
'bar': list
@@ -305,15 +292,15 @@ def test_cannot_use_config_with_group_class():
def __init__(self, message):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
def test_cann_inherit_config_with_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -328,21 +315,20 @@ def test_cann_inherit_config_with_group_class():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(FooDirective):
group_class = FooDirective
def __init__(self, message):
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
commit(MyApp)
def test_cannot_use_before_with_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -357,7 +343,6 @@ def test_cannot_use_before_with_group_class():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
group_class = FooDirective
@@ -365,15 +350,15 @@ def test_cannot_use_before_with_group_class():
def before():
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
def test_can_inherit_before_with_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -392,18 +377,17 @@ def test_can_inherit_before_with_group_class():
def before(foo):
pass
- @MyApp.directive('bar')
class BarDirective(FooDirective):
group_class = FooDirective
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
commit(MyApp)
def test_cannot_use_after_with_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -418,7 +402,6 @@ def test_cannot_use_after_with_group_class():
def perform(self, obj, foo):
foo.append((self.message, obj))
- @MyApp.directive('bar')
class BarDirective(Action):
group_class = FooDirective
@@ -426,15 +409,15 @@ def test_cannot_use_after_with_group_class():
def after():
pass
+ class MyApp(App):
+ foo = directive(FooDirective)
+ bar = directive(BarDirective)
+
with pytest.raises(ConfigError):
commit(MyApp)
def test_action_without_init():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooDirective(Action):
config = {
'foo': list
@@ -446,6 +429,9 @@ def test_action_without_init():
def perform(self, obj, foo):
foo.append(obj)
+ class MyApp(App):
+ foo = directive(FooDirective)
+
@MyApp.foo()
def f():
pass
@@ -456,10 +442,6 @@ def test_action_without_init():
def test_composite_without_init():
- class MyApp(App):
- pass
-
- @MyApp.directive('sub')
class SubDirective(Action):
config = {
'my': list
@@ -474,11 +456,14 @@ def test_composite_without_init():
def perform(self, obj, my):
my.append((self.message, obj))
- @MyApp.directive('composite')
class CompositeDirective(Composite):
def actions(self, obj):
return [(SubDirective(message), obj) for message in ['a', 'b']]
+ class MyApp(App):
+ _sub = directive(SubDirective)
+ composite = directive(CompositeDirective)
+
commit(MyApp)
@MyApp.composite()
diff --git a/dectate/tests/test_logging.py b/dectate/tests/test_logging.py
index 9af8d21..c15a14d 100644
--- a/dectate/tests/test_logging.py
+++ b/dectate/tests/test_logging.py
@@ -1,5 +1,5 @@
import logging
-from dectate.app import App
+from dectate.app import App, directive
from dectate.config import Action, commit
@@ -35,10 +35,6 @@ def test_simple_config_logging():
log.addHandler(test_handler)
log.setLevel(logging.DEBUG)
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -53,6 +49,9 @@ def test_simple_config_logging():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
@@ -76,10 +75,6 @@ def test_subclass_config_logging():
log.addHandler(test_handler)
log.setLevel(logging.DEBUG)
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -94,6 +89,9 @@ def test_subclass_config_logging():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ foo = directive(MyDirective)
+
class SubApp(MyApp):
pass
@@ -127,10 +125,6 @@ def test_override_logger_name():
log.addHandler(test_handler)
log.setLevel(logging.DEBUG)
- class MyApp(App):
- logger_name = 'morepath.directive'
-
- @MyApp.directive('foo')
class MyDirective(Action):
config = {
'my': list
@@ -145,6 +139,11 @@ def test_override_logger_name():
def perform(self, obj, my):
my.append((self.message, obj))
+ class MyApp(App):
+ logger_name = 'morepath.directive'
+
+ foo = directive(MyDirective)
+
@MyApp.foo('hello')
def f():
pass
diff --git a/dectate/tests/test_query.py b/dectate/tests/test_query.py
index dd6b185..6e97bdd 100644
--- a/dectate/tests/test_query.py
+++ b/dectate/tests/test_query.py
@@ -1,14 +1,10 @@
import pytest
from dectate import (
- Query, App, Action, Composite, commit, QueryError, NOT_FOUND)
+ Query, App, Action, Composite, directive, commit, QueryError, NOT_FOUND)
def test_query():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -23,6 +19,9 @@ def test_query():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -42,10 +41,6 @@ def test_query():
def test_query_directive_name():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -60,6 +55,9 @@ def test_query_directive_name():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -79,10 +77,6 @@ def test_query_directive_name():
def test_multi_action_query():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -97,7 +91,6 @@ def test_multi_action_query():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.directive('bar')
class BarAction(Action):
config = {
'registry': list
@@ -112,6 +105,10 @@ def test_multi_action_query():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+ bar = directive(BarAction)
+
@MyApp.foo('a')
def f():
pass
@@ -131,10 +128,6 @@ def test_multi_action_query():
def test_filter():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -149,6 +142,9 @@ def test_filter():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -167,10 +163,6 @@ def test_filter():
def test_filter_multiple_fields():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -190,6 +182,9 @@ def test_filter_multiple_fields():
def perform(self, obj, registry):
registry.append((self.model, self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
class Alpha(object):
pass
@@ -223,10 +218,6 @@ def test_filter_multiple_fields():
def test_filter_not_found():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -241,6 +232,9 @@ def test_filter_not_found():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -257,10 +251,6 @@ def test_filter_not_found():
def test_filter_different_attribute_name():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -279,6 +269,9 @@ def test_filter_different_attribute_name():
def perform(self, obj, registry):
registry.append((self._name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -295,10 +288,6 @@ def test_filter_different_attribute_name():
def test_filter_get_value():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
def filter_get_value(self, name):
return self.kw.get(name, NOT_FOUND)
@@ -312,6 +301,9 @@ def test_filter_get_value():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo(x='a', y='b')
def f():
pass
@@ -333,10 +325,6 @@ def test_filter_get_value():
def test_filter_name_and_get_value():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
filter_name = {
'name': '_name'
@@ -355,6 +343,9 @@ def test_filter_name_and_get_value():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo(name='hello', x='a', y='b')
def f():
pass
@@ -371,10 +362,6 @@ def test_filter_name_and_get_value():
def test_filter_get_value_and_default():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
def filter_get_value(self, name):
return self.kw.get(name, NOT_FOUND)
@@ -389,6 +376,9 @@ def test_filter_get_value_and_default():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo(name='hello', x='a', y='b')
def f():
pass
@@ -405,10 +395,6 @@ def test_filter_get_value_and_default():
def test_filter_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('view')
class ViewAction(Action):
config = {
'registry': list
@@ -427,6 +413,9 @@ def test_filter_class():
def perform(self, obj, registry):
registry.append((self.model, obj))
+ class MyApp(App):
+ view = directive(ViewAction)
+
class Alpha(object):
pass
@@ -467,10 +456,6 @@ def test_filter_class():
def test_query_group_class():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -485,10 +470,13 @@ def test_query_group_class():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.directive('bar')
class BarAction(FooAction):
group_class = FooAction
+ class MyApp(App):
+ foo = directive(FooAction)
+ bar = directive(BarAction)
+
@MyApp.foo('a')
def f():
pass
@@ -508,10 +496,6 @@ def test_query_group_class():
def test_query_on_group_class_action():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -526,10 +510,13 @@ def test_query_on_group_class_action():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.directive('bar')
class BarAction(FooAction):
group_class = FooAction
+ class MyApp(App):
+ foo = directive(FooAction)
+ bar = directive(BarAction)
+
@MyApp.foo('a')
def f():
pass
@@ -549,10 +536,6 @@ def test_query_on_group_class_action():
def test_multi_query_on_group_class_action():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -567,10 +550,13 @@ def test_multi_query_on_group_class_action():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.directive('bar')
class BarAction(FooAction):
group_class = FooAction
+ class MyApp(App):
+ foo = directive(FooAction)
+ bar = directive(BarAction)
+
@MyApp.foo('a')
def f():
pass
@@ -590,13 +576,6 @@ def test_multi_query_on_group_class_action():
def test_inheritance():
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -611,6 +590,12 @@ def test_inheritance():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
+ class SubApp(MyApp):
+ pass
+
@MyApp.foo('a')
def f():
pass
@@ -630,10 +615,6 @@ def test_inheritance():
def test_composite_action():
- class MyApp(App):
- pass
-
- @MyApp.private_action_class
class SubAction(Action):
config = {
'registry': list
@@ -648,7 +629,6 @@ def test_composite_action():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.directive('composite')
class CompositeAction(Composite):
query_classes = [
SubAction
@@ -660,6 +640,10 @@ def test_composite_action():
def actions(self, obj):
return [(SubAction(name), obj) for name in self.names]
+ class MyApp(App):
+ _sub = directive(SubAction)
+ composite = directive(CompositeAction)
+
@MyApp.composite(['a', 'b'])
def f():
pass
@@ -675,10 +659,6 @@ def test_composite_action():
def test_composite_action_without_query_classes():
- class MyApp(App):
- pass
-
- @MyApp.private_action_class
class SubAction(Action):
config = {
'registry': list
@@ -693,7 +673,6 @@ def test_composite_action_without_query_classes():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.directive('composite')
class CompositeAction(Composite):
def __init__(self, names):
self.names = names
@@ -701,6 +680,10 @@ def test_composite_action_without_query_classes():
def actions(self, obj):
return [(SubAction(name), obj) for name in self.names]
+ class MyApp(App):
+ _sub = directive(SubAction)
+ composite = directive(CompositeAction)
+
@MyApp.composite(['a', 'b'])
def f():
pass
@@ -714,10 +697,6 @@ def test_composite_action_without_query_classes():
def test_nested_composite_action():
- class MyApp(App):
- pass
-
- @MyApp.private_action_class
class SubSubAction(Action):
config = {
'registry': list
@@ -732,7 +711,6 @@ def test_nested_composite_action():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @MyApp.private_action_class
class SubAction(Composite):
query_classes = [
SubSubAction
@@ -744,7 +722,6 @@ def test_nested_composite_action():
def actions(self, obj):
return [(SubSubAction(name), obj) for name in self.names]
- @MyApp.directive('composite')
class CompositeAction(Composite):
query_classes = [
SubAction
@@ -757,6 +734,11 @@ def test_nested_composite_action():
for i in range(self.amount):
yield SubAction(['a%s' % i, 'b%s' % i]), obj
+ class MyApp(App):
+ _subsub = directive(SubSubAction)
+ _sub = directive(SubAction)
+ composite = directive(CompositeAction)
+
@MyApp.composite(2)
def f():
pass
@@ -774,13 +756,6 @@ def test_nested_composite_action():
def test_query_action_for_other_app():
- class MyApp(App):
- pass
-
- class OtherApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
config = {
'registry': list
@@ -795,7 +770,6 @@ def test_query_action_for_other_app():
def perform(self, obj, registry):
registry.append((self.name, obj))
- @OtherApp.directive('foo')
class BarAction(Action):
config = {
'registry': list
@@ -810,6 +784,12 @@ def test_query_action_for_other_app():
def perform(self, obj, registry):
registry.append((self.name, obj))
+ class MyApp(App):
+ foo = directive(FooAction)
+
+ class OtherApp(App):
+ bar = directive(BarAction)
+
@MyApp.foo('a')
def f():
pass
diff --git a/dectate/tests/test_tool.py b/dectate/tests/test_tool.py
index ed49afe..cf6b361 100644
--- a/dectate/tests/test_tool.py
+++ b/dectate/tests/test_tool.py
@@ -2,7 +2,7 @@ import pytest
from argparse import ArgumentTypeError
from dectate.config import Action, commit
-from dectate.app import App
+from dectate.app import App, directive
from dectate.tool import (parse_app_class, parse_directive, parse_filters,
convert_filters,
convert_dotted_name, convert_bool,
@@ -122,10 +122,6 @@ def test_convert_filters_value_error():
def test_query_tool_output():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
def __init__(self, name):
self.name = name
@@ -136,6 +132,9 @@ def test_query_tool_output():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -155,8 +154,18 @@ def test_query_tool_output():
def test_query_tool_output_multiple_apps():
+ class FooAction(Action):
+ def __init__(self, name):
+ self.name = name
+
+ def identifier(self):
+ return self.name
+
+ def perform(self, obj):
+ pass
+
class Base(App):
- pass
+ foo = directive(FooAction)
class AlphaApp(Base):
pass
@@ -167,17 +176,6 @@ def test_query_tool_output_multiple_apps():
class GammaApp(Base):
pass
- @Base.directive('foo')
- class FooAction(Action):
- def __init__(self, name):
- self.name = name
-
- def identifier(self):
- return self.name
-
- def perform(self, obj):
- pass
-
@AlphaApp.foo('a')
def f():
pass
@@ -194,10 +192,6 @@ def test_query_tool_output_multiple_apps():
def test_query_app():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
filter_convert = {
'count': int
@@ -212,6 +206,9 @@ def test_query_app():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo(1)
def f():
pass
@@ -228,10 +225,6 @@ def test_query_app():
def test_query_tool_uncommitted():
- class MyApp(App):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
def __init__(self, name):
self.name = name
@@ -242,6 +235,9 @@ def test_query_tool_uncommitted():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
@MyApp.foo('a')
def f():
pass
@@ -277,13 +273,6 @@ def test_app_without_directive():
def test_inheritance():
- class MyApp(App):
- pass
-
- class SubApp(MyApp):
- pass
-
- @MyApp.directive('foo')
class FooAction(Action):
filter_convert = {
'count': int
@@ -298,6 +287,12 @@ def test_inheritance():
def perform(self, obj):
pass
+ class MyApp(App):
+ foo = directive(FooAction)
+
+ class SubApp(MyApp):
+ pass
+
@MyApp.foo(1)
def f():
pass
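Taken together, these hunks apply one mechanical migration: each removed `@MyApp.directive('name')` class decorator becomes a `name = directive(ActionClass)` class attribute on the `App` subclass, so action classes are defined first and can be shared across apps. A minimal before/after sketch of the pattern, assembled from the same minimal action the tool tests use (the `directive` and `commit` imports match the diffs above):

```
from dectate import App, Action, commit, directive

class FooAction(Action):
    def __init__(self, name):
        self.name = name

    def identifier(self):
        # uniquely identifies the action, used to detect conflicts
        return self.name

    def perform(self, obj):
        pass

# Old style, removed by these diffs: register via a decorator,
# tying the action class to one specific app.
#
#   class MyApp(App):
#       pass
#
#   @MyApp.directive('foo')
#   class FooAction(Action): ...

# New style, added by these diffs: declare the directive as a
# class attribute, keeping the action class app-independent.
class MyApp(App):
    foo = directive(FooAction)

@MyApp.foo('a')
def f():
    pass

commit(MyApp)
```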
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 6
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test,coverage,pep8,docs]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-remove-stale-bytecode pytest-flake8",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"develop_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
bleach==4.1.0
build==0.9.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
check-manifest==0.48
colorama==0.4.5
coverage==6.2
cryptography==40.0.2
-e git+https://github.com/morepath/dectate.git@704615da9f6bf57d57e4f42d3a710475de777b08#egg=dectate
distlib==0.3.9
docutils==0.17.1
filelock==3.4.1
flake8==5.0.4
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
jeepney==0.7.1
Jinja2==3.0.3
keyring==23.4.1
mando==0.7.1
MarkupSafe==2.0.1
mccabe==0.7.0
packaging==21.3
pep440==0.1.1
pep517==0.13.1
pkginfo==1.10.0
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pyroma==4.0
pytest==7.0.1
pytest-cov==4.0.0
pytest-flake8==1.1.1
pytest-remove-stale-bytecode==5.0.1
pytz==2025.2
radon==6.0.1
readme-renderer==34.0
requests==2.27.1
requests-toolbelt==1.0.0
rfc3986==1.5.0
SecretStorage==3.3.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
-e git+ssh://[email protected]/nebius/swebench_matterhorn.git@ae4d15b4472bd322342107dd10c47d793189f5b2#egg=swebench_matterhorn
toml==0.10.2
tomli==1.2.3
tox==3.28.0
tqdm==4.64.1
twine==3.8.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
webencodings==0.5.1
zest.releaser==7.3.0
zipp==3.6.0
| name: dectate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- bleach==4.1.0
- build==0.9.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- check-manifest==0.48
- colorama==0.4.5
- coverage==6.2
- cryptography==40.0.2
- distlib==0.3.9
- docutils==0.17.1
- filelock==3.4.1
- flake8==5.0.4
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jeepney==0.7.1
- jinja2==3.0.3
- keyring==23.4.1
- mando==0.7.1
- markupsafe==2.0.1
- mccabe==0.7.0
- packaging==21.3
- pep440==0.1.1
- pep517==0.13.1
- pkginfo==1.10.0
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pyroma==4.0
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-flake8==1.1.1
- pytest-remove-stale-bytecode==5.0.1
- pytz==2025.2
- radon==6.0.1
- readme-renderer==34.0
- requests==2.27.1
- requests-toolbelt==1.0.0
- rfc3986==1.5.0
- secretstorage==3.3.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- tqdm==4.64.1
- twine==3.8.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- webencodings==0.5.1
- zest-releaser==7.3.0
- zipp==3.6.0
prefix: /opt/conda/envs/dectate
| [
"dectate/tests/test_directive.py::test_simple",
"dectate/tests/test_directive.py::test_decorator",
"dectate/tests/test_directive.py::test_commit_method",
"dectate/tests/test_directive.py::test_conflict_same_directive",
"dectate/tests/test_directive.py::test_app_inherit",
"dectate/tests/test_directive.py::test_app_override",
"dectate/tests/test_directive.py::test_different_group_no_conflict",
"dectate/tests/test_directive.py::test_same_group_conflict",
"dectate/tests/test_directive.py::test_discriminator_conflict",
"dectate/tests/test_directive.py::test_discriminator_same_group_conflict",
"dectate/tests/test_directive.py::test_discriminator_no_conflict",
"dectate/tests/test_directive.py::test_discriminator_different_group_no_conflict",
"dectate/tests/test_directive.py::test_depends",
"dectate/tests/test_directive.py::test_composite",
"dectate/tests/test_directive.py::test_composite_change_object",
"dectate/tests/test_directive.py::test_composite_private_sub",
"dectate/tests/test_directive.py::test_composite_private_composite",
"dectate/tests/test_directive.py::test_nested_composite",
"dectate/tests/test_directive.py::test_with_statement_kw",
"dectate/tests/test_directive.py::test_with_statement_args",
"dectate/tests/test_directive.py::test_before",
"dectate/tests/test_directive.py::test_before_without_use",
"dectate/tests/test_directive.py::test_before_group",
"dectate/tests/test_directive.py::test_config_group",
"dectate/tests/test_directive.py::test_before_group_without_use",
"dectate/tests/test_directive.py::test_after",
"dectate/tests/test_directive.py::test_after_without_use",
"dectate/tests/test_directive.py::test_action_loop_should_conflict",
"dectate/tests/test_directive.py::test_action_init_only_during_commit",
"dectate/tests/test_directive.py::test_registry_should_exist_even_without_directive_use",
"dectate/tests/test_directive.py::test_registry_should_exist_even_without_directive_use_subclass",
"dectate/tests/test_directive.py::test_rerun_commit",
"dectate/tests/test_directive.py::test_rerun_commit_add_directive",
"dectate/tests/test_directive.py::test_order_subclass",
"dectate/tests/test_directive.py::test_registry_single_factory_argument",
"dectate/tests/test_directive.py::test_registry_factory_argument_introduces_new_registry",
"dectate/tests/test_directive.py::test_registry_factory_argument_introduces_new_registry_subclass",
"dectate/tests/test_directive.py::test_registry_multiple_factory_arguments",
"dectate/tests/test_directive.py::test_registry_factory_arguments_depends",
"dectate/tests/test_directive.py::test_registry_factory_arguments_depends_complex",
"dectate/tests/test_directive.py::test_is_committed",
"dectate/tests/test_directive.py::test_registry_config_inconsistent",
"dectate/tests/test_directive.py::test_registry_factory_argument_inconsistent",
"dectate/tests/test_directive.py::test_registry_factory_argument_and_config_inconsistent",
"dectate/tests/test_directive.py::test_directive_repr",
"dectate/tests/test_directive.py::test_app_class_passed_into_action",
"dectate/tests/test_directive.py::test_app_class_passed_into_factory",
"dectate/tests/test_directive.py::test_app_class_passed_into_factory_no_factory_arguments",
"dectate/tests/test_directive.py::test_app_class_passed_into_factory_separation",
"dectate/tests/test_directive.py::test_app_class_cleanup",
"dectate/tests/test_error.py::test_directive_error_in_action",
"dectate/tests/test_error.py::test_directive_error_in_composite",
"dectate/tests/test_error.py::test_conflict_error",
"dectate/tests/test_error.py::test_with_statement_error",
"dectate/tests/test_error.py::test_composite_codeinfo_propagation",
"dectate/tests/test_error.py::test_type_error_not_enough_arguments",
"dectate/tests/test_error.py::test_type_error_too_many_arguments",
"dectate/tests/test_error.py::test_cannot_group_class_group_class",
"dectate/tests/test_error.py::test_cannot_use_config_with_group_class",
"dectate/tests/test_error.py::test_cann_inherit_config_with_group_class",
"dectate/tests/test_error.py::test_cannot_use_before_with_group_class",
"dectate/tests/test_error.py::test_can_inherit_before_with_group_class",
"dectate/tests/test_error.py::test_cannot_use_after_with_group_class",
"dectate/tests/test_error.py::test_action_without_init",
"dectate/tests/test_error.py::test_composite_without_init",
"dectate/tests/test_logging.py::test_intercept_logging",
"dectate/tests/test_logging.py::test_simple_config_logging",
"dectate/tests/test_logging.py::test_subclass_config_logging",
"dectate/tests/test_logging.py::test_override_logger_name",
"dectate/tests/test_query.py::test_query",
"dectate/tests/test_query.py::test_query_directive_name",
"dectate/tests/test_query.py::test_multi_action_query",
"dectate/tests/test_query.py::test_filter",
"dectate/tests/test_query.py::test_filter_multiple_fields",
"dectate/tests/test_query.py::test_filter_not_found",
"dectate/tests/test_query.py::test_filter_different_attribute_name",
"dectate/tests/test_query.py::test_filter_get_value",
"dectate/tests/test_query.py::test_filter_name_and_get_value",
"dectate/tests/test_query.py::test_filter_get_value_and_default",
"dectate/tests/test_query.py::test_filter_class",
"dectate/tests/test_query.py::test_query_group_class",
"dectate/tests/test_query.py::test_query_on_group_class_action",
"dectate/tests/test_query.py::test_multi_query_on_group_class_action",
"dectate/tests/test_query.py::test_inheritance",
"dectate/tests/test_query.py::test_composite_action",
"dectate/tests/test_query.py::test_composite_action_without_query_classes",
"dectate/tests/test_query.py::test_nested_composite_action",
"dectate/tests/test_query.py::test_query_action_for_other_app",
"dectate/tests/test_tool.py::test_parse_app_class_main",
"dectate/tests/test_tool.py::test_parse_app_class_cannot_import",
"dectate/tests/test_tool.py::test_parse_app_class_not_a_class",
"dectate/tests/test_tool.py::test_parse_app_class_no_app_class",
"dectate/tests/test_tool.py::test_parse_directive_main",
"dectate/tests/test_tool.py::test_parse_directive_no_attribute",
"dectate/tests/test_tool.py::test_parse_directive_not_a_directive",
"dectate/tests/test_tool.py::test_parse_filters_main",
"dectate/tests/test_tool.py::test_parse_filters_error",
"dectate/tests/test_tool.py::test_convert_filters_main",
"dectate/tests/test_tool.py::test_convert_filters_default",
"dectate/tests/test_tool.py::test_convert_filters_error",
"dectate/tests/test_tool.py::test_convert_filters_value_error",
"dectate/tests/test_tool.py::test_query_tool_output",
"dectate/tests/test_tool.py::test_query_tool_output_multiple_apps",
"dectate/tests/test_tool.py::test_query_app",
"dectate/tests/test_tool.py::test_query_tool_uncommitted",
"dectate/tests/test_tool.py::test_convert_bool",
"dectate/tests/test_tool.py::test_convert_dotted_name_builtin",
"dectate/tests/test_tool.py::test_app_without_directive",
"dectate/tests/test_tool.py::test_inheritance"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 780 | [
"CHANGES.txt",
"dectate/__init__.py",
"doc/usage.rst",
"dectate/app.py",
"dectate/config.py",
"doc/api.rst"
]
| [
"CHANGES.txt",
"dectate/__init__.py",
"doc/usage.rst",
"dectate/app.py",
"dectate/config.py",
"doc/api.rst"
]
|
|
conan-io__conan-508 | 2167f1f59f670b87acb69efd117f79ff506ed99f | 2016-09-27 16:24:01 | 2167f1f59f670b87acb69efd117f79ff506ed99f | diff --git a/conans/client/deps_builder.py b/conans/client/deps_builder.py
index 341d1c41a..83c9c8821 100644
--- a/conans/client/deps_builder.py
+++ b/conans/client/deps_builder.py
@@ -416,7 +416,7 @@ class DepsBuilder(object):
def _create_new_node(self, current_node, dep_graph, requirement, public_deps, name_req):
""" creates and adds a new node to the dependency graph
"""
- conanfile_path = self._retriever.get_conanfile(requirement.conan_reference)
+ conanfile_path = self._retriever.get_recipe(requirement.conan_reference)
output = ScopedOutput(str(requirement.conan_reference), self._output)
dep_conanfile = self._loader.load_conan(conanfile_path, output)
if dep_conanfile:
diff --git a/conans/client/proxy.py b/conans/client/proxy.py
index 57ee121b9..cdecc8855 100644
--- a/conans/client/proxy.py
+++ b/conans/client/proxy.py
@@ -70,17 +70,17 @@ class ConanProxy(object):
remote = self._registry.get_ref(package_reference.conan)
self._manifest_manager.check_package(package_reference, remote)
- def get_conanfile(self, conan_reference):
+ def get_recipe(self, conan_reference):
output = ScopedOutput(str(conan_reference), self._out)
def _refresh():
- conan_dir_path = self._client_cache.export(conan_reference)
- rmdir(conan_dir_path)
+ export_path = self._client_cache.export(conan_reference)
+ rmdir(export_path)
# It might need to remove shortpath
rmdir(self._client_cache.source(conan_reference), True)
current_remote, _ = self._get_remote(conan_reference)
output.info("Retrieving from remote '%s'..." % current_remote.name)
- self._remote_manager.get_conanfile(conan_reference, current_remote)
+ self._remote_manager.get_recipe(conan_reference, export_path, current_remote)
if self._update:
output.info("Updated!")
else:
@@ -88,7 +88,6 @@ class ConanProxy(object):
# check if it is in disk
conanfile_path = self._client_cache.conanfile(conan_reference)
-
path_exist = path_exists(conanfile_path, self._client_cache.store)
if path_exist:
@@ -122,7 +121,7 @@ class ConanProxy(object):
"to replace it." % (remote.name, conan_reference))
else:
- self._retrieve_conanfile(conan_reference, output)
+ self._retrieve_recipe(conan_reference, output)
if self._manifest_manager:
remote = self._registry.get_ref(conan_reference)
@@ -146,13 +145,14 @@ class ConanProxy(object):
return 0
- def _retrieve_conanfile(self, conan_reference, output):
+ def _retrieve_recipe(self, conan_reference, output):
""" returns the requested conanfile object, retrieving it from
remotes if necessary. Can raise NotFoundException
"""
def _retrieve_from_remote(remote):
output.info("Trying with '%s'..." % remote.name)
- result = self._remote_manager.get_conanfile(conan_reference, remote)
+ export_path = self._client_cache.export(conan_reference)
+ result = self._remote_manager.get_recipe(conan_reference, export_path, remote)
self._registry.set_ref(conan_reference, remote)
return result
@@ -261,7 +261,8 @@ class ConanProxy(object):
def download_packages(self, reference, package_ids):
assert(isinstance(package_ids, list))
remote, _ = self._get_remote(reference)
- self._remote_manager.get_conanfile(reference, remote)
+ export_path = self._client_cache.export(reference)
+ self._remote_manager.get_recipe(reference, export_path, remote)
self._registry.set_ref(reference, remote)
output = ScopedOutput(str(reference), self._out)
for package_id in package_ids:
@@ -280,7 +281,8 @@ class ConanProxy(object):
try:
output.info("Looking for package %s in remote '%s' " % (package_id, remote.name))
# Will raise if not found NotFoundException
- self._remote_manager.get_package(package_reference, remote)
+ package_path = self._client_cache.package(package_reference)
+ self._remote_manager.get_package(package_reference, package_path, remote)
output.success('Package installed %s' % package_id)
return True
except ConanConnectionError:
diff --git a/conans/client/remote_manager.py b/conans/client/remote_manager.py
index 819cfa96c..1d716f8cb 100644
--- a/conans/client/remote_manager.py
+++ b/conans/client/remote_manager.py
@@ -1,16 +1,17 @@
-from conans.errors import ConanException, ConanConnectionError
+import os
+import shutil
+import tarfile
+import time
+import traceback
+
from requests.exceptions import ConnectionError
-from conans.util.files import save, tar_extract, rmdir
+
+from conans.errors import ConanException, ConanConnectionError
+from conans.util.files import tar_extract, rmdir, relative_dirs, mkdir
from conans.util.log import logger
-import traceback
-import os
from conans.paths import PACKAGE_TGZ_NAME, CONANINFO, CONAN_MANIFEST, CONANFILE, EXPORT_TGZ_NAME
-from io import BytesIO
-import tarfile
from conans.util.files import gzopen_without_timestamps
from conans.util.files import touch
-import shutil
-import time
class RemoteManager(object):
@@ -85,35 +86,35 @@ class RemoteManager(object):
returns (ConanDigest, remote_name)"""
return self._call_remote(remote, "get_package_digest", package_reference)
- def get_conanfile(self, conan_reference, remote):
+ def get_recipe(self, conan_reference, dest_folder, remote):
"""
Read the conans from remotes
Will iterate the remotes to find the conans unless remote was specified
- returns (dict relative_filepath:content , remote_name)"""
- export_files = self._call_remote(remote, "get_conanfile", conan_reference)
- export_folder = self._client_cache.export(conan_reference)
- uncompress_files(export_files, export_folder, EXPORT_TGZ_NAME)
+ returns (dict relative_filepath:abs_path , remote_name)"""
+ zipped_files = self._call_remote(remote, "get_recipe", conan_reference, dest_folder)
+ files = unzip_and_get_files(zipped_files, dest_folder, EXPORT_TGZ_NAME)
# Make sure that the source dir is deleted
rmdir(self._client_cache.source(conan_reference), True)
# TODO: Download only the CONANFILE file and only download the rest of files
# in install if needed (not found remote package)
+ return files
- def get_package(self, package_reference, remote):
+ def get_package(self, package_reference, dest_folder, remote):
"""
Read the conans package from remotes
Will iterate the remotes to find the conans unless remote was specified
- returns (dict relative_filepath:content , remote_name)"""
- package_files = self._call_remote(remote, "get_package", package_reference)
- destination_dir = self._client_cache.package(package_reference)
- uncompress_files(package_files, destination_dir, PACKAGE_TGZ_NAME)
-
+ returns (dict relative_filepath:abs_path , remote_name)"""
+ zipped_files = self._call_remote(remote, "get_package", package_reference, dest_folder)
+ files = unzip_and_get_files(zipped_files, dest_folder, PACKAGE_TGZ_NAME)
# Issue #214 https://github.com/conan-io/conan/issues/214
- for dirname, _, files in os.walk(destination_dir):
+ for dirname, _, files in os.walk(dest_folder):
for fname in files:
touch(os.path.join(dirname, fname))
+ return files
+
def search(self, remote, pattern=None, ignorecase=True):
"""
Search exported conans information from remotes
@@ -211,20 +212,27 @@ def compress_files(files, name, excluded, dest_dir):
return ret
-def uncompress_files(files, folder, name):
+def unzip_and_get_files(files, destination_dir, tgz_name):
+ '''Moves all files from package_files, {relative_name: tmp_abs_path}
+ to destination_dir, unzipping the "tgz_name" if found'''
+
+ tgz_file = files.pop(tgz_name, None)
+ if tgz_file:
+ uncompress_file(tgz_file, destination_dir)
+
+ return relative_dirs(destination_dir)
+
+
+def uncompress_file(src_path, dest_folder):
try:
- for file_name, content in files:
- if os.path.basename(file_name) == name:
- # Unzip the file and not keep the tgz
- tar_extract(BytesIO(content), folder)
- else:
- save(os.path.join(folder, file_name), content)
+ with open(src_path, 'rb') as file_handler:
+ tar_extract(file_handler, dest_folder)
except Exception as e:
- error_msg = "Error while downloading/extracting files to %s\n%s\n" % (folder, str(e))
+ error_msg = "Error while downloading/extracting files to %s\n%s\n" % (dest_folder, str(e))
# try to remove the files
try:
- if os.path.exists(folder):
- shutil.rmtree(folder)
+ if os.path.exists(dest_folder):
+ shutil.rmtree(dest_folder)
error_msg += "Folder removed"
except Exception as e:
error_msg += "Folder not removed, files/package might be damaged, remove manually"
diff --git a/conans/client/rest/auth_manager.py b/conans/client/rest/auth_manager.py
index 966a2773d..24cb1f43a 100644
--- a/conans/client/rest/auth_manager.py
+++ b/conans/client/rest/auth_manager.py
@@ -142,12 +142,12 @@ class ConanApiAuthManager(object):
return self._rest_client.get_package_digest(package_reference)
@input_credentials_if_unauthorized
- def get_conanfile(self, conan_reference):
- return self._rest_client.get_conanfile(conan_reference)
+ def get_recipe(self, conan_reference, dest_folder):
+ return self._rest_client.get_recipe(conan_reference, dest_folder)
@input_credentials_if_unauthorized
- def get_package(self, package_reference):
- return self._rest_client.get_package(package_reference)
+ def get_package(self, package_reference, dest_folder):
+ return self._rest_client.get_package(package_reference, dest_folder)
@input_credentials_if_unauthorized
def search(self, pattern, ignorecase):
diff --git a/conans/client/rest/rest_client.py b/conans/client/rest/rest_client.py
index 68e28d8e7..fcf6affe2 100644
--- a/conans/client/rest/rest_client.py
+++ b/conans/client/rest/rest_client.py
@@ -12,6 +12,7 @@ from conans.model.manifest import FileTreeManifest
from conans.client.rest.uploader_downloader import Uploader, Downloader
from conans.model.ref import ConanFileReference
from six.moves.urllib.parse import urlsplit, parse_qs
+import tempfile
def handle_return_deserializer(deserializer=None):
@@ -107,7 +108,7 @@ class RestApiClient(object):
contents = {key: decode_text(value) for key, value in dict(contents).items()}
return FileTreeManifest.loads(contents[CONAN_MANIFEST])
- def get_conanfile(self, conan_reference):
+ def get_recipe(self, conan_reference, dest_folder):
"""Gets a dict of filename:contents from conans"""
# Get the conanfile snapshot first
url = "%s/conans/%s/download_urls" % (self._remote_api_url, "/".join(conan_reference))
@@ -117,12 +118,10 @@ class RestApiClient(object):
raise NotFoundException("Conan '%s' doesn't have a %s!" % (conan_reference, CONANFILE))
# TODO: Get fist an snapshot and compare files and download only required?
+ file_paths = self.download_files_to_folder(urls, dest_folder, self._output)
+ return file_paths
- # Download the resources
- contents = self.download_files(urls, self._output)
- return contents
-
- def get_package(self, package_reference):
+ def get_package(self, package_reference, dest_folder):
"""Gets a dict of filename:contents from package"""
url = "%s/conans/%s/packages/%s/download_urls" % (self._remote_api_url,
"/".join(package_reference.conan),
@@ -133,8 +132,8 @@ class RestApiClient(object):
# TODO: Get fist an snapshot and compare files and download only required?
# Download the resources
- contents = self.download_files(urls, self._output)
- return contents
+ file_paths = self.download_files_to_folder(urls, dest_folder, self._output)
+ return file_paths
def upload_conan(self, conan_reference, the_files):
"""
@@ -361,6 +360,25 @@ class RestApiClient(object):
output.writeln("")
yield os.path.normpath(filename), contents
+ def download_files_to_folder(self, file_urls, to_folder, output=None):
+ """
+ :param: file_urls is a dict with {filename: abs_path}
+
+ It writes downloaded files to disk (appending to file, only keeps chunks in memory)
+ """
+ downloader = Downloader(self.requester, output, self.VERIFY_SSL)
+ ret = {}
+ for filename, resource_url in file_urls.items():
+ if output:
+ output.writeln("Downloading %s" % filename)
+ auth, _ = self._file_server_capabilities(resource_url)
+ abs_path = os.path.join(to_folder, filename)
+ downloader.download(resource_url, abs_path, auth=auth)
+ if output:
+ output.writeln("")
+ ret[filename] = abs_path
+ return ret
+
def upload_files(self, file_urls, files, output):
t1 = time.time()
failed = {}
diff --git a/conans/client/rest/uploader_downloader.py b/conans/client/rest/uploader_downloader.py
index eec4d8e84..475dfb450 100644
--- a/conans/client/rest/uploader_downloader.py
+++ b/conans/client/rest/uploader_downloader.py
@@ -125,8 +125,10 @@ class Downloader(object):
if self.output:
print_progress(self.output, units)
last_progress = units
-
- return bytes(ret)
+ if not file_path:
+ return bytes(ret)
+ else:
+ return
except Exception as e:
logger.debug(e.__class__)
logger.debug(traceback.format_exc())
diff --git a/conans/server/store/file_manager.py b/conans/server/store/file_manager.py
index a7f95ab65..6b6d4295b 100644
--- a/conans/server/store/file_manager.py
+++ b/conans/server/store/file_manager.py
@@ -17,7 +17,7 @@ class FileManager(object):
self._storage_adapter = storage_adapter
# ############ SNAPSHOTS
- def get_conanfile(self, conan_reference):
+ def get_recipe(self, conan_reference):
conanfile_path = self.paths.conanfile(conan_reference)
return self._storage_adapter.get_file(conanfile_path)
| Memory error when downloading very big packages
`conan install node/6.1.0@silkedit/stable -s compiler="Visual Studio" -s compiler.version=14`
The `Downloader.download` method keeps too much data in memory: it accumulates the entire response body in a byte buffer before returning it.
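The fix in the patch above stops returning downloaded bytes and instead streams each file straight to a destination path (`download_files_to_folder` in the REST client, and `Downloader.download` returning nothing once a `file_path` is given). A minimal sketch of that chunked approach, using plain `requests` with illustrative names rather than conan's actual downloader API:

```
import requests

def download_to_file(url, file_path, chunk_size=100 * 1024):
    """Stream a possibly huge response to disk, holding only one
    chunk in memory instead of accumulating the whole body."""
    response = requests.get(url, stream=True)
    response.raise_for_status()
    with open(file_path, 'wb') as handle:
        for chunk in response.iter_content(chunk_size=chunk_size):
            if chunk:  # skip keep-alive chunks
                handle.write(chunk)
```

The out-of-memory traceback from the original report: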
```
DEBUG :uploader_downloader.py[74]: <type 'exceptions.MemoryError'> [2016-09-23 15:15:02,983]
DEBUG :uploader_downloader.py[75]: Traceback (most recent call last):
File "c:\python27\lib\site-packages\conans\client\rest\uploader_downloader.py", line 62, in download
ret.extend(data)
MemoryError
``` | conan-io/conan | diff --git a/conans/test/download_test.py b/conans/test/download_test.py
index e1ed5a986..e41c6e29a 100644
--- a/conans/test/download_test.py
+++ b/conans/test/download_test.py
@@ -69,7 +69,7 @@ class DownloadTest(unittest.TestCase):
client2.remote_manager,
"default")
- installer.get_conanfile(conan_ref)
+ installer.get_recipe(conan_ref)
installer.get_package(package_ref, force_build=False)
reg_path = client2.paths.export(ConanFileReference.loads("Hello/1.2.1/frodo/stable"))
diff --git a/conans/test/model/order_libs_test.py b/conans/test/model/order_libs_test.py
index 5e70ef0cc..99dfa93aa 100644
--- a/conans/test/model/order_libs_test.py
+++ b/conans/test/model/order_libs_test.py
@@ -48,7 +48,7 @@ class Retriever(object):
content = base_content % (name, self._reqs(requires), name, self._libs(name))
save(conan_path, content)
- def get_conanfile(self, conan_ref):
+ def get_recipe(self, conan_ref):
conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE)
return conan_path
diff --git a/conans/test/model/transitive_reqs_test.py b/conans/test/model/transitive_reqs_test.py
index 441b7f8f5..a8ec49e2e 100644
--- a/conans/test/model/transitive_reqs_test.py
+++ b/conans/test/model/transitive_reqs_test.py
@@ -36,7 +36,7 @@ class Retriever(object):
conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE)
save(conan_path, content)
- def get_conanfile(self, conan_ref):
+ def get_recipe(self, conan_ref):
conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE)
return conan_path
diff --git a/conans/test/remote_manager_test.py b/conans/test/remote_manager_test.py
index 8e02eaf41..3362245b2 100644
--- a/conans/test/remote_manager_test.py
+++ b/conans/test/remote_manager_test.py
@@ -1,17 +1,20 @@
+import os
+import tempfile
import unittest
-from conans.client.remote_manager import RemoteManager
+
from mock import Mock
+
+from conans.client.client_cache import ClientCache
+from conans.client.remote_manager import RemoteManager
+from conans.client.remote_registry import Remote
from conans.errors import NotFoundException
from conans.model.ref import ConanFileReference, PackageReference
+from conans.model.manifest import FileTreeManifest
+from conans.paths import CONANFILE, CONAN_MANIFEST, CONANINFO
from conans.test.tools import TestBufferConanOutput, TestClient
from conans.test.utils.test_files import temp_folder
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
-from conans.client.remote_registry import Remote
-from conans.client.client_cache import ClientCache
from conans.util.files import save
-from conans.paths import CONANFILE, CONAN_MANIFEST, CONANINFO
-import os
-from conans.model.manifest import FileTreeManifest
class MockRemoteClient(object):
@@ -19,8 +22,13 @@ class MockRemoteClient(object):
def __init__(self):
self.upload_package = Mock()
self.get_conan_digest = Mock()
- self.get_conanfile = Mock(return_value=[("one.txt", "ONE")])
- self.get_package = Mock(return_value=[("one.txt", "ONE")])
+ tmp_folder = tempfile.mkdtemp(suffix='conan_download')
+ save(os.path.join(tmp_folder, "one.txt"), "ONE")
+ self.get_recipe = Mock(return_value={"one.txt": os.path.join(tmp_folder, "one.txt")})
+
+ tmp_folder = tempfile.mkdtemp(suffix='conan_download')
+ save(os.path.join(tmp_folder, "one.txt"), "ONE")
+ self.get_package = Mock(return_value={"one.txt": os.path.join(tmp_folder, "one.txt")})
self.remote_url = None
self.raise_count = 0
@@ -78,10 +86,10 @@ class RemoteManagerTest(unittest.TestCase):
self.manager.get_conan_digest(self.conan_reference, Remote("other", "url"))
self.assertTrue(self.remote_client.get_conan_digest.called)
- self.assertFalse(self.remote_client.get_conanfile.called)
- self.manager.get_conanfile(self.conan_reference, Remote("other", "url"))
- self.assertTrue(self.remote_client.get_conanfile.called)
+ self.assertFalse(self.remote_client.get_recipe.called)
+ self.manager.get_recipe(self.conan_reference, temp_folder(), Remote("other", "url"))
+ self.assertTrue(self.remote_client.get_recipe.called)
self.assertFalse(self.remote_client.get_package.called)
- self.manager.get_package(self.package_reference, Remote("other", "url"))
+ self.manager.get_package(self.package_reference, temp_folder(), Remote("other", "url"))
self.assertTrue(self.remote_client.get_package.called)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 7
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"conans/requirements.txt",
"conans/requirements_server.txt",
"conans/requirements_dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
beautifulsoup4==4.12.3
boto==2.42.0
bottle==0.12.25
certifi==2021.5.30
colorama==0.3.9
-e git+https://github.com/conan-io/conan.git@2167f1f59f670b87acb69efd117f79ff506ed99f#egg=conan
coverage==6.2
fasteners==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==1.3.0
nose==1.3.7
nose-parameterized==0.5.0
packaging==21.3
passlib==1.6.5
patch==1.16
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
PyJWT==1.4.2
pyparsing==3.1.4
pytest==7.0.1
PyYAML==3.12
requests==2.11.1
six==1.17.0
soupsieve==2.3.2.post1
tomli==1.2.3
typing_extensions==4.1.1
waitress==2.0.0
WebOb==1.8.9
WebTest==2.0.35
zipp==3.6.0
| name: conan
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- beautifulsoup4==4.12.3
- boto==2.42.0
- bottle==0.12.25
- colorama==0.3.9
- coverage==6.2
- fasteners==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==1.3.0
- nose==1.3.7
- nose-parameterized==0.5.0
- packaging==21.3
- passlib==1.6.5
- patch==1.16
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyjwt==1.4.2
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==3.12
- requests==2.11.1
- six==1.17.0
- soupsieve==2.3.2.post1
- tomli==1.2.3
- typing-extensions==4.1.1
- waitress==2.0.0
- webob==1.8.9
- webtest==2.0.35
- zipp==3.6.0
prefix: /opt/conda/envs/conan
| [
"conans/test/model/order_libs_test.py::ConanRequirementsTest::test_diamond_no_conflict",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_basic_transitive_option",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_conditional",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_dep_requires_clear",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict_options_solved",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_conflict_solved",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_no_conflict",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_diamond_no_conflict_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_propagate_indirect_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_remove_build_requires",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_remove_two_build_requires",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_simple_override",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_diamond_private",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_private",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_two_levels",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_two_levels_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_transitive_two_levels_wrong_options",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_version_requires_change",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_transitive_two_levels_options"
]
| []
| [
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_basic",
"conans/test/model/transitive_reqs_test.py::ConanRequirementsTest::test_basic_option",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_basic",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_config",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_config_remove",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_config_remove2",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_errors",
"conans/test/model/transitive_reqs_test.py::CoreSettingsTest::test_new_configure",
"conans/test/remote_manager_test.py::RemoteManagerTest::test_no_remotes"
]
| []
| MIT License | 781 | [
"conans/client/remote_manager.py",
"conans/server/store/file_manager.py",
"conans/client/rest/rest_client.py",
"conans/client/deps_builder.py",
"conans/client/proxy.py",
"conans/client/rest/uploader_downloader.py",
"conans/client/rest/auth_manager.py"
]
| [
"conans/client/remote_manager.py",
"conans/server/store/file_manager.py",
"conans/client/rest/rest_client.py",
"conans/client/deps_builder.py",
"conans/client/proxy.py",
"conans/client/rest/uploader_downloader.py",
"conans/client/rest/auth_manager.py"
]
|
|
zalando-stups__senza-365 | fe537a4234d2dd978ef0ff04fba8e5507dad203d | 2016-09-28 08:02:15 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/cloudformation.py b/senza/manaus/cloudformation.py
index b8529be..a53b2e9 100644
--- a/senza/manaus/cloudformation.py
+++ b/senza/manaus/cloudformation.py
@@ -124,6 +124,11 @@ class CloudFormationStack:
for resource in resources:
resource_type = resource["ResourceType"]
if resource_type == ResourceType.route53_record_set:
+ physical_resource_id = resource.get('PhysicalResourceId')
+ if physical_resource_id is None:
+ # if there is no Physical Resource Id we can't fetch the
+ # record
+ continue
records = Route53.get_records(name=resource['PhysicalResourceId'])
for record in records:
if (record.set_identifier is None or
| Unknown Error: 'PhysicalResourceId'
I got a `senza delete` error:
Unknown Error: 'PhysicalResourceId'.
Please create an issue with the content of /tmp/senza-traceback-8ecz_cyz
**************************************** senza-traceback-8ecz_cyz ****************************************
```
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/senza/error_handling.py", line 82, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/senza/cli.py", line 835, in delete
for r in stack.resources:
File "/usr/local/lib/python3.5/dist-packages/senza/manaus/cloudformation.py", line 127, in resources
records = Route53.get_records(name=resource['PhysicalResourceId'])
KeyError: 'PhysicalResourceId'
``` | zalando-stups/senza | diff --git a/tests/test_manaus/test_cloudformation.py b/tests/test_manaus/test_cloudformation.py
index f700c77..44b868a 100644
--- a/tests/test_manaus/test_cloudformation.py
+++ b/tests/test_manaus/test_cloudformation.py
@@ -99,6 +99,12 @@ def test_cf_resources(monkeypatch):
'PhysicalResourceId': 'myapp1.example.com',
'ResourceStatus': 'CREATE_COMPLETE',
'ResourceType': 'AWS::Route53::RecordSet'},
+ {'LastUpdatedTimestamp': datetime(2016, 7, 20, 7, 3,
+ 45, 70000,
+ tzinfo=timezone.utc),
+ 'LogicalResourceId': 'ThisWillBeIgnored',
+ 'ResourceStatus': 'CREATE_COMPLETE',
+ 'ResourceType': 'AWS::Route53::RecordSet'},
{'LastUpdatedTimestamp': datetime(2016, 7, 20, 7, 3,
43, 871000,
tzinfo=timezone.utc),
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
raven==6.10.0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@fe537a4234d2dd978ef0ff04fba8e5507dad203d#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- raven==6.10.0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_cloudformation.py::test_cf_resources"
]
| []
| [
"tests/test_manaus/test_cloudformation.py::test_get_by_stack_name",
"tests/test_manaus/test_cloudformation.py::test_get_stacks",
"tests/test_manaus/test_cloudformation.py::test_get_by_stack_name_not_found",
"tests/test_manaus/test_cloudformation.py::test_template",
"tests/test_manaus/test_cloudformation.py::test_stack_update"
]
| []
| Apache License 2.0 | 782 | [
"senza/manaus/cloudformation.py"
]
| [
"senza/manaus/cloudformation.py"
]
|
|
LibraryOfCongress__bagit-python-72 | 5489148ab1f365fcd518bcbc626a9f84b22640a2 | 2016-09-28 19:10:04 | 7684c797b19602c8ea9f36475a0ac26be8903b75 | diff --git a/bagit.py b/bagit.py
index 18c61da..d97b4b5 100755
--- a/bagit.py
+++ b/bagit.py
@@ -47,6 +47,8 @@ from os.path import abspath, isdir, isfile, join
LOGGER = logging.getLogger(__name__)
+VERSION = '1.5.4'
+
# standard bag-info.txt metadata
STANDARD_BAG_INFO_HEADERS = [
'Source-Organization',
@@ -141,7 +143,7 @@ def make_bag(bag_dir, bag_info=None, processes=1, checksum=None):
if 'Bagging-Date' not in bag_info:
bag_info['Bagging-Date'] = date.strftime(date.today(), "%Y-%m-%d")
if 'Bag-Software-Agent' not in bag_info:
- bag_info['Bag-Software-Agent'] = 'bagit.py <http://github.com/libraryofcongress/bagit-python>'
+ bag_info['Bag-Software-Agent'] = 'bagit.py v' + VERSION + ' <http://github.com/libraryofcongress/bagit-python>'
bag_info['Payload-Oxum'] = Oxum
_make_tag_file('bag-info.txt', bag_info)
diff --git a/setup.py b/setup.py
index e91730d..ee8c326 100644
--- a/setup.py
+++ b/setup.py
@@ -2,6 +2,8 @@ from sys import exit, version
from setuptools import setup
+import bagit
+
if version < '2.6.0':
print("python 2.6 or higher is required")
exit(1)
@@ -24,10 +26,11 @@ try:
except:
requirements.append("hashlib")
+version = bagit.VERSION
setup(
name = 'bagit',
- version = '1.5.4',
+ version = version,
url = 'https://libraryofcongress.github.io/bagit-python/',
author = 'Ed Summers',
author_email = '[email protected]',
| Bag-Software-Agent include version?
It would be nice if the default Bag-Software-Agent had a version in it. Currently it looks like:
bagit.py <http://github.com/libraryofcongress/bagit-python>
But maybe something like this would be better, for discovering bags created with (gasp) a version of bagit.py that has a bug?
bagit.py v1.5.3 <http://github.com/libraryofcongress/bagit-python>
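(For illustration, a minimal runnable sketch of the suggested fix — the `VERSION` constant and agent string mirror this record's patch above; the helper function itself is hypothetical:)
```python
# Single source of truth for the version (as in the patch above).
VERSION = '1.5.4'

def default_bag_software_agent():
    # Hypothetical helper: builds the default Bag-Software-Agent value.
    return 'bagit.py v' + VERSION + ' <http://github.com/libraryofcongress/bagit-python>'

print(default_bag_software_agent())
# -> bagit.py v1.5.4 <http://github.com/libraryofcongress/bagit-python>
```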
| LibraryOfCongress/bagit-python | diff --git a/test.py b/test.py
index 5dc8fbf..285174c 100644
--- a/test.py
+++ b/test.py
@@ -303,7 +303,7 @@ class TestSingleProcessValidation(unittest.TestCase):
info = {'Bagging-Date': '1970-01-01', 'Contact-Email': '[email protected]'}
bag = bagit.make_bag(self.tmpdir, checksum=['sha1'], bag_info=info)
self.assertTrue(os.path.isfile(j(self.tmpdir, 'tagmanifest-sha1.txt')))
- self.assertEqual(bag.entries['bag-info.txt']['sha1'], 'd7f086508df433e5d7464b5a3835d5501df14404')
+ self.assertEqual(bag.entries['bag-info.txt']['sha1'], 'ec70407d895d4e550bc0a7ea40a82ad653d136e5')
def test_validate_unreadable_file(self):
bag = bagit.make_bag(self.tmpdir, checksum=["md5"])
@@ -360,7 +360,7 @@ class TestBag(unittest.TestCase):
self.assertTrue('Contact-Email: [email protected]' in bag_info_txt)
self.assertTrue('Bagging-Date: 1970-01-01' in bag_info_txt)
self.assertTrue('Payload-Oxum: 991765.5' in bag_info_txt)
- self.assertTrue('Bag-Software-Agent: bagit.py <http://github.com/libraryofcongress/bagit-python>' in bag_info_txt)
+ self.assertTrue('Bag-Software-Agent: bagit.py v1.5.4 <http://github.com/libraryofcongress/bagit-python>' in bag_info_txt)
# check tagmanifest-md5.txt
self.assertTrue(os.path.isfile(j(self.tmpdir, 'tagmanifest-md5.txt')))
@@ -368,7 +368,7 @@ class TestBag(unittest.TestCase):
tagmanifest_txt = tm.read()
self.assertTrue('9e5ad981e0d29adc278f6a294b8c2aca bagit.txt' in tagmanifest_txt)
self.assertTrue('a0ce6631a2a6d1a88e6d38453ccc72a5 manifest-md5.txt' in tagmanifest_txt)
- self.assertTrue('6a5090e27cb29d5dda8a0142fbbdf37e bag-info.txt' in tagmanifest_txt)
+ self.assertTrue('bfe59ad8af1a227d27c191b4178c399f bag-info.txt' in tagmanifest_txt)
def test_make_bag_sha1_manifest(self):
bagit.make_bag(self.tmpdir, checksum=['sha1'])
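(The updated digests asserted above are simply the md5/sha1 of the regenerated bag-info.txt; an illustrative sketch of how one might recompute such expected values rather than hardcode them:)
```python
import hashlib

def tagfile_digests(path):
    # Illustrative: recompute the md5/sha1 entries asserted in the tests above.
    with open(path, 'rb') as f:
        data = f.read()
    return hashlib.md5(data).hexdigest(), hashlib.sha1(data).hexdigest()
```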
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
-e git+https://github.com/LibraryOfCongress/bagit-python.git@5489148ab1f365fcd518bcbc626a9f84b22640a2#egg=bagit
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: bagit-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/bagit-python
| [
"test.py::TestSingleProcessValidation::test_sha1_tagfile",
"test.py::TestMultiprocessValidation::test_sha1_tagfile",
"test.py::TestBag::test_make_bag"
]
| [
"test.py::TestSingleProcessValidation::test_validate_unreadable_file",
"test.py::TestMultiprocessValidation::test_validate_unreadable_file"
]
| [
"test.py::TestSingleProcessValidation::test_allow_extraneous_dirs_in_base",
"test.py::TestSingleProcessValidation::test_allow_extraneous_files_in_base",
"test.py::TestSingleProcessValidation::test_bom_in_bagit_txt",
"test.py::TestSingleProcessValidation::test_handle_directory_end_slash_gracefully",
"test.py::TestSingleProcessValidation::test_make_bag_md5_sha1_sha256_manifest",
"test.py::TestSingleProcessValidation::test_make_bag_md5_sha256_manifest",
"test.py::TestSingleProcessValidation::test_make_bag_sha1_sha256_manifest",
"test.py::TestSingleProcessValidation::test_missing_file",
"test.py::TestSingleProcessValidation::test_missing_manifest_raises_error",
"test.py::TestSingleProcessValidation::test_missing_tagfile_raises_error",
"test.py::TestSingleProcessValidation::test_mixed_case_checksums",
"test.py::TestSingleProcessValidation::test_multiple_oxum_values",
"test.py::TestSingleProcessValidation::test_validate_fast",
"test.py::TestSingleProcessValidation::test_validate_fast_without_oxum",
"test.py::TestSingleProcessValidation::test_validate_flipped_bit",
"test.py::TestSingleProcessValidation::test_validate_optional_tagfile",
"test.py::TestSingleProcessValidation::test_validate_optional_tagfile_in_directory",
"test.py::TestSingleProcessValidation::test_validate_slow_without_oxum_extra_file",
"test.py::TestSingleProcessValidation::test_validation_error_details",
"test.py::TestMultiprocessValidation::test_allow_extraneous_dirs_in_base",
"test.py::TestMultiprocessValidation::test_allow_extraneous_files_in_base",
"test.py::TestMultiprocessValidation::test_bom_in_bagit_txt",
"test.py::TestMultiprocessValidation::test_handle_directory_end_slash_gracefully",
"test.py::TestMultiprocessValidation::test_make_bag_md5_sha1_sha256_manifest",
"test.py::TestMultiprocessValidation::test_make_bag_md5_sha256_manifest",
"test.py::TestMultiprocessValidation::test_make_bag_sha1_sha256_manifest",
"test.py::TestMultiprocessValidation::test_missing_file",
"test.py::TestMultiprocessValidation::test_missing_manifest_raises_error",
"test.py::TestMultiprocessValidation::test_missing_tagfile_raises_error",
"test.py::TestMultiprocessValidation::test_mixed_case_checksums",
"test.py::TestMultiprocessValidation::test_multiple_oxum_values",
"test.py::TestMultiprocessValidation::test_validate_fast",
"test.py::TestMultiprocessValidation::test_validate_fast_without_oxum",
"test.py::TestMultiprocessValidation::test_validate_flipped_bit",
"test.py::TestMultiprocessValidation::test_validate_optional_tagfile",
"test.py::TestMultiprocessValidation::test_validate_optional_tagfile_in_directory",
"test.py::TestMultiprocessValidation::test_validate_slow_without_oxum_extra_file",
"test.py::TestMultiprocessValidation::test_validation_error_details",
"test.py::TestBag::test_bag_class",
"test.py::TestBag::test_bag_constructor",
"test.py::TestBag::test_carriage_return_manifest",
"test.py::TestBag::test_default_bagging_date",
"test.py::TestBag::test_garbage_in_bagit_txt",
"test.py::TestBag::test_has_oxum",
"test.py::TestBag::test_is_valid",
"test.py::TestBag::test_make_bag_multiprocessing",
"test.py::TestBag::test_make_bag_sha1_manifest",
"test.py::TestBag::test_make_bag_sha256_manifest",
"test.py::TestBag::test_make_bag_sha512_manifest",
"test.py::TestBag::test_make_bag_unknown_algorithm",
"test.py::TestBag::test_make_bag_with_data_dir_present",
"test.py::TestBag::test_make_bag_with_newline",
"test.py::TestBag::test_missing_tagmanifest_valid",
"test.py::TestBag::test_multiple_meta_values",
"test.py::TestBag::test_payload_permissions",
"test.py::TestBag::test_save_baginfo",
"test.py::TestBag::test_save_baginfo_with_sha1",
"test.py::TestBag::test_save_manifests",
"test.py::TestBag::test_save_manifests_deleted_files",
"test.py::TestBag::test_save_only_baginfo",
"test.py::TestBag::test_unicode_in_tags"
]
| []
| null | 783 | [
"setup.py",
"bagit.py"
]
| [
"setup.py",
"bagit.py"
]
|
|
Azure__azure-data-lake-store-python-83 | 9143406b26b3dc695c325b4fbf563c2093e9982f | 2016-09-29 04:13:44 | 9143406b26b3dc695c325b4fbf563c2093e9982f | diff --git a/adlfs/cli.py b/adlfs/cli.py
index f8d2a1f..320033f 100644
--- a/adlfs/cli.py
+++ b/adlfs/cli.py
@@ -91,13 +91,13 @@ class AzureDataLakeFSCommand(cmd.Cmd, object):
def _parse_ownership(self, ownership):
if ':' in ownership:
- user, group = ownership.split(':')
- if not user:
- user = None
+ owner, group = ownership.split(':')
+ if not owner:
+ owner = None
else:
- user = ownership
+ owner = ownership
group = None
- return user, group
+ return owner, group
def do_chown(self, line):
parser = argparse.ArgumentParser(prog="chown", add_help=False)
@@ -105,10 +105,10 @@ class AzureDataLakeFSCommand(cmd.Cmd, object):
parser.add_argument('files', type=str, nargs='+')
args = parser.parse_args(line.split())
- user, group = self._parse_ownership(args.ownership)
+ owner, group = self._parse_ownership(args.ownership)
for f in args.files:
- self._fs.chown(f, user=user, group=group)
+ self._fs.chown(f, owner=owner, group=group)
def help_chown(self):
print("chown owner[:group] file ...")
| SetOwner and SetPermission are now supported, need tests
Please add tests and recordings for these two APIs now that they are supported | Azure/azure-data-lake-store-python | diff --git a/tests/recordings/test_cli/test_chgrp.yaml b/tests/recordings/test_cli/test_chgrp.yaml
new file mode 100644
index 0000000..5a4b97d
--- /dev/null
+++ b/tests/recordings/test_cli/test_chgrp.yaml
@@ -0,0 +1,113 @@
+interactions:
+- request:
+ body: '123456'
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['6']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ ContentLength: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:35 GMT']
+ Expires: ['-1']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE']
+ Pragma: [no-cache]
+ Server-Perf: ['[bd1d8cd5-2d95-4f5f-8a53-2c3d201e18a6][ AuthTime::916.891327850232::PostAuthTime::207.840105100379
+ ][S-HdfsGetFileStatusV2 :: 00:00:005 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:061 ms]%0a[S-FsAppendStream
+ :: 00:00:147 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:147
+ ms]%0a[S-FsAppendStream :: 00:00:034 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[CREATE :: 00:00:268 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [bd1d8cd5-2d95-4f5f-8a53-2c3d201e18a6]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?group=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[ad84325f-4c08-46a9-bb80-de4087dec9c1][ AuthTime::862.151547083054::PostAuthTime::171.917123971919
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [ad84325f-4c08-46a9-bb80-de4087dec9c1]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ User-Agent: [python-requests/2.11.1]
+ method: GET
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo?OP=LISTSTATUS
+ response:
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":6,"pathSuffix":"bar","type":"FILE","blockSize":268435456,"accessTime":1475122175622,"modificationTime":1475122175761,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['290']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[886a16e5-6b2d-4dc4-ae05-51f0e7426d8a][ AuthTime::990.876410298956::PostAuthTime::215.538071122432
+ ][S-HdfsListStatus :: 00:00:031 ms]%0a[LISTSTATUS :: 00:00:031 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [886a16e5-6b2d-4dc4-ae05-51f0e7426d8a]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: DELETE
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?recursive=False&OP=DELETE
+ response:
+ body: {string: '{"boolean":true}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['16']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[ba03af52-3d38-43d3-b915-87fe720fe72b][ AuthTime::844.619155876924::PostAuthTime::280.969511600577
+ ][S-FsDelete :: 00:00:083 ms]%0a[DELETE :: 00:00:091 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [ba03af52-3d38-43d3-b915-87fe720fe72b]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+version: 1
diff --git a/tests/recordings/test_cli/test_chown.yaml b/tests/recordings/test_cli/test_chown.yaml
new file mode 100644
index 0000000..9b1b17c
--- /dev/null
+++ b/tests/recordings/test_cli/test_chown.yaml
@@ -0,0 +1,165 @@
+interactions:
+- request:
+ body: '123456'
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['6']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ ContentLength: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:36 GMT']
+ Expires: ['-1']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?overwrite=true&write=true&OP=CREATE']
+ Pragma: [no-cache]
+ Server-Perf: ['[a70cb23e-2849-41d0-92d6-ecd4f18309d6][ AuthTime::982.749286031361::PostAuthTime::208.26758150447
+ ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:005 ms]%0a[S-FsOpenStream :: 00:00:036 ms]%0a[S-FsAppendStream
+ :: 00:00:196 ms]%0a[BufferingTime :: 00:00:000 ms]%0a[WriteTime :: 00:00:196
+ ms]%0a[S-FsAppendStream :: 00:00:033 ms]%0a[S-FsCloseHandle :: 00:00:001
+ ms]%0a[CREATE :: 00:00:296 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [a70cb23e-2849-41d0-92d6-ecd4f18309d6]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?owner=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[87b33574-03ce-45bb-97fd-5c35cae5c524][ AuthTime::945.117335675742::PostAuthTime::220.66992995868
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [87b33574-03ce-45bb-97fd-5c35cae5c524]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?group=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[bfe54b19-e547-4b94-a69d-f8bcee43e4c8][ AuthTime::932.288884678858::PostAuthTime::213.400070392087
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [bfe54b19-e547-4b94-a69d-f8bcee43e4c8]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?group=foo&owner=foo&OP=SETOWNER
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:09:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[bba612c2-e2c2-480f-b7f9-4883970dc590][ AuthTime::1009.26643436432::PostAuthTime::203.13625267926
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [bba612c2-e2c2-480f-b7f9-4883970dc590]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ User-Agent: [python-requests/2.11.1]
+ method: GET
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo?OP=LISTSTATUS
+ response:
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":6,"pathSuffix":"bar","type":"FILE","blockSize":268435456,"accessTime":1475122177420,"modificationTime":1475122177607,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['290']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[590d1d73-6cab-46a8-b06f-559563296587][ AuthTime::907.482928030873::PostAuthTime::203.563559727943
+ ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [590d1d73-6cab-46a8-b06f-559563296587]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: DELETE
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/foo/bar?recursive=False&OP=DELETE
+ response:
+ body: {string: '{"boolean":true}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['16']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:09:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[83093651-8421-45c0-a12d-468c16ec2267][ AuthTime::990.019830332411::PostAuthTime::239.058784084587
+ ][S-FsDelete :: 00:00:095 ms]%0a[DELETE :: 00:00:103 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [83093651-8421-45c0-a12d-468c16ec2267]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+version: 1
diff --git a/tests/recordings/test_core/test_chown.yaml b/tests/recordings/test_core/test_chown.yaml
new file mode 100644
index 0000000..aa4693e
--- /dev/null
+++ b/tests/recordings/test_core/test_chown.yaml
@@ -0,0 +1,164 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ ContentLength: ['0']
+ Date: ['Thu, 29 Sep 2016 03:59:59 GMT']
+ Expires: ['-1']
+ Location: ['https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=CREATE&overwrite=true&write=true']
+ Pragma: [no-cache]
+ Server-Perf: ['[d869d515-b22a-4605-ac9e-4e0f7c5b6d8c][ AuthTime::897.647641440444::PostAuthTime::207.840282868059
+ ][S-HdfsGetFileStatusV2 :: 00:00:006 ms]%0a[S-HdfsCheckAccess :: 00:00:002
+ ms]%0a[S-FsDelete :: 00:00:006 ms]%0a[S-FsOpenStream :: 00:00:044 ms]%0a[BufferingTime
+ :: 00:00:000 ms]%0a[WriteTime :: 00:00:000 ms]%0a[S-FsAppendStream :: 00:00:027
+ ms]%0a[S-FsCloseHandle :: 00:00:001 ms]%0a[CREATE :: 00:00:096 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [d869d515-b22a-4605-ac9e-4e0f7c5b6d8c]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ User-Agent: [python-requests/2.11.1]
+ method: GET
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir?OP=LISTSTATUS
+ response:
+ body: {string: '{"FileStatuses":{"FileStatus":[{"length":0,"pathSuffix":"a","type":"FILE","blockSize":268435456,"accessTime":1475121600130,"modificationTime":1475121600130,"replication":1,"permission":"770","owner":"49b2f9ec-818a-49ca-9424-249e1f19f7d7","group":"49b2f9ec-818a-49ca-9424-249e1f19f7d7"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['288']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:00:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[b7c2f7c8-7e37-46e8-8332-33364ce07654][ AuthTime::909.193157359269::PostAuthTime::209.122979279719
+ ][S-HdfsListStatus :: 00:00:011 ms]%0a[LISTSTATUS :: 00:00:012 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [b7c2f7c8-7e37-46e8-8332-33364ce07654]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETOWNER&owner=foo
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:00:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[c9893c0b-74c5-4731-9d7e-30d5438e89bb][ AuthTime::1052.88411988168::PostAuthTime::266.856088873341
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [c9893c0b-74c5-4731-9d7e-30d5438e89bb]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETOWNER&group=bar
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:00:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[9847e1b4-5dbd-42ca-974e-9dd95db3f30a][ AuthTime::0::PostAuthTime::0
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [9847e1b4-5dbd-42ca-974e-9dd95db3f30a]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: PUT
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=SETOWNER&owner=foo&group=bar
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['0']
+ Date: ['Thu, 29 Sep 2016 04:00:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[5e3ddf51-8148-4d3b-a582-5f48a8ba63a9][ AuthTime::946.399490920039::PostAuthTime::216.393195845251
+ ][SETOWNER :: 00:00:000 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [5e3ddf51-8148-4d3b-a582-5f48a8ba63a9]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: ['*/*']
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ User-Agent: [python-requests/2.11.1]
+ method: DELETE
+ uri: https://fakestore.azuredatalakestore.net/webhdfs/v1/azure_test_dir/a?OP=DELETE&recursive=True
+ response:
+ body: {string: '{"boolean":true}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Length: ['16']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 29 Sep 2016 04:00:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server-Perf: ['[b928f672-0d84-4a07-9932-55e7b00d2e3f][ AuthTime::1142.69292351484::PostAuthTime::316.892012097492
+ ][S-FsDelete :: 00:00:073 ms]%0a[DELETE :: 00:00:082 ms]%0a']
+ Status: ['0x0']
+ Strict-Transport-Security: [max-age=15724800; includeSubDomains]
+ X-Content-Type-Options: [nosniff]
+ x-ms-request-id: [b928f672-0d84-4a07-9932-55e7b00d2e3f]
+ x-ms-webhdfs-version: [16.07.18.01]
+ status: {code: 200, message: OK}
+version: 1
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 20b84f3..1b14260 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -55,6 +55,13 @@ def test_cat(capsys, azure, client):
assert read_stdout(capsys) == '123456'
+@my_vcr.use_cassette
+def test_chgrp(capsys, azure, client):
+ with setup_file(azure) as azurefile:
+ client.onecmd('chgrp foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+
@my_vcr.use_cassette
def test_chmod(capsys, azure, client):
with setup_file(azure) as azurefile:
@@ -68,6 +75,19 @@ def test_chmod(capsys, azure, client):
assert 'permission = 550' in read_stdout(capsys)
+@my_vcr.use_cassette
+def test_chown(capsys, azure, client):
+ with setup_file(azure) as azurefile:
+ client.onecmd('chown foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+ client.onecmd('chown :foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+ client.onecmd('chown foo:foo ' + azurefile)
+ assert not read_stdout(capsys)
+
+
@my_vcr.use_cassette
def test_df(capsys, azure, client):
client.onecmd('df')
diff --git a/tests/test_core.py b/tests/test_core.py
index 8c6216f..a986219 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -605,6 +605,27 @@ def test_chmod(azure):
azure.chmod(test_dir / 'deep', '770')
+@my_vcr.use_cassette
+def test_chown(azure):
+ with azure_teardown(azure):
+ azure.touch(a)
+
+ # Account doesn't have permission to change owner
+ owner = azure.info(a)['owner']
+ azure.chown(a, owner='foo')
+ assert owner == azure.info(a)['owner']
+
+ # Account doesn't have permission to change group
+ group = azure.info(a)['group']
+ azure.chown(a, group='bar')
+ assert group == azure.info(a)['group']
+
+ # Account doesn't have permission to change owner/group
+ azure.chown(a, owner='foo', group='bar')
+ assert owner == azure.info(a)['owner']
+ assert group == azure.info(a)['group']
+
+
@pytest.mark.skipif(sys.platform != 'win32', reason="requires windows")
def test_backslash():
from adlfs.core import AzureDLPath
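(For reference, the `owner[:group]` parsing exercised by the `test_chown` cases above behaves like this standalone sketch; the logic is copied from this record's patch to `adlfs/cli.py`:)
```python
def _parse_ownership(ownership):
    # From the patch: ':group' yields owner=None; bare 'owner' yields group=None.
    if ':' in ownership:
        owner, group = ownership.split(':')
        if not owner:
            owner = None
    else:
        owner = ownership
        group = None
    return owner, group

assert _parse_ownership('foo') == ('foo', None)
assert _parse_ownership(':foo') == (None, 'foo')
assert _parse_ownership('foo:bar') == ('foo', 'bar')
```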
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"vcrpy"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==1.2.7
-e git+https://github.com/Azure/azure-data-lake-store-python.git@9143406b26b3dc695c325b4fbf563c2093e9982f#egg=adlfs
attrs==22.2.0
azure==4.0.0
azure-applicationinsights==0.1.1
azure-batch==4.1.3
azure-common==1.1.28
azure-core==1.24.2
azure-cosmosdb-nspkg==2.0.2
azure-cosmosdb-table==1.0.6
azure-datalake-store==0.0.53
azure-eventgrid==1.3.0
azure-graphrbac==0.40.0
azure-keyvault==1.1.0
azure-loganalytics==0.1.1
azure-mgmt==4.0.0
azure-mgmt-advisor==1.0.1
azure-mgmt-applicationinsights==0.1.1
azure-mgmt-authorization==0.50.0
azure-mgmt-batch==5.0.1
azure-mgmt-batchai==2.0.0
azure-mgmt-billing==0.2.0
azure-mgmt-cdn==3.1.0
azure-mgmt-cognitiveservices==3.0.0
azure-mgmt-commerce==1.0.1
azure-mgmt-compute==4.6.2
azure-mgmt-consumption==2.0.0
azure-mgmt-containerinstance==1.5.0
azure-mgmt-containerregistry==2.8.0
azure-mgmt-containerservice==4.4.0
azure-mgmt-cosmosdb==0.4.1
azure-mgmt-datafactory==0.6.0
azure-mgmt-datalake-analytics==0.6.0
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.5.0
azure-mgmt-datamigration==1.0.0
azure-mgmt-devspaces==0.1.0
azure-mgmt-devtestlabs==2.2.0
azure-mgmt-dns==2.1.0
azure-mgmt-eventgrid==1.0.0
azure-mgmt-eventhub==2.6.0
azure-mgmt-hanaonazure==0.1.1
azure-mgmt-iotcentral==0.1.0
azure-mgmt-iothub==0.5.0
azure-mgmt-iothubprovisioningservices==0.2.0
azure-mgmt-keyvault==1.1.0
azure-mgmt-loganalytics==0.2.0
azure-mgmt-logic==3.0.0
azure-mgmt-machinelearningcompute==0.4.1
azure-mgmt-managementgroups==0.1.0
azure-mgmt-managementpartner==0.1.1
azure-mgmt-maps==0.1.0
azure-mgmt-marketplaceordering==0.1.0
azure-mgmt-media==1.0.1
azure-mgmt-monitor==0.5.2
azure-mgmt-msi==0.2.0
azure-mgmt-network==2.7.0
azure-mgmt-notificationhubs==2.1.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-policyinsights==0.1.0
azure-mgmt-powerbiembedded==2.0.0
azure-mgmt-rdbms==1.9.0
azure-mgmt-recoveryservices==0.3.0
azure-mgmt-recoveryservicesbackup==0.3.0
azure-mgmt-redis==5.0.0
azure-mgmt-relay==0.1.0
azure-mgmt-reservations==0.2.1
azure-mgmt-resource==2.2.0
azure-mgmt-scheduler==2.0.0
azure-mgmt-search==2.1.0
azure-mgmt-servicebus==0.5.3
azure-mgmt-servicefabric==0.2.0
azure-mgmt-signalr==0.1.1
azure-mgmt-sql==0.9.1
azure-mgmt-storage==2.0.0
azure-mgmt-subscription==0.2.0
azure-mgmt-trafficmanager==0.50.0
azure-mgmt-web==0.35.0
azure-nspkg==3.0.2
azure-servicebus==0.21.1
azure-servicefabric==6.3.0.0
azure-servicemanagement-legacy==0.20.8
azure-storage-blob==1.5.0
azure-storage-common==1.4.2
azure-storage-file==1.4.0
azure-storage-queue==1.4.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
msal==1.27.0
msrest==0.7.1
msrestazure==0.6.4.post1
multidict==5.2.0
oauthlib==3.2.2
packaging==21.3
pathlib2==2.3.7.post1
pluggy==1.0.0
py==1.11.0
pycparser==2.21
PyJWT==2.4.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
vcrpy==4.1.1
wrapt==1.16.0
yarl==1.7.2
zipp==3.6.0
| name: azure-data-lake-store-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==1.2.7
- attrs==22.2.0
- azure==4.0.0
- azure-applicationinsights==0.1.1
- azure-batch==4.1.3
- azure-common==1.1.28
- azure-core==1.24.2
- azure-cosmosdb-nspkg==2.0.2
- azure-cosmosdb-table==1.0.6
- azure-datalake-store==0.0.53
- azure-eventgrid==1.3.0
- azure-graphrbac==0.40.0
- azure-keyvault==1.1.0
- azure-loganalytics==0.1.1
- azure-mgmt==4.0.0
- azure-mgmt-advisor==1.0.1
- azure-mgmt-applicationinsights==0.1.1
- azure-mgmt-authorization==0.50.0
- azure-mgmt-batch==5.0.1
- azure-mgmt-batchai==2.0.0
- azure-mgmt-billing==0.2.0
- azure-mgmt-cdn==3.1.0
- azure-mgmt-cognitiveservices==3.0.0
- azure-mgmt-commerce==1.0.1
- azure-mgmt-compute==4.6.2
- azure-mgmt-consumption==2.0.0
- azure-mgmt-containerinstance==1.5.0
- azure-mgmt-containerregistry==2.8.0
- azure-mgmt-containerservice==4.4.0
- azure-mgmt-cosmosdb==0.4.1
- azure-mgmt-datafactory==0.6.0
- azure-mgmt-datalake-analytics==0.6.0
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.5.0
- azure-mgmt-datamigration==1.0.0
- azure-mgmt-devspaces==0.1.0
- azure-mgmt-devtestlabs==2.2.0
- azure-mgmt-dns==2.1.0
- azure-mgmt-eventgrid==1.0.0
- azure-mgmt-eventhub==2.6.0
- azure-mgmt-hanaonazure==0.1.1
- azure-mgmt-iotcentral==0.1.0
- azure-mgmt-iothub==0.5.0
- azure-mgmt-iothubprovisioningservices==0.2.0
- azure-mgmt-keyvault==1.1.0
- azure-mgmt-loganalytics==0.2.0
- azure-mgmt-logic==3.0.0
- azure-mgmt-machinelearningcompute==0.4.1
- azure-mgmt-managementgroups==0.1.0
- azure-mgmt-managementpartner==0.1.1
- azure-mgmt-maps==0.1.0
- azure-mgmt-marketplaceordering==0.1.0
- azure-mgmt-media==1.0.1
- azure-mgmt-monitor==0.5.2
- azure-mgmt-msi==0.2.0
- azure-mgmt-network==2.7.0
- azure-mgmt-notificationhubs==2.1.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-policyinsights==0.1.0
- azure-mgmt-powerbiembedded==2.0.0
- azure-mgmt-rdbms==1.9.0
- azure-mgmt-recoveryservices==0.3.0
- azure-mgmt-recoveryservicesbackup==0.3.0
- azure-mgmt-redis==5.0.0
- azure-mgmt-relay==0.1.0
- azure-mgmt-reservations==0.2.1
- azure-mgmt-resource==2.2.0
- azure-mgmt-scheduler==2.0.0
- azure-mgmt-search==2.1.0
- azure-mgmt-servicebus==0.5.3
- azure-mgmt-servicefabric==0.2.0
- azure-mgmt-signalr==0.1.1
- azure-mgmt-sql==0.9.1
- azure-mgmt-storage==2.0.0
- azure-mgmt-subscription==0.2.0
- azure-mgmt-trafficmanager==0.50.0
- azure-mgmt-web==0.35.0
- azure-nspkg==3.0.2
- azure-servicebus==0.21.1
- azure-servicefabric==6.3.0.0
- azure-servicemanagement-legacy==0.20.8
- azure-storage-blob==1.5.0
- azure-storage-common==1.4.2
- azure-storage-file==1.4.0
- azure-storage-queue==1.4.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- msal==1.27.0
- msrest==0.7.1
- msrestazure==0.6.4.post1
- multidict==5.2.0
- oauthlib==3.2.2
- packaging==21.3
- pathlib2==2.3.7.post1
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyjwt==2.4.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- vcrpy==4.1.1
- wrapt==1.16.0
- yarl==1.7.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-data-lake-store-python
| [
"tests/test_cli.py::test_chown"
]
| [
"tests/test_core.py::test_ls_touch",
"tests/test_core.py::test_rm",
"tests/test_core.py::test_bad_open",
"tests/test_core.py::test_info",
"tests/test_core.py::test_exists",
"tests/test_core.py::test_read_delimited_block"
]
| [
"tests/test_cli.py::test_cat",
"tests/test_cli.py::test_chgrp",
"tests/test_cli.py::test_chmod",
"tests/test_cli.py::test_df",
"tests/test_cli.py::test_du",
"tests/test_cli.py::test_exists",
"tests/test_cli.py::test_get",
"tests/test_cli.py::test_head",
"tests/test_cli.py::test_head_bytes",
"tests/test_cli.py::test_info",
"tests/test_cli.py::test_ls",
"tests/test_cli.py::test_ls_detailed",
"tests/test_cli.py::test_mkdir_and_rmdir",
"tests/test_cli.py::test_mv",
"tests/test_cli.py::test_put",
"tests/test_cli.py::test_tail",
"tests/test_cli.py::test_tail_bytes",
"tests/test_cli.py::test_touch_and_rm",
"tests/test_core.py::test_simple",
"tests/test_core.py::test_idempotent_connect",
"tests/test_core.py::test_pickle",
"tests/test_core.py::test_seek",
"tests/test_core.py::test_concat",
"tests/test_core.py::test_errors",
"tests/test_core.py::test_glob_walk",
"tests/test_core.py::test_df",
"tests/test_core.py::test_move",
"tests/test_core.py::test_cat",
"tests/test_core.py::test_full_read",
"tests/test_core.py::test_tail_head",
"tests/test_core.py::test_readline",
"tests/test_core.py::test_touch_exists",
"tests/test_core.py::test_write_in_read_mode",
"tests/test_core.py::test_readlines",
"tests/test_core.py::test_put",
"tests/test_core.py::test_get",
"tests/test_core.py::test_du",
"tests/test_core.py::test_text_bytes",
"tests/test_core.py::test_append",
"tests/test_core.py::test_write_empty",
"tests/test_core.py::test_write_blocks",
"tests/test_core.py::test_gzip",
"tests/test_core.py::test_fooable",
"tests/test_core.py::test_closed",
"tests/test_core.py::test_TextIOWrapper",
"tests/test_core.py::test_array",
"tests/test_core.py::test_delimiters_newline",
"tests/test_core.py::test_delimiters_dash",
"tests/test_core.py::test_chmod",
"tests/test_core.py::test_chown",
"tests/test_core.py::test_forward_slash"
]
| []
| MIT License | 784 | [
"adlfs/cli.py"
]
| [
"adlfs/cli.py"
]
|
|
zalando-stups__senza-367 | a3630b4bdf09dd65df63c019bed23066a86efba6 | 2016-09-29 09:52:13 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/utils.py b/senza/manaus/utils.py
new file mode 100644
index 0000000..97b8d55
--- /dev/null
+++ b/senza/manaus/utils.py
@@ -0,0 +1,15 @@
+from typing import Dict, Optional # noqa: F401
+
+from botocore.exceptions import ClientError
+
+__all__ = ["extract_client_error_code"]
+
+
+def extract_client_error_code(exception: ClientError) -> Optional[str]:
+ """
+ Extracts the client error code from a boto ClientError exception. Returns
+ None if it fails.
+ """
+ error = exception.response.get('Error', {}) # type: Dict[str, Optional[str]]
+ error_code = error.get('Code')
+ return error_code
diff --git a/senza/traffic.py b/senza/traffic.py
index 60bc6b0..5eb8351 100644
--- a/senza/traffic.py
+++ b/senza/traffic.py
@@ -8,11 +8,13 @@ import dns.resolver
from clickclick import Action, action, ok, print_table, warning
from .aws import StackReference, get_stacks, get_tag
+from .manaus import ClientError
from .manaus.boto_proxy import BotoClientProxy
from .manaus.cloudformation import CloudFormationStack, ResourceType
from .manaus.exceptions import ELBNotFound, StackNotFound, StackNotUpdated
from .manaus.route53 import (RecordType, Route53, Route53HostedZone,
convert_cname_records_to_alias)
+from .manaus.utils import extract_client_error_code
PERCENT_RESOLUTION = 2
FULL_PERCENTAGE = PERCENT_RESOLUTION * 100
@@ -254,7 +256,12 @@ def get_stack_versions(stack_name: str, region: str) -> Iterator[StackVersion]:
for res in details.resource_summaries.all():
if res.resource_type == 'AWS::ElasticLoadBalancing::LoadBalancer':
elb = BotoClientProxy('elb', region)
- lbs = elb.describe_load_balancers(LoadBalancerNames=[res.physical_resource_id])
+ try:
+ lbs = elb.describe_load_balancers(LoadBalancerNames=[res.physical_resource_id])
+ except ClientError as e:
+ error_code = extract_client_error_code(e)
+ if error_code == 'LoadBalancerNotFound':
+ continue
lb_dns_name.append(lbs['LoadBalancerDescriptions'][0]['DNSName'])
elif res.resource_type == 'AWS::Route53::RecordSet':
if 'version' not in res.logical_id.lower():
| Traffic command should continue when a stack is being turned down
If a stack is being deleted `senza traffic ...` fails with an error like:
<details>
```
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/senza/error_handling.py", line 66, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/senza/cli.py", line 1073, in traffic
print_version_traffic(ref, region)
File "/usr/local/lib/python3.5/dist-packages/senza/traffic.py", line 292, in print_version_traffic
versions = list(get_stack_versions(stack_ref.name, region))
File "/usr/local/lib/python3.5/dist-packages/senza/traffic.py", line 228, in get_stack_versions
lbs = elb.describe_load_balancers(LoadBalancerNames=[res.physical_resource_id])
File "/usr/local/lib/python3.5/dist-packages/botocore/client.py", line 278, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python3.5/dist-packages/botocore/client.py", line 572, in _make_api_call
raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (LoadBalancerNotFound) when calling the DescribeLoadBalancers operation: Cannot find Load Balancer xxx-XXXX
```
</details>
It should be able to continue and check the remaining stacks/elbs | zalando-stups/senza | diff --git a/tests/test_traffic.py b/tests/test_traffic.py
index 33a022d..0e3995b 100644
--- a/tests/test_traffic.py
+++ b/tests/test_traffic.py
@@ -1,7 +1,10 @@
from unittest.mock import MagicMock
+
+import botocore.exceptions
from senza.aws import SenzaStackSummary
-from senza.traffic import get_stack_versions, StackVersion, get_weights, resolve_to_ip_addresses
from senza.manaus.route53 import RecordType
+from senza.traffic import (StackVersion, get_stack_versions, get_weights,
+ resolve_to_ip_addresses)
def test_get_stack_versions(monkeypatch):
@@ -38,7 +41,20 @@ def test_get_stack_versions(monkeypatch):
return_value=[SenzaStackSummary(stack), SenzaStackSummary({'StackStatus': 'ROLLBACK_COMPLETE',
'StackName': 'my-stack-1'})]))
stack_version = list(get_stack_versions('my-stack', 'my-region'))
- assert stack_version == [StackVersion('my-stack', '1', ['myapp.example.org'], ['elb-dns-name'], ['some-arn'])]
+ assert stack_version == [StackVersion('my-stack', '1',
+ ['myapp.example.org'],
+ ['elb-dns-name'],
+ ['some-arn'])]
+
+ elb.describe_load_balancers.side_effect = botocore.exceptions.ClientError(
+ {'Error': {'Code': 'LoadBalancerNotFound'}},
+ 'foobar'
+ )
+ stack_version = list(get_stack_versions('my-stack', 'my-region'))
+ assert stack_version == [StackVersion('my-stack', '1',
+ ['myapp.example.org'],
+ [],
+ ['some-arn'])]
def test_get_weights(monkeypatch):
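(A minimal sketch of the error-code extraction that the patch and the test above rely on, assuming botocore is installed; the helper mirrors `senza.manaus.utils.extract_client_error_code` from this record's patch:)
```python
from botocore.exceptions import ClientError

def extract_client_error_code(exception):
    # Mirrors senza.manaus.utils.extract_client_error_code from the patch.
    return exception.response.get('Error', {}).get('Code')

err = ClientError({'Error': {'Code': 'LoadBalancerNotFound'}}, 'DescribeLoadBalancers')
assert extract_client_error_code(err) == 'LoadBalancerNotFound'
```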
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
clickclick==20.10.2
coverage==6.2
dnspython==1.15.0
dnspython3==1.15.0
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
jmespath==0.10.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pystache==0.6.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-mock==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.1
raven==6.10.0
requests==2.27.1
s3transfer==0.5.2
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@a3630b4bdf09dd65df63c019bed23066a86efba6#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- boto3==1.23.10
- botocore==1.26.10
- charset-normalizer==2.0.12
- click==8.0.4
- clickclick==20.10.2
- coverage==6.2
- dnspython==1.15.0
- dnspython3==1.15.0
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jmespath==0.10.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pystache==0.6.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- raven==6.10.0
- requests==2.27.1
- s3transfer==0.5.2
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/senza
| [
"tests/test_traffic.py::test_get_stack_versions"
]
| []
| [
"tests/test_traffic.py::test_get_weights",
"tests/test_traffic.py::test_resolve_to_ip_addresses"
]
| []
| Apache License 2.0 | 785 | [
"senza/traffic.py",
"senza/manaus/utils.py"
]
| [
"senza/traffic.py",
"senza/manaus/utils.py"
]
|
|
jboss-dockerfiles__dogen-52 | ec71541a5a3b30972dc26ef51eb75baaecd7ee73 | 2016-09-29 14:30:36 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/cli.py b/dogen/cli.py
index 79eafa7..25200ef 100644
--- a/dogen/cli.py
+++ b/dogen/cli.py
@@ -43,12 +43,6 @@ class CLI(object):
plugins = self.get_plugins()
- for plugin in plugins:
- key, description = plugins[plugin].info()
- epilog += "\n * %s:\t%s" % (key, description)
-
- parser.epilog = epilog
-
parser.add_argument(
'-v', '--verbose', action='store_true', help='Verbose output')
@@ -60,11 +54,16 @@ class CLI(object):
parser.add_argument('--scripts-path', help='Location of the scripts directory containing script packages.')
parser.add_argument('--additional-script', action='append', help='Location of additional script (can be url). Can be specified multiple times.')
parser.add_argument('--template', help='Path to custom template (can be url)')
- parser.add_argument('--plugin', action='append', help='Plugin to be enabled. Can be specified multiple times.')
parser.add_argument('path', help="Path to yaml descriptor to process")
parser.add_argument('output', help="Path to directory where generated files should be saved")
+ for plugin in plugins:
+ key, description = plugins[plugin].info()
+ epilog += "\n * %s:\t%s" % (key, description)
+ parser = plugins[plugin].inject_args(parser)
+
+ parser.epilog = epilog
args = parser.parse_args()
if args.verbose:
@@ -72,22 +71,16 @@ class CLI(object):
else:
self.log.setLevel(logging.INFO)
- if args.skip_ssl_verification:
- ssl_verify = False
- else:
- ssl_verify = None
self.log.debug("Running version %s", version)
enabled_plugins = []
- if args.plugin:
- for plugin in plugins:
- if plugins[plugin].info()[0] in args.plugin:
- enabled_plugins.append(plugins[plugin])
+ for plugin in plugins:
+ enabled_plugins.append(plugins[plugin])
try:
- Generator(self.log, args.path, args.output, template=args.template, scripts_path=args.scripts_path, additional_scripts=args.additional_script, without_sources=args.without_sources, plugins=enabled_plugins, ssl_verify=ssl_verify).run()
+ Generator(self.log, args=args, plugins=enabled_plugins).run()
except KeyboardInterrupt as e:
pass
except Error as e:
diff --git a/dogen/generator.py b/dogen/generator.py
index 0099e67..98fa73e 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -18,22 +18,25 @@ from dogen import version, DEFAULT_SCRIPT_EXEC, DEFAULT_SCRIPT_USER
from dogen.errors import Error
class Generator(object):
- def __init__(self, log, descriptor, output, template=None, scripts_path=None, additional_scripts=None, without_sources=False, plugins=[], ssl_verify=None):
+ def __init__(self, log, args, plugins=[]):
self.log = log
self.pwd = os.path.realpath(os.path.dirname(os.path.realpath(__file__)))
- self.descriptor = os.path.realpath(descriptor)
- self.without_sources = without_sources
- self.output = output
+ self.descriptor = os.path.realpath(args.path)
+ self.without_sources = args.without_sources
+ self.output = args.output
self.dockerfile = os.path.join(self.output, "Dockerfile")
- self.template = template
- self.scripts_path = scripts_path
- self.additional_scripts = additional_scripts
+ self.template = args.template
+ self.scripts_path = args.scripts_path
+ self.additional_scripts = args.additional_script
+
+ ssl_verify = None
+ if args.skip_ssl_verification:
+ ssl_verify = False
self.ssl_verify = ssl_verify
self.plugins = []
-
for plugin in plugins:
- self.plugins.append(plugin(self))
+ self.plugins.append(plugin(self, args))
def _fetch_file(self, location, output=None):
"""
diff --git a/dogen/plugin.py b/dogen/plugin.py
index ef231a3..66269bf 100644
--- a/dogen/plugin.py
+++ b/dogen/plugin.py
@@ -1,10 +1,15 @@
class Plugin(object):
- def __init__(self, dogen):
+ def __init__(self, dogen, args):
self.dogen = dogen
self.log = dogen.log
self.descriptor = dogen.descriptor
self.output = dogen.output
+ self.args = args
+
+ @staticmethod
+ def inject_args(parser):
+ return parser
def prepare(self, **kwargs):
pass
diff --git a/dogen/plugins/cct.py b/dogen/plugins/cct.py
index 05e66f6..c5bd31d 100644
--- a/dogen/plugins/cct.py
+++ b/dogen/plugins/cct.py
@@ -11,8 +11,8 @@ class CCT(Plugin):
def info():
return "cct", "Support for configuring images via cct"
- def __init__(self, dogen):
- super(CCT, self).__init__(dogen)
+ def __init__(self, dogen, args):
+ super(CCT, self).__init__(dogen, args)
def extend_schema(self, parent_schema):
"""
diff --git a/dogen/plugins/dist_git.py b/dogen/plugins/dist_git.py
index 0c119cd..3ba47f6 100644
--- a/dogen/plugins/dist_git.py
+++ b/dogen/plugins/dist_git.py
@@ -11,15 +11,26 @@ class DistGitPlugin(Plugin):
def info():
return "dist-git", "Support for dist-git repositories"
- def __init__(self, dogen):
- super(DistGitPlugin, self).__init__(dogen)
+ @staticmethod
+ def inject_args(parser):
+ parser.add_argument('--enable-dist-git', action='store_true', help='Enables dist-git plugin')
+ return parser
+
+ def __init__(self, dogen, args):
+ super(DistGitPlugin, self).__init__(dogen, args)
+ if not self.args.enable_dist_git:
+ return
self.git = Git(self.log, os.path.dirname(self.descriptor), self.output)
def prepare(self, cfg):
+ if not self.args.enable_dist_git:
+ return
self.git.prepare()
self.git.clean_scripts()
def after_sources(self, files):
+ if not self.args.enable_dist_git:
+ return
self.git.update_lookaside_cache(files)
self.git.update()
diff --git a/dogen/plugins/rpm.py b/dogen/plugins/rpm.py
index 30fa047..0f6abf2 100644
--- a/dogen/plugins/rpm.py
+++ b/dogen/plugins/rpm.py
@@ -9,8 +9,8 @@ class RPM(Plugin):
def info():
return "rpm","Support for injecting custom rpms"
- def __init__(self, dogen):
- super(RPM, self).__init__(dogen)
+ def __init__(self, dogen, args):
+ super(RPM, self).__init__(dogen, args)
self.rpms_directory = os.path.join(os.path.dirname(self.descriptor), "rpms")
def extend_schema(self, parent_schema):
| Better way for invoking plugins
Currently plugins are invoked via the --plugin [name] command line option. It complicates things by requiring different command lines for projects with different plugins, and it's unneeded.
Plugins can be invoked automatically, each with its own trigger action (see the sketch after this list):
* command line option
* key in image.yaml
* directory existence
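(A minimal sketch of the command-line trigger, modeled on the `inject_args` hook in this record's patch; the flag name is taken from the dist-git plugin:)
```python
import argparse

class Plugin(object):
    @staticmethod
    def inject_args(parser):
        # Default: a plugin adds no arguments.
        return parser

class DistGitPlugin(Plugin):
    @staticmethod
    def inject_args(parser):
        # Each plugin registers its own trigger flag...
        parser.add_argument('--enable-dist-git', action='store_true',
                            help='Enables dist-git plugin')
        return parser

parser = argparse.ArgumentParser()
for plugin_cls in (Plugin, DistGitPlugin):
    parser = plugin_cls.inject_args(parser)
args = parser.parse_args(['--enable-dist-git'])
assert args.enable_dist_git  # ...and self-enables from the parsed args
```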
With this approach we also need to document it/create plugin development guidelines | jboss-dockerfiles/dogen | diff --git a/tests/test_cct_plugin.py b/tests/test_cct_plugin.py
index 1cc4c24..ff2954c 100644
--- a/tests/test_cct_plugin.py
+++ b/tests/test_cct_plugin.py
@@ -14,7 +14,7 @@ class MockDogen():
class TestCCTPlugin(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_cct_plugin')
- self.cct = cct.CCT(dogen=MockDogen())
+ self.cct = cct.CCT(dogen=MockDogen(), args=None)
def teardown(self):
shutil.rmtree(self.workdir)
diff --git a/tests/test_schema.py b/tests/test_schema.py
index ea3acf0..25abf46 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -1,3 +1,4 @@
+import argparse
import unittest
import mock
import os
@@ -10,7 +11,10 @@ class TestSchemaMeta(type):
def __new__(mcls, name, bases, dict):
def gen_test(path, good):
def test(self):
- generator = Generator(self.log, path, "target")
+ args = argparse.Namespace(path=path, output="target", without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
+ generator = Generator(self.log, args)
if good:
generator.configure()
else:
diff --git a/tests/test_unit_generate.py b/tests/test_unit_generate.py
index a1660c9..06b0b97 100644
--- a/tests/test_unit_generate.py
+++ b/tests/test_unit_generate.py
@@ -1,3 +1,4 @@
+import argparse
import unittest
import mock
import tempfile
@@ -19,7 +20,10 @@ class TestGenerateCustomRepoFiles(unittest.TestCase):
f.write(self.basic_config.encode())
f.write("dogen:\n ssl_verify: true".encode())
- self.generator = Generator(self.log, self.descriptor.name, "target")
+ args = argparse.Namespace(path=self.descriptor.name, output="target", without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
+ self.generator = Generator(self.log, args)
self.generator.configure()
def tearDown(self):
diff --git a/tests/test_unit_generate_configuration.py b/tests/test_unit_generate_configuration.py
index 401ecab..adcbafd 100644
--- a/tests/test_unit_generate_configuration.py
+++ b/tests/test_unit_generate_configuration.py
@@ -1,3 +1,4 @@
+import argparse
import unittest
import mock
import six
@@ -19,12 +20,14 @@ class TestConfig(unittest.TestCase):
self.log = mock.Mock()
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.descriptor.write(self.basic_config.encode())
-
+ self.args = argparse.Namespace(path=self.descriptor.name, output="target", without_sources=False,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
def tearDown(self):
os.remove(self.descriptor.name)
def test_default_values(self):
- self.generator = Generator(self.log, self.descriptor.name, "target")
+ self.generator = Generator(self.log, self.args)
self.assertEqual(self.generator.output, "target")
self.assertEqual(self.generator.dockerfile, "target/Dockerfile")
self.assertEqual(self.generator.descriptor, self.descriptor.name)
@@ -39,7 +42,7 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n version: 99999.9.9-dev1".encode())
- self.generator = Generator(self.log, self.descriptor.name, "target")
+ self.generator = Generator(self.log, self.args)
with self.assertRaises(Error) as cm:
self.generator.configure()
@@ -51,7 +54,7 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n ssl_verify: false".encode())
- generator = Generator(self.log, self.descriptor.name, "target")
+ generator = Generator(self.log, self.args)
generator.configure()
self.assertFalse(generator.ssl_verify)
@@ -59,7 +62,7 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n ssl_verify: true".encode())
- generator = Generator(self.log, self.descriptor.name, "target")
+ generator = Generator(self.log, self.args)
generator.configure()
self.assertTrue(generator.ssl_verify)
@@ -67,31 +70,25 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n template: custom-template.jinja".encode())
- generator = Generator(self.log, self.descriptor.name, "target")
+ generator = Generator(self.log, self.args)
generator.configure()
self.assertEqual(generator.template, "custom-template.jinja")
def test_custom_template_in_cli_should_override_in_descriptor(self):
with self.descriptor as f:
f.write("dogen:\n template: custom-template.jinja".encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", template="cli-template.jinja")
+ args = self.args
+ args.template="cli-template.jinja"
+ generator = Generator(self.log, args)
generator.configure()
self.assertEqual(generator.template, "cli-template.jinja")
- def test_do_not_skip_ssl_verification_in_cli_true_should_override_descriptor(self):
- with self.descriptor as f:
- f.write("dogen:\n ssl_verify: false".encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", ssl_verify=True)
- generator.configure()
- self.assertTrue(generator.ssl_verify)
-
def test_do_not_skip_ssl_verification_in_cli_false_should_override_descriptor(self):
with self.descriptor as f:
f.write("dogen:\n ssl_verify: true".encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", ssl_verify=False)
+ args = self.args
+ args.skip_ssl_verification=True
+ generator = Generator(self.log, args)
generator.configure()
self.assertFalse(generator.ssl_verify)
@@ -100,7 +97,7 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n scripts_path: custom-scripts".encode())
- generator = Generator(self.log, self.descriptor.name, "target")
+ generator = Generator(self.log, self.args)
generator.configure()
mock_patch.assert_called_with('custom-scripts')
self.assertEqual(generator.scripts_path, "custom-scripts")
@@ -109,8 +106,9 @@ class TestConfig(unittest.TestCase):
def test_custom_scripts_dir_in_cli_should_override_in_descriptor(self, mock_patch):
with self.descriptor as f:
f.write("dogen:\n template: custom-scripts".encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", scripts_path="custom-scripts-cli")
+ args = self.args
+ args.scripts_path="custom-scripts-cli"
+ generator = Generator(self.log, args)
generator.configure()
mock_patch.assert_called_with('custom-scripts-cli')
self.assertEqual(generator.scripts_path, "custom-scripts-cli")
@@ -120,7 +118,7 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n scripts_path: custom-scripts".encode())
- generator = Generator(self.log, self.descriptor.name, "target")
+ generator = Generator(self.log, self.args)
generator.configure()
mock_patch.assert_called_with('custom-scripts')
self.assertEqual(generator.scripts_path, "custom-scripts")
@@ -129,15 +127,16 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write("dogen:\n additional_scripts:\n - http://host/somescript".encode())
- generator = Generator(self.log, self.descriptor.name, "target")
+ generator = Generator(self.log, self.args)
generator.configure()
self.assertEqual(generator.additional_scripts, ["http://host/somescript"])
def test_custom_additional_scripts_in_cli_should_override_in_descriptor(self):
with self.descriptor as f:
f.write("dogen:\n additional_scripts:\n - http://host/somescript".encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", additional_scripts=["https://otherhost/otherscript"])
+ args = self.args
+ args.additional_script=["https://otherhost/otherscript"]
+ generator = Generator(self.log, args)
generator.configure()
self.assertEqual(generator.additional_scripts, ["https://otherhost/otherscript"])
@@ -146,8 +145,9 @@ class TestConfig(unittest.TestCase):
"""Helper method for tests around script exec value"""
with self.descriptor as f:
f.write(cfg.encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", scripts_path="scripts")
+ args = self.args
+ args.scripts_path="scripts"
+ generator = Generator(self.log, args)
generator.configure()
generator._handle_scripts()
self.assertEqual(generator.cfg['scripts'][0]['exec'], exec_to_test)
@@ -202,8 +202,9 @@ class TestConfig(unittest.TestCase):
with self.descriptor as f:
f.write(cfg.encode())
-
- generator = Generator(self.log, self.descriptor.name, "target", scripts_path="scripts")
+ args = self.args
+ args.scripts_path="scripts"
+ generator = Generator(self.log, args)
generator.configure()
generator._handle_scripts()
self.assertEqual(generator.cfg['scripts'][0]['user'], user_to_test)
@@ -255,7 +256,9 @@ class TestConfig(unittest.TestCase):
"make sure _handle_scripts doesn't blow up when there are no scripts"
self.descriptor.close()
- generator = Generator(self.log, self.descriptor.name, "target", scripts_path="scripts")
+ args = self.args
+ args.scripts_path="scripts"
+ generator = Generator(self.log, args)
generator.configure()
generator._handle_scripts()
# success if no stack trace thrown
diff --git a/tests/test_unit_generate_handle_files.py b/tests/test_unit_generate_handle_files.py
index 1956c2a..51bd803 100644
--- a/tests/test_unit_generate_handle_files.py
+++ b/tests/test_unit_generate_handle_files.py
@@ -1,3 +1,4 @@
+import argparse
import unittest
import mock
import six
@@ -9,7 +10,10 @@ from dogen.tools import Tools
class TestURL(unittest.TestCase):
def setUp(self):
self.log = mock.Mock()
- self.generator = Generator(self.log, "image.yaml", "target")
+ args = argparse.Namespace(path="image.yaml", output="target", without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
+ self.generator = Generator(self.log, args)
def test_local_file(self):
self.assertFalse(Tools.is_url("a_file.tmp"))
@@ -23,7 +27,10 @@ class TestURL(unittest.TestCase):
class TestFetchFile(unittest.TestCase):
def setUp(self):
self.log = mock.Mock()
- self.generator = Generator(self.log, "image.yaml", "target")
+ args = argparse.Namespace(path="image.yaml", output="target", without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
+ self.generator = Generator(self.log, args)
@mock.patch('dogen.generator.requests.get')
def test_fetching_with_filename(self, mock_requests):
@@ -55,10 +62,17 @@ class TestFetchFile(unittest.TestCase):
class TestCustomTemplateHandling(unittest.TestCase):
def setUp(self):
self.log = mock.Mock()
- self.generator = Generator(self.log, "image.yaml", "target", template="http://host/custom-template")
+ args = argparse.Namespace(path="image.yaml", output="target", without_sources=None,
+ template="http://host/custom-template", scripts_path=None,
+ additional_script=None, skip_ssl_verification=None)
+ self.generator = Generator(self.log, args)
def test_do_not_fail_if_no_template_is_provided(self):
- self.generator = Generator(self.log, "image.yaml", "target")
+ args = argparse.Namespace(path="image.yaml", output="target", without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
+ self.generator = Generator(self.log, args)
+
fetch_file_mock = mock.Mock()
self.generator._fetch_file = fetch_file_mock
diff --git a/tests/test_user.py b/tests/test_user.py
index 1c87429..e261886 100644
--- a/tests/test_user.py
+++ b/tests/test_user.py
@@ -1,3 +1,4 @@
+import argparse
import tempfile
import unittest
import mock
@@ -28,7 +29,9 @@ class TestUser(unittest.TestCase):
self.yaml = os.path.join(self.workdir, "image.yaml")
self.target = os.path.join(self.workdir, "target")
os.mkdir(self.target)
-
+ self.args = argparse.Namespace(path=self.yaml, output=self.target, without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None)
with open(self.yaml, 'wb') as f:
f.write(self.basic_config.encode())
@@ -44,7 +47,7 @@ class TestUser(unittest.TestCase):
with open(self.yaml, 'ab') as f:
f.write("user: 1347".encode())
- generator = Generator(self.log, self.yaml, self.target)
+ generator = Generator(self.log, self.args)
generator.configure()
generator.render_from_template()
@@ -61,7 +64,7 @@ class TestUser(unittest.TestCase):
instruction in the Dockerfile, immediately before the CMD,
defaulting to uid 0.
"""
- generator = Generator(self.log, self.yaml, self.target)
+ generator = Generator(self.log, self.args)
generator.configure()
generator.render_from_template()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 6
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@ec71541a5a3b30972dc26ef51eb75baaecd7ee73#egg=dogen
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==2.8
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pykwalify==1.8.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.8.1
ruamel.yaml==0.18.3
ruamel.yaml.clib==0.2.8
six==1.10.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- docopt==0.6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==2.8
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pykwalify==1.8.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.8.1
- ruamel-yaml==0.18.3
- ruamel-yaml-clib==0.2.8
- six==1.10.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_cct_plugin.py::TestCCTPlugin::test_default_user_root",
"tests/test_unit_generate.py::TestGenerateCustomRepoFiles::test_custom_repo_files_should_add_two",
"tests/test_unit_generate.py::TestGenerateCustomRepoFiles::test_custom_repo_files_should_not_fail",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_additional_scripts_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_additional_scripts_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_exec_not_env",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_user_not_default",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_user_not_env",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_scripts_dir_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_scripts_dir_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_template_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_template_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_script_user",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_values",
"tests/test_unit_generate_configuration.py::TestConfig::test_do_not_skip_ssl_verification_in_cli_false_should_override_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_do_not_skip_ssl_verification_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_env_provided_script_user",
"tests/test_unit_generate_configuration.py::TestConfig::test_env_supplied_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_fail_if_version_mismatch",
"tests/test_unit_generate_configuration.py::TestConfig::test_no_scripts_defined",
"tests/test_unit_generate_configuration.py::TestConfig::test_scripts_dir_found_by_convention",
"tests/test_unit_generate_configuration.py::TestConfig::test_skip_ssl_verification_in_descriptor",
"tests/test_unit_generate_handle_files.py::TestURL::test_local_file",
"tests/test_unit_generate_handle_files.py::TestURL::test_remote_http_file",
"tests/test_unit_generate_handle_files.py::TestURL::test_remote_https_file",
"tests/test_unit_generate_handle_files.py::TestFetchFile::test_fetching_with_filename",
"tests/test_unit_generate_handle_files.py::TestFetchFile::test_fetching_with_tmpfile",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_do_not_fail_if_no_template_is_provided",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_fetch_template_success",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_fetch_template_with_error",
"tests/test_user.py::TestUser::test_default_cmd_user",
"tests/test_user.py::TestUser::test_set_cmd_user"
]
| []
| []
| []
| MIT License | 786 | [
"dogen/plugins/dist_git.py",
"dogen/generator.py",
"dogen/plugins/rpm.py",
"dogen/cli.py",
"dogen/plugins/cct.py",
"dogen/plugin.py"
]
| [
"dogen/plugins/dist_git.py",
"dogen/generator.py",
"dogen/plugins/rpm.py",
"dogen/cli.py",
"dogen/plugins/cct.py",
"dogen/plugin.py"
]
|
|
Pylons__webob-286 | 8bed3b0112df7b1de3755a37924979799749e5f2 | 2016-09-30 05:10:52 | 5ec5ca2e45b70ff4ee9a2c74c77a04b71d6290fd | bertjwregeer: ```
# master before this branch
Benchmark : hello
Frameworks: pyramid
ms rps tcalls funcs
pyramid 9430 10604 144 73
# Fixes applied
(howareyou)alexandra:howareyou xistence$ howareyou -b hello -f pyramid
Benchmark : hello
Frameworks: pyramid
ms rps tcalls funcs
pyramid 8112 12327 123 72
```
bertjwregeer: ```
# WebOb 1.6.1
Benchmark : hello
Frameworks: pyramid
ms rps tcalls funcs
pyramid 5299 18870 74 58
# WebOb with this branch
Benchmark : hello
Frameworks: pyramid
ms rps tcalls funcs
pyramid 5051 19797 71 56
``` | diff --git a/webob/descriptors.py b/webob/descriptors.py
index 5fd26eb..15867ce 100644
--- a/webob/descriptors.py
+++ b/webob/descriptors.py
@@ -146,10 +146,7 @@ def header_getter(header, rfc_section):
r._headerlist.append((header, value))
def fdel(r):
- items = r._headerlist
- for i in range(len(items)-1, -1, -1):
- if items[i][0].lower() == key:
- del items[i]
+ r._headerlist[:] = [(k, v) for (k, v) in r._headerlist if k.lower() != key]
return property(fget, fset, fdel, doc)
diff --git a/webob/headers.py b/webob/headers.py
index e2f5e34..55385c8 100644
--- a/webob/headers.py
+++ b/webob/headers.py
@@ -21,11 +21,7 @@ class ResponseHeaders(MultiDict):
def getall(self, key):
key = key.lower()
- result = []
- for k, v in self._items:
- if k.lower() == key:
- result.append(v)
- return result
+ return [v for (k, v) in self._items if k.lower() == key]
def mixed(self):
r = self.dict_of_lists()
@@ -42,10 +38,7 @@ class ResponseHeaders(MultiDict):
def __setitem__(self, key, value):
norm_key = key.lower()
- items = self._items
- for i in range(len(items)-1, -1, -1):
- if items[i][0].lower() == norm_key:
- del items[i]
+ self._items[:] = [(k, v) for (k, v) in self._items if k.lower() != norm_key]
self._items.append((key, value))
def __delitem__(self, key):
diff --git a/webob/response.py b/webob/response.py
index 607ac34..98a5699 100644
--- a/webob/response.py
+++ b/webob/response.py
@@ -212,8 +212,25 @@ class Response(object):
else:
self._headerlist = headerlist
- # Set up the content_type
- content_type = content_type or self.default_content_type
+ # Set the encoding for the Response to charset, so if a charset is
+ # passed but the Content-Type does not allow for a charset, we can
+ # still encode text_type bodies.
+ # r = Response(
+ # content_type='application/foo',
+ # charset='UTF-8',
+ # body=u'somebody')
+ # Should work without issues, and the header will be correctly set to
+ # Content-Type: application/foo with no charset on it.
+
+ encoding = None
+ if charset is not _marker:
+ encoding = charset
+
+ # Does the status code have a body or not?
+ code_has_body = (
+ self._status[0] != '1' and
+ self._status[:3] not in ('204', '205', '304')
+ )
# We only set the content_type to the one passed to the constructor or
# the default content type if there is none that exists AND there was
@@ -223,27 +240,59 @@ class Response(object):
#
# Also allow creation of an empty Response with just the status set to a
# Response with empty body, such as Response(status='204 No Content')
- # without the default content_type being set
-
- if (
- self.content_type is None and
- headerlist is None and
- _code_has_body(self.status_code)
- ):
- self.content_type = content_type
-
- # Set up the charset
+ # without the default content_type being set (since empty bodies have
+ # no Content-Type)
#
- # In contrast with the above, if a charset is not set but there is a
- # content_type we will set the default charset if the content_type
- # allows for a charset.
+ # Check if content_type is set because default_content_type could be
+ # None, in which case there is no content_type, and thus we don't need
+ # to do anything
- if self.content_type:
- if not self.charset and charset is not _marker:
- self.charset = charset
- elif not self.charset and self.default_charset:
- if _content_type_has_charset(self.content_type):
- self.charset = self.default_charset
+ content_type = content_type or self.default_content_type
+
+ if headerlist is None and code_has_body and content_type:
+ # Set up the charset, if the content_type doesn't already have one
+
+ has_charset = 'charset=' in content_type
+
+ # If the Content-Type already has a charset, we don't set the user
+ # provided charset on the Content-Type, so we shouldn't use it as
+ # the encoding for text_type based bodies.
+ if has_charset:
+ encoding = None
+
+ # Do not use the default_charset for the encoding because we
+ # want things like
+ # Response(content_type='image/jpeg',body=u'foo') to raise when
+ # trying to encode the body.
+
+ new_charset = encoding
+
+ if (
+ not has_charset and
+ charset is _marker and
+ self.default_charset
+ ):
+ new_charset = self.default_charset
+
+ # Optimize for the default_content_type as shipped by
+ # WebOb, because we know that 'text/html' has a charset,
+ # otherwise add a charset if the content_type allows for a charset.
+ #
+ # Even if the user supplied charset explicitly, we do not add
+ # it to the Content-Type unless it has a charset; instead
+ # the user supplied charset is solely used for encoding the
+ # body if it is a text_type
+
+ if (
+ new_charset and
+ (
+ content_type == 'text/html' or
+ _content_type_has_charset(content_type)
+ )
+ ):
+ content_type += '; charset=' + new_charset
+
+ self._headerlist.append(('Content-Type', content_type))
# Set up conditional response
if conditional_response is None:
@@ -251,17 +300,30 @@ class Response(object):
else:
self.conditional_response = bool(conditional_response)
- # Set up app_iter
- if app_iter is None:
+ # Set up app_iter if the HTTP Status code has a body
+ if app_iter is None and code_has_body:
if isinstance(body, text_type):
- encoding = self.charset
+ # Fall back to trying self.charset if encoding is not set. In
+ # most cases encoding will be set to the default value.
+ encoding = encoding or self.charset
if encoding is None:
raise TypeError(
"You cannot set the body to a text value without a "
"charset")
body = body.encode(encoding)
app_iter = [body]
- self.headers['Content-Length'] = str(len(body))
+
+ if headerlist is not None:
+ self._headerlist[:] = [
+ (k, v)
+ for (k, v)
+ in self._headerlist
+ if k.lower() != 'content-length'
+ ]
+ self._headerlist.append(('Content-Length', str(len(body))))
+ elif app_iter is None or not code_has_body:
+ app_iter = [b'']
+
self._app_iter = app_iter
# Loop through all the remaining keyword arguments
@@ -439,7 +501,7 @@ class Response(object):
The headers in a dictionary-like object
"""
if self._headers is None:
- self._headers = ResponseHeaders.view_list(self.headerlist)
+ self._headers = ResponseHeaders.view_list(self._headerlist)
return self._headers
def _headers__set(self, value):
@@ -1206,6 +1268,23 @@ class Response(object):
if set_content_md5:
self.content_md5 = md5_digest
+ @staticmethod
+ def _make_location_absolute(environ, value):
+ if SCHEME_RE.search(value):
+ return value
+
+ new_location = urlparse.urljoin(_request_uri(environ), value)
+ return new_location
+
+ def _abs_headerlist(self, environ):
+ # Build the headerlist; if we have a Location header, make it absolute
+ return [
+ (k, v) if k.lower() != 'location'
+ else (k, self._make_location_absolute(environ, v))
+ for (k, v)
+ in self._headerlist
+ ]
+
#
# __call__, conditional_response_app
#
@@ -1216,27 +1295,15 @@ class Response(object):
"""
if self.conditional_response:
return self.conditional_response_app(environ, start_response)
+
headerlist = self._abs_headerlist(environ)
+
start_response(self.status, headerlist)
if environ['REQUEST_METHOD'] == 'HEAD':
# Special case here...
return EmptyResponse(self._app_iter)
return self._app_iter
- def _abs_headerlist(self, environ):
- """Returns a headerlist, with the Location header possibly
- made absolute given the request environ.
- """
- headerlist = list(self.headerlist)
- for i, (name, value) in enumerate(headerlist):
- if name.lower() == 'location':
- if SCHEME_RE.search(value):
- break
- new_location = urlparse.urljoin(_request_uri(environ), value)
- headerlist[i] = (name, new_location)
- break
- return headerlist
-
_safe_methods = ('GET', 'HEAD')
def conditional_response_app(self, environ, start_response):
@@ -1248,7 +1315,9 @@ class Response(object):
* Range (406 Partial Content; only on GET, HEAD)
"""
req = BaseRequest(environ)
+
headerlist = self._abs_headerlist(environ)
+
method = environ.get('REQUEST_METHOD', 'GET')
if method in self._safe_methods:
status304 = False
@@ -1436,13 +1505,6 @@ class EmptyResponse(object):
__next__ = next # py3
-def _code_has_body(status_code):
- return (
- (not (100 <= status_code < 199)) and
- (status_code != 204) and
- (status_code != 304)
- )
-
def _is_xml(content_type):
return (
content_type.startswith('application/xml') or
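A side note on the header hunks above: descriptors.py, headers.py, and the new _abs_headerlist all replace index-based reverse deletion with a list comprehension, and the first two assign the result back through a slice. A standalone sketch of why the slice assignment matters (variable names here are illustrative only):
```
# Illustrative sketch of the in-place filtering idiom from the hunks above.
# `view` stands in for anything holding a reference to the same list, e.g.
# a ResponseHeaders created via view_list.
headers = [('Content-Type', 'text/html'), ('X-Foo', 'a'), ('x-foo', 'b')]
view = headers

norm_key = 'x-foo'
# Slice assignment mutates the existing list object, so `view` stays in
# sync; a plain `headers = [...]` would rebind the name and desynchronize.
headers[:] = [(k, v) for (k, v) in headers if k.lower() != norm_key]

assert view == [('Content-Type', 'text/html')]
```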
| performance degradation on trunk
I thought I'd try Morepath with webob trunk. It's a lot slower than before. But I'll demonstrate with Pyramid, as I don't want to confuse the issue by using development versions of Morepath. I use this benchmark:
https://github.com/faassen/howareyou
When I run Pyramid against webob 1.6.1:
```
$ howareyou -b hello -f pyramid
Benchmark : hello
Frameworks: pyramid
ms rps tcalls funcs
pyramid 4051 24687 74 58
```
When I run Pyramid against webob trunk:
```
Benchmark : hello
Frameworks: pyramid
ms rps tcalls funcs
pyramid 7410 13495 144 73
```
Here are the top profiled functions with WebOb 1.6.1:
```
ncalls tottime percall cumtime percall filename:lineno(function)
100000 0.661 0.000 3.921 0.000 router.py:66(handle_request)
100000 0.427 0.000 0.646 0.000 response.py:87(__init__)
100000 0.340 0.000 4.744 0.000 router.py:179(invoke_subrequest)
100000 0.239 0.000 0.456 0.000 traversal.py:632(__call__)
100000 0.238 0.000 0.285 0.000 response.py:1043(_abs_headerlist)
300000 0.236 0.000 0.273 0.000 interface.py:518(__hash__)
100000 0.219 0.000 5.828 0.000 run.py:46(<lambda>)
100000 0.204 0.000 5.524 0.000 router.py:227(__call__)
100000 0.170 0.000 0.767 0.000 urldispatch.py:80(__call__)
200000 0.152 0.000 0.249 0.000 decorator.py:39(__get__)
```
With master:
```
ncalls tottime percall cumtime percall filename:lineno(function)
100000 0.910 0.000 5.402 0.000 response.py:176(__init__)
700000 0.731 0.000 0.883 0.000 headers.py:15(__getitem__)
100000 0.688 0.000 8.905 0.000 router.py:66(handle_request)
700000 0.586 0.000 1.469 0.000 _abcoll.py:379(get)
300000 0.477 0.000 0.715 0.000 headers.py:43(__setitem__)
300000 0.372 0.000 1.505 0.000 response.py:762(_content_type__get)
100000 0.366 0.000 9.808 0.000 router.py:179(invoke_subrequest)
1000000 0.302 0.000 0.511 0.000 response.py:432(_headers__get)
100000 0.268 0.000 0.808 0.000 response.py:731(_charset__set)
100000 0.254 0.000 0.801 0.000 response.py:796(_content_type__set)
```
Various response header manipulation code now shows up in the profile whereas before it did not.
What's interesting is that if I run the profiler against the development version of Morepath, I see almost the same webob functions dominating the benchmark in the same order, whereas normally the webob code doesn't even make the top 10.
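To reproduce the constructor overhead without the full benchmark harness, a minimal profiling sketch (it assumes only that webob is importable; the iteration count is arbitrary):
```
# Minimal sketch to isolate Response-construction cost with the stdlib
# profiler; sort by tottime to mirror the tables above.
import cProfile
import pstats

from webob import Response

def make_responses(n=100000):
    for _ in range(n):
        Response(body=b'OK')

profiler = cProfile.Profile()
profiler.enable()
make_responses()
profiler.disable()
pstats.Stats(profiler).sort_stats('tottime').print_stats(10)
```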
| Pylons/webob | diff --git a/tests/test_request.py b/tests/test_request.py
index 524214d..9650702 100644
--- a/tests/test_request.py
+++ b/tests/test_request.py
@@ -3282,7 +3282,7 @@ class TestRequest_functional(object):
assert res.status == '200 OK'
from webob.headers import ResponseHeaders
assert isinstance(res.headers, ResponseHeaders)
- assert list(res.headers.items()) == [('Content-Type', 'text/plain; charset=UTF-8')]
+ assert list(res.headers.items()) == [('Content-Type', 'text/plain')]
assert res.body == b'Hi!'
def test_call_WSGI_app_204(self):
@@ -3329,7 +3329,7 @@ class TestRequest_functional(object):
assert res.status == '200 OK'
from webob.headers import ResponseHeaders
assert isinstance(res.headers, ResponseHeaders)
- assert list(res.headers.items()) == [('Content-Type', 'text/plain; charset=UTF-8')]
+ assert list(res.headers.items()) == [('Content-Type', 'text/plain')]
assert res.body == b'Hi!'
def equal_req(self, req, inp):
diff --git a/tests/test_response.py b/tests/test_response.py
index 867d433..fd4615a 100644
--- a/tests/test_response.py
+++ b/tests/test_response.py
@@ -18,7 +18,7 @@ def teardown_module(module):
def simple_app(environ, start_response):
start_response('200 OK', [
- ('Content-Type', 'text/html; charset=utf8'),
+ ('Content-Type', 'text/html; charset=UTF-8'),
])
return ['OK']
@@ -28,7 +28,7 @@ def test_response():
assert res.status == '200 OK'
assert res.status_code == 200
assert res.body == "OK"
- assert res.charset == 'utf8'
+ assert res.charset == "UTF-8"
assert res.content_type == 'text/html'
res.status = 404
assert res.status == '404 Not Found'
@@ -158,7 +158,7 @@ def test_cookies():
r2 = res.merge_cookies(simple_app)
r2 = BaseRequest.blank('/').get_response(r2)
assert r2.headerlist == [
- ('Content-Type', 'text/html; charset=utf8'),
+ ('Content-Type', 'text/html; charset=UTF-8'),
('Set-Cookie', 'x=test; Path=/'),
]
@@ -476,19 +476,6 @@ def test_has_body():
messing_with_privates._app_iter = None
assert not messing_with_privates.has_body
-def test_content_type_in_headerlist():
- # Couldn't manage to clone Response in order to modify class
- # attributes safely. Shouldn't classes be fresh imported for every
- # test?
- default_content_type = Response.default_content_type
- Response.default_content_type = None
- try:
- res = Response(headerlist=[('Content-Type', 'text/html')], charset='utf8')
- assert res._headerlist
- assert res.charset == 'utf8'
- finally:
- Response.default_content_type = default_content_type
-
def test_str_crlf():
res = Response('test')
assert '\r\n' in str(res)
@@ -989,6 +976,7 @@ def test_cache_control_get():
def test_location():
res = Response()
+ res.status = '301'
res.location = '/test.html'
assert res.location == '/test.html'
req = Request.blank('/')
@@ -1195,20 +1183,35 @@ def test_decode_content_gzip():
res.decode_content()
assert res.body == b'abc'
-def test__abs_headerlist_location_with_scheme():
- res = Response()
- res.content_encoding = 'gzip'
- res.headerlist = [('Location', 'http:')]
- result = res._abs_headerlist({})
- assert result, [('Location' == 'http:')]
-
-def test__abs_headerlist_location_no_scheme():
- res = Response()
- res.content_encoding = 'gzip'
- res.headerlist = [('Location', '/abc')]
- result = res._abs_headerlist({'wsgi.url_scheme': 'http',
- 'HTTP_HOST': 'example.com:80'})
- assert result == [('Location', 'http://example.com/abc')]
+def test__make_location_absolute_has_scheme_only():
+ result = Response._make_location_absolute(
+ {
+ 'wsgi.url_scheme': 'http',
+ 'HTTP_HOST': 'example.com:80'
+ },
+ 'http:'
+ )
+ assert result == 'http:'
+
+def test__make_location_absolute_path():
+ result = Response._make_location_absolute(
+ {
+ 'wsgi.url_scheme': 'http',
+ 'HTTP_HOST': 'example.com:80'
+ },
+ '/abc'
+ )
+ assert result == 'http://example.com/abc'
+
+def test__make_location_absolute_already_absolute():
+ result = Response._make_location_absolute(
+ {
+ 'wsgi.url_scheme': 'http',
+ 'HTTP_HOST': 'example.com:80'
+ },
+ 'https://funcptr.net/'
+ )
+ assert result == 'https://funcptr.net/'
def test_response_set_body_file1():
data = b'abc'
@@ -1241,3 +1244,72 @@ def test_cache_expires_set_zero_then_nonzero():
assert not res.cache_control.no_store
assert not res.cache_control.must_revalidate
assert res.cache_control.max_age == 1
+
+def test_default_content_type():
+ class NoDefault(Response):
+ default_content_type = None
+
+ res = NoDefault()
+ assert res.content_type is None
+
+def test_default_charset():
+ class DefaultCharset(Response):
+ default_charset = 'UTF-16'
+
+ res = DefaultCharset()
+ assert res.content_type == 'text/html'
+ assert res.charset == 'UTF-16'
+ assert res.headers['Content-Type'] == 'text/html; charset=UTF-16'
+
+def test_header_list_no_defaults():
+ res = Response(headerlist=[])
+ assert res.headerlist == [('Content-Length', '0')]
+ assert res.content_type is None
+ assert res.charset is None
+ assert res.body == b''
+
+def test_204_has_no_body():
+ res = Response(status='204 No Content')
+ assert res.body == b''
+ assert res.content_length is None
+ assert res.headerlist == []
+
+def test_204_app_iter_set():
+ res = Response(status='204', app_iter=[b'test'])
+ assert res.body == b''
+ assert res.content_length is None
+ assert res.headerlist == []
+
+def test_explicit_charset():
+ res = Response(charset='UTF-16')
+ assert res.content_type == 'text/html'
+ assert res.charset == 'UTF-16'
+
+def test_set_content_type():
+ res = Response(content_type='application/json')
+ res.content_type = 'application/foo'
+ assert res.content_type == 'application/foo'
+
+def test_raises_no_charset():
+ with pytest.raises(TypeError):
+ Response(content_type='image/jpeg', body=text_(b'test'))
+
+def test_raises_none_charset():
+ with pytest.raises(TypeError):
+ Response(
+ content_type='image/jpeg',
+ body=text_(b'test'),
+ charset=None)
+
+def test_doesnt_raise_with_charset_content_type_has_no_charset():
+ res = Response(content_type='image/jpeg', body=text_(b'test'), charset='utf-8')
+ assert res.body == b'test'
+ assert res.content_type == 'image/jpeg'
+ assert res.charset is None
+
+def test_content_type_has_charset():
+ res = Response(content_type='application/foo; charset=UTF-8', body=text_(b'test'))
+ assert res.body == b'test'
+ assert res.content_type == 'application/foo'
+ assert res.charset == 'UTF-8'
+ assert res.headers['Content-Type'] == 'application/foo; charset=UTF-8'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | 1.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[testing]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/Pylons/webob.git@8bed3b0112df7b1de3755a37924979799749e5f2#egg=WebOb
| name: webob
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- pytest-cov==6.0.0
prefix: /opt/conda/envs/webob
| [
"tests/test_request.py::TestRequest_functional::test_call_WSGI_app",
"tests/test_request.py::TestRequest_functional::test_get_response_catch_exc_info_true",
"tests/test_response.py::test__make_location_absolute_has_scheme_only",
"tests/test_response.py::test__make_location_absolute_path",
"tests/test_response.py::test__make_location_absolute_already_absolute",
"tests/test_response.py::test_204_has_no_body",
"tests/test_response.py::test_204_app_iter_set",
"tests/test_response.py::test_doesnt_raise_with_charset_content_type_has_no_charset"
]
| [
"tests/test_response.py::test_response_file_body_tell"
]
| [
"tests/test_request.py::TestRequestCommon::test_ctor_environ_getter_raises_WTF",
"tests/test_request.py::TestRequestCommon::test_ctor_wo_environ_raises_WTF",
"tests/test_request.py::TestRequestCommon::test_ctor_w_environ",
"tests/test_request.py::TestRequestCommon::test_ctor_w_non_utf8_charset",
"tests/test_request.py::TestRequestCommon::test_scheme",
"tests/test_request.py::TestRequestCommon::test_body_file_getter",
"tests/test_request.py::TestRequestCommon::test_body_file_getter_seekable",
"tests/test_request.py::TestRequestCommon::test_body_file_getter_cache",
"tests/test_request.py::TestRequestCommon::test_body_file_getter_unreadable",
"tests/test_request.py::TestRequestCommon::test_body_file_setter_w_bytes",
"tests/test_request.py::TestRequestCommon::test_body_file_setter_non_bytes",
"tests/test_request.py::TestRequestCommon::test_body_file_deleter",
"tests/test_request.py::TestRequestCommon::test_body_file_raw",
"tests/test_request.py::TestRequestCommon::test_body_file_seekable_input_not_seekable",
"tests/test_request.py::TestRequestCommon::test_body_file_seekable_input_is_seekable",
"tests/test_request.py::TestRequestCommon::test_urlvars_getter_w_paste_key",
"tests/test_request.py::TestRequestCommon::test_urlvars_getter_w_wsgiorg_key",
"tests/test_request.py::TestRequestCommon::test_urlvars_getter_wo_keys",
"tests/test_request.py::TestRequestCommon::test_urlvars_setter_w_paste_key",
"tests/test_request.py::TestRequestCommon::test_urlvars_setter_w_wsgiorg_key",
"tests/test_request.py::TestRequestCommon::test_urlvars_setter_wo_keys",
"tests/test_request.py::TestRequestCommon::test_urlvars_deleter_w_paste_key",
"tests/test_request.py::TestRequestCommon::test_urlvars_deleter_w_wsgiorg_key_non_empty_tuple",
"tests/test_request.py::TestRequestCommon::test_urlvars_deleter_w_wsgiorg_key_empty_tuple",
"tests/test_request.py::TestRequestCommon::test_urlvars_deleter_wo_keys",
"tests/test_request.py::TestRequestCommon::test_urlargs_getter_w_paste_key",
"tests/test_request.py::TestRequestCommon::test_urlargs_getter_w_wsgiorg_key",
"tests/test_request.py::TestRequestCommon::test_urlargs_getter_wo_keys",
"tests/test_request.py::TestRequestCommon::test_urlargs_setter_w_paste_key",
"tests/test_request.py::TestRequestCommon::test_urlargs_setter_w_wsgiorg_key",
"tests/test_request.py::TestRequestCommon::test_urlargs_setter_wo_keys",
"tests/test_request.py::TestRequestCommon::test_urlargs_deleter_w_wsgiorg_key",
"tests/test_request.py::TestRequestCommon::test_urlargs_deleter_w_wsgiorg_key_empty",
"tests/test_request.py::TestRequestCommon::test_urlargs_deleter_wo_keys",
"tests/test_request.py::TestRequestCommon::test_cookies_empty_environ",
"tests/test_request.py::TestRequestCommon::test_cookies_is_mutable",
"tests/test_request.py::TestRequestCommon::test_cookies_w_webob_parsed_cookies_matching_source",
"tests/test_request.py::TestRequestCommon::test_cookies_w_webob_parsed_cookies_mismatched_source",
"tests/test_request.py::TestRequestCommon::test_set_cookies",
"tests/test_request.py::TestRequestCommon::test_body_getter",
"tests/test_request.py::TestRequestCommon::test_body_setter_None",
"tests/test_request.py::TestRequestCommon::test_body_setter_non_string_raises",
"tests/test_request.py::TestRequestCommon::test_body_setter_value",
"tests/test_request.py::TestRequestCommon::test_body_deleter_None",
"tests/test_request.py::TestRequestCommon::test_json_body",
"tests/test_request.py::TestRequestCommon::test_json_body_array",
"tests/test_request.py::TestRequestCommon::test_text_body",
"tests/test_request.py::TestRequestCommon::test__text_get_without_charset",
"tests/test_request.py::TestRequestCommon::test__text_set_without_charset",
"tests/test_request.py::TestRequestCommon::test_POST_not_POST_or_PUT",
"tests/test_request.py::TestRequestCommon::test_POST_existing_cache_hit",
"tests/test_request.py::TestRequestCommon::test_PUT_missing_content_type",
"tests/test_request.py::TestRequestCommon::test_PATCH_missing_content_type",
"tests/test_request.py::TestRequestCommon::test_POST_missing_content_type",
"tests/test_request.py::TestRequestCommon::test_POST_json_no_content_type",
"tests/test_request.py::TestRequestCommon::test_PUT_bad_content_type",
"tests/test_request.py::TestRequestCommon::test_POST_multipart",
"tests/test_request.py::TestRequestCommon::test_GET_reflects_query_string",
"tests/test_request.py::TestRequestCommon::test_GET_updates_query_string",
"tests/test_request.py::TestRequestCommon::test_cookies_wo_webob_parsed_cookies",
"tests/test_request.py::TestRequestCommon::test_copy_get",
"tests/test_request.py::TestRequestCommon::test_remove_conditional_headers_accept_encoding",
"tests/test_request.py::TestRequestCommon::test_remove_conditional_headers_if_modified_since",
"tests/test_request.py::TestRequestCommon::test_remove_conditional_headers_if_none_match",
"tests/test_request.py::TestRequestCommon::test_remove_conditional_headers_if_range",
"tests/test_request.py::TestRequestCommon::test_remove_conditional_headers_range",
"tests/test_request.py::TestRequestCommon::test_is_body_readable_POST",
"tests/test_request.py::TestRequestCommon::test_is_body_readable_PATCH",
"tests/test_request.py::TestRequestCommon::test_is_body_readable_GET",
"tests/test_request.py::TestRequestCommon::test_is_body_readable_unknown_method_and_content_length",
"tests/test_request.py::TestRequestCommon::test_is_body_readable_special_flag",
"tests/test_request.py::TestRequestCommon::test_cache_control_reflects_environ",
"tests/test_request.py::TestRequestCommon::test_cache_control_updates_environ",
"tests/test_request.py::TestRequestCommon::test_cache_control_set_dict",
"tests/test_request.py::TestRequestCommon::test_cache_control_set_object",
"tests/test_request.py::TestRequestCommon::test_cache_control_gets_cached",
"tests/test_request.py::TestRequestCommon::test_call_application_calls_application",
"tests/test_request.py::TestRequestCommon::test_call_application_provides_write",
"tests/test_request.py::TestRequestCommon::test_call_application_closes_iterable_when_mixed_w_write_calls",
"tests/test_request.py::TestRequestCommon::test_call_application_raises_exc_info",
"tests/test_request.py::TestRequestCommon::test_call_application_returns_exc_info",
"tests/test_request.py::TestRequestCommon::test_blank__method_subtitution",
"tests/test_request.py::TestRequestCommon::test_blank__ctype_in_env",
"tests/test_request.py::TestRequestCommon::test_blank__ctype_in_headers",
"tests/test_request.py::TestRequestCommon::test_blank__ctype_as_kw",
"tests/test_request.py::TestRequestCommon::test_blank__str_post_data_for_unsupported_ctype",
"tests/test_request.py::TestRequestCommon::test_blank__post_urlencoded",
"tests/test_request.py::TestRequestCommon::test_blank__post_multipart",
"tests/test_request.py::TestRequestCommon::test_blank__post_files",
"tests/test_request.py::TestRequestCommon::test_blank__post_file_w_wrong_ctype",
"tests/test_request.py::TestRequestCommon::test_from_bytes_extra_data",
"tests/test_request.py::TestRequestCommon::test_as_bytes_skip_body",
"tests/test_request.py::TestBaseRequest::test_method",
"tests/test_request.py::TestBaseRequest::test_http_version",
"tests/test_request.py::TestBaseRequest::test_script_name",
"tests/test_request.py::TestBaseRequest::test_path_info",
"tests/test_request.py::TestBaseRequest::test_content_length_getter",
"tests/test_request.py::TestBaseRequest::test_content_length_setter_w_str",
"tests/test_request.py::TestBaseRequest::test_remote_user",
"tests/test_request.py::TestBaseRequest::test_remote_addr",
"tests/test_request.py::TestBaseRequest::test_query_string",
"tests/test_request.py::TestBaseRequest::test_server_name",
"tests/test_request.py::TestBaseRequest::test_server_port_getter",
"tests/test_request.py::TestBaseRequest::test_server_port_setter_with_string",
"tests/test_request.py::TestBaseRequest::test_uscript_name",
"tests/test_request.py::TestBaseRequest::test_upath_info",
"tests/test_request.py::TestBaseRequest::test_upath_info_set_unicode",
"tests/test_request.py::TestBaseRequest::test_content_type_getter_no_parameters",
"tests/test_request.py::TestBaseRequest::test_content_type_getter_w_parameters",
"tests/test_request.py::TestBaseRequest::test_content_type_setter_w_None",
"tests/test_request.py::TestBaseRequest::test_content_type_setter_existing_paramter_no_new_paramter",
"tests/test_request.py::TestBaseRequest::test_content_type_deleter_clears_environ_value",
"tests/test_request.py::TestBaseRequest::test_content_type_deleter_no_environ_value",
"tests/test_request.py::TestBaseRequest::test_headers_getter",
"tests/test_request.py::TestBaseRequest::test_headers_setter",
"tests/test_request.py::TestBaseRequest::test_no_headers_deleter",
"tests/test_request.py::TestBaseRequest::test_client_addr_xff_singleval",
"tests/test_request.py::TestBaseRequest::test_client_addr_xff_multival",
"tests/test_request.py::TestBaseRequest::test_client_addr_prefers_xff",
"tests/test_request.py::TestBaseRequest::test_client_addr_no_xff",
"tests/test_request.py::TestBaseRequest::test_client_addr_no_xff_no_remote_addr",
"tests/test_request.py::TestBaseRequest::test_host_port_w_http_host_and_no_port",
"tests/test_request.py::TestBaseRequest::test_host_port_w_http_host_and_standard_port",
"tests/test_request.py::TestBaseRequest::test_host_port_w_http_host_and_oddball_port",
"tests/test_request.py::TestBaseRequest::test_host_port_w_http_host_https_and_no_port",
"tests/test_request.py::TestBaseRequest::test_host_port_w_http_host_https_and_standard_port",
"tests/test_request.py::TestBaseRequest::test_host_port_w_http_host_https_and_oddball_port",
"tests/test_request.py::TestBaseRequest::test_host_port_wo_http_host",
"tests/test_request.py::TestBaseRequest::test_host_url_w_http_host_and_no_port",
"tests/test_request.py::TestBaseRequest::test_host_url_w_http_host_and_standard_port",
"tests/test_request.py::TestBaseRequest::test_host_url_w_http_host_and_oddball_port",
"tests/test_request.py::TestBaseRequest::test_host_url_w_http_host_https_and_no_port",
"tests/test_request.py::TestBaseRequest::test_host_url_w_http_host_https_and_standard_port",
"tests/test_request.py::TestBaseRequest::test_host_url_w_http_host_https_and_oddball_port",
"tests/test_request.py::TestBaseRequest::test_host_url_wo_http_host",
"tests/test_request.py::TestBaseRequest::test_application_url",
"tests/test_request.py::TestBaseRequest::test_path_url",
"tests/test_request.py::TestBaseRequest::test_path",
"tests/test_request.py::TestBaseRequest::test_path_qs_no_qs",
"tests/test_request.py::TestBaseRequest::test_path_qs_w_qs",
"tests/test_request.py::TestBaseRequest::test_url_no_qs",
"tests/test_request.py::TestBaseRequest::test_url_w_qs",
"tests/test_request.py::TestBaseRequest::test_relative_url_to_app_true_wo_leading_slash",
"tests/test_request.py::TestBaseRequest::test_relative_url_to_app_true_w_leading_slash",
"tests/test_request.py::TestBaseRequest::test_relative_url_to_app_false_other_w_leading_slash",
"tests/test_request.py::TestBaseRequest::test_relative_url_to_app_false_other_wo_leading_slash",
"tests/test_request.py::TestBaseRequest::test_path_info_pop_empty",
"tests/test_request.py::TestBaseRequest::test_path_info_pop_just_leading_slash",
"tests/test_request.py::TestBaseRequest::test_path_info_pop_non_empty_no_pattern",
"tests/test_request.py::TestBaseRequest::test_path_info_pop_non_empty_w_pattern_miss",
"tests/test_request.py::TestBaseRequest::test_path_info_pop_non_empty_w_pattern_hit",
"tests/test_request.py::TestBaseRequest::test_path_info_pop_skips_empty_elements",
"tests/test_request.py::TestBaseRequest::test_path_info_peek_empty",
"tests/test_request.py::TestBaseRequest::test_path_info_peek_just_leading_slash",
"tests/test_request.py::TestBaseRequest::test_path_info_peek_non_empty",
"tests/test_request.py::TestBaseRequest::test_is_xhr_no_header",
"tests/test_request.py::TestBaseRequest::test_is_xhr_header_miss",
"tests/test_request.py::TestBaseRequest::test_is_xhr_header_hit",
"tests/test_request.py::TestBaseRequest::test_host_getter_w_HTTP_HOST",
"tests/test_request.py::TestBaseRequest::test_host_getter_wo_HTTP_HOST",
"tests/test_request.py::TestBaseRequest::test_host_setter",
"tests/test_request.py::TestBaseRequest::test_host_deleter_hit",
"tests/test_request.py::TestBaseRequest::test_host_deleter_miss",
"tests/test_request.py::TestBaseRequest::test_domain_nocolon",
"tests/test_request.py::TestBaseRequest::test_domain_withcolon",
"tests/test_request.py::TestBaseRequest::test_encget_raises_without_default",
"tests/test_request.py::TestBaseRequest::test_encget_doesnt_raises_with_default",
"tests/test_request.py::TestBaseRequest::test_encget_with_encattr",
"tests/test_request.py::TestBaseRequest::test_encget_with_encattr_latin_1",
"tests/test_request.py::TestBaseRequest::test_encget_no_encattr",
"tests/test_request.py::TestBaseRequest::test_relative_url",
"tests/test_request.py::TestBaseRequest::test_header_getter",
"tests/test_request.py::TestBaseRequest::test_json_body",
"tests/test_request.py::TestBaseRequest::test_host_get",
"tests/test_request.py::TestBaseRequest::test_host_get_w_no_http_host",
"tests/test_request.py::TestLegacyRequest::test_method",
"tests/test_request.py::TestLegacyRequest::test_http_version",
"tests/test_request.py::TestLegacyRequest::test_script_name",
"tests/test_request.py::TestLegacyRequest::test_path_info",
"tests/test_request.py::TestLegacyRequest::test_content_length_getter",
"tests/test_request.py::TestLegacyRequest::test_content_length_setter_w_str",
"tests/test_request.py::TestLegacyRequest::test_remote_user",
"tests/test_request.py::TestLegacyRequest::test_remote_addr",
"tests/test_request.py::TestLegacyRequest::test_query_string",
"tests/test_request.py::TestLegacyRequest::test_server_name",
"tests/test_request.py::TestLegacyRequest::test_server_port_getter",
"tests/test_request.py::TestLegacyRequest::test_server_port_setter_with_string",
"tests/test_request.py::TestLegacyRequest::test_uscript_name",
"tests/test_request.py::TestLegacyRequest::test_upath_info",
"tests/test_request.py::TestLegacyRequest::test_upath_info_set_unicode",
"tests/test_request.py::TestLegacyRequest::test_content_type_getter_no_parameters",
"tests/test_request.py::TestLegacyRequest::test_content_type_getter_w_parameters",
"tests/test_request.py::TestLegacyRequest::test_content_type_setter_w_None",
"tests/test_request.py::TestLegacyRequest::test_content_type_setter_existing_paramter_no_new_paramter",
"tests/test_request.py::TestLegacyRequest::test_content_type_deleter_clears_environ_value",
"tests/test_request.py::TestLegacyRequest::test_content_type_deleter_no_environ_value",
"tests/test_request.py::TestLegacyRequest::test_headers_getter",
"tests/test_request.py::TestLegacyRequest::test_headers_setter",
"tests/test_request.py::TestLegacyRequest::test_no_headers_deleter",
"tests/test_request.py::TestLegacyRequest::test_client_addr_xff_singleval",
"tests/test_request.py::TestLegacyRequest::test_client_addr_xff_multival",
"tests/test_request.py::TestLegacyRequest::test_client_addr_prefers_xff",
"tests/test_request.py::TestLegacyRequest::test_client_addr_no_xff",
"tests/test_request.py::TestLegacyRequest::test_client_addr_no_xff_no_remote_addr",
"tests/test_request.py::TestLegacyRequest::test_host_port_w_http_host_and_no_port",
"tests/test_request.py::TestLegacyRequest::test_host_port_w_http_host_and_standard_port",
"tests/test_request.py::TestLegacyRequest::test_host_port_w_http_host_and_oddball_port",
"tests/test_request.py::TestLegacyRequest::test_host_port_w_http_host_https_and_no_port",
"tests/test_request.py::TestLegacyRequest::test_host_port_w_http_host_https_and_standard_port",
"tests/test_request.py::TestLegacyRequest::test_host_port_w_http_host_https_and_oddball_port",
"tests/test_request.py::TestLegacyRequest::test_host_port_wo_http_host",
"tests/test_request.py::TestLegacyRequest::test_host_url_w_http_host_and_no_port",
"tests/test_request.py::TestLegacyRequest::test_host_url_w_http_host_and_standard_port",
"tests/test_request.py::TestLegacyRequest::test_host_url_w_http_host_and_oddball_port",
"tests/test_request.py::TestLegacyRequest::test_host_url_w_http_host_https_and_no_port",
"tests/test_request.py::TestLegacyRequest::test_host_url_w_http_host_https_and_standard_port",
"tests/test_request.py::TestLegacyRequest::test_host_url_w_http_host_https_and_oddball_port",
"tests/test_request.py::TestLegacyRequest::test_host_url_wo_http_host",
"tests/test_request.py::TestLegacyRequest::test_application_url",
"tests/test_request.py::TestLegacyRequest::test_path_url",
"tests/test_request.py::TestLegacyRequest::test_path",
"tests/test_request.py::TestLegacyRequest::test_path_qs_no_qs",
"tests/test_request.py::TestLegacyRequest::test_path_qs_w_qs",
"tests/test_request.py::TestLegacyRequest::test_url_no_qs",
"tests/test_request.py::TestLegacyRequest::test_url_w_qs",
"tests/test_request.py::TestLegacyRequest::test_relative_url_to_app_true_wo_leading_slash",
"tests/test_request.py::TestLegacyRequest::test_relative_url_to_app_true_w_leading_slash",
"tests/test_request.py::TestLegacyRequest::test_relative_url_to_app_false_other_w_leading_slash",
"tests/test_request.py::TestLegacyRequest::test_relative_url_to_app_false_other_wo_leading_slash",
"tests/test_request.py::TestLegacyRequest::test_path_info_pop_empty",
"tests/test_request.py::TestLegacyRequest::test_path_info_pop_just_leading_slash",
"tests/test_request.py::TestLegacyRequest::test_path_info_pop_non_empty_no_pattern",
"tests/test_request.py::TestLegacyRequest::test_path_info_pop_non_empty_w_pattern_miss",
"tests/test_request.py::TestLegacyRequest::test_path_info_pop_non_empty_w_pattern_hit",
"tests/test_request.py::TestLegacyRequest::test_path_info_pop_skips_empty_elements",
"tests/test_request.py::TestLegacyRequest::test_path_info_peek_empty",
"tests/test_request.py::TestLegacyRequest::test_path_info_peek_just_leading_slash",
"tests/test_request.py::TestLegacyRequest::test_path_info_peek_non_empty",
"tests/test_request.py::TestLegacyRequest::test_is_xhr_no_header",
"tests/test_request.py::TestLegacyRequest::test_is_xhr_header_miss",
"tests/test_request.py::TestLegacyRequest::test_is_xhr_header_hit",
"tests/test_request.py::TestLegacyRequest::test_host_getter_w_HTTP_HOST",
"tests/test_request.py::TestLegacyRequest::test_host_getter_wo_HTTP_HOST",
"tests/test_request.py::TestLegacyRequest::test_host_setter",
"tests/test_request.py::TestLegacyRequest::test_host_deleter_hit",
"tests/test_request.py::TestLegacyRequest::test_host_deleter_miss",
"tests/test_request.py::TestLegacyRequest::test_encget_raises_without_default",
"tests/test_request.py::TestLegacyRequest::test_encget_doesnt_raises_with_default",
"tests/test_request.py::TestLegacyRequest::test_encget_with_encattr",
"tests/test_request.py::TestLegacyRequest::test_encget_no_encattr",
"tests/test_request.py::TestLegacyRequest::test_relative_url",
"tests/test_request.py::TestLegacyRequest::test_header_getter",
"tests/test_request.py::TestLegacyRequest::test_json_body",
"tests/test_request.py::TestLegacyRequest::test_host_get_w_http_host",
"tests/test_request.py::TestLegacyRequest::test_host_get_w_no_http_host",
"tests/test_request.py::TestRequestConstructorWarnings::test_ctor_w_unicode_errors",
"tests/test_request.py::TestRequestConstructorWarnings::test_ctor_w_decode_param_names",
"tests/test_request.py::TestRequestWithAdhocAttr::test_adhoc_attrs_set",
"tests/test_request.py::TestRequestWithAdhocAttr::test_adhoc_attrs_set_nonadhoc",
"tests/test_request.py::TestRequestWithAdhocAttr::test_adhoc_attrs_get",
"tests/test_request.py::TestRequestWithAdhocAttr::test_adhoc_attrs_get_missing",
"tests/test_request.py::TestRequestWithAdhocAttr::test_adhoc_attrs_del",
"tests/test_request.py::TestRequestWithAdhocAttr::test_adhoc_attrs_del_missing",
"tests/test_request.py::TestRequest_functional::test_gets",
"tests/test_request.py::TestRequest_functional::test_gets_with_query_string",
"tests/test_request.py::TestRequest_functional::test_language_parsing1",
"tests/test_request.py::TestRequest_functional::test_language_parsing2",
"tests/test_request.py::TestRequest_functional::test_language_parsing3",
"tests/test_request.py::TestRequest_functional::test_mime_parsing1",
"tests/test_request.py::TestRequest_functional::test_mime_parsing2",
"tests/test_request.py::TestRequest_functional::test_mime_parsing3",
"tests/test_request.py::TestRequest_functional::test_accept_best_match",
"tests/test_request.py::TestRequest_functional::test_from_mimeparse",
"tests/test_request.py::TestRequest_functional::test_headers",
"tests/test_request.py::TestRequest_functional::test_bad_cookie",
"tests/test_request.py::TestRequest_functional::test_cookie_quoting",
"tests/test_request.py::TestRequest_functional::test_path_quoting",
"tests/test_request.py::TestRequest_functional::test_params",
"tests/test_request.py::TestRequest_functional::test_copy_body",
"tests/test_request.py::TestRequest_functional::test_already_consumed_stream",
"tests/test_request.py::TestRequest_functional::test_broken_seek",
"tests/test_request.py::TestRequest_functional::test_set_body",
"tests/test_request.py::TestRequest_functional::test_broken_clen_header",
"tests/test_request.py::TestRequest_functional::test_nonstr_keys",
"tests/test_request.py::TestRequest_functional::test_authorization",
"tests/test_request.py::TestRequest_functional::test_as_bytes",
"tests/test_request.py::TestRequest_functional::test_as_text",
"tests/test_request.py::TestRequest_functional::test_req_kw_none_val",
"tests/test_request.py::TestRequest_functional::test_env_keys",
"tests/test_request.py::TestRequest_functional::test_repr_nodefault",
"tests/test_request.py::TestRequest_functional::test_request_noenviron_param",
"tests/test_request.py::TestRequest_functional::test_unexpected_kw",
"tests/test_request.py::TestRequest_functional::test_conttype_set_del",
"tests/test_request.py::TestRequest_functional::test_headers2",
"tests/test_request.py::TestRequest_functional::test_host_url",
"tests/test_request.py::TestRequest_functional::test_path_info_p",
"tests/test_request.py::TestRequest_functional::test_urlvars_property",
"tests/test_request.py::TestRequest_functional::test_urlargs_property",
"tests/test_request.py::TestRequest_functional::test_host_property",
"tests/test_request.py::TestRequest_functional::test_body_property",
"tests/test_request.py::TestRequest_functional::test_repr_invalid",
"tests/test_request.py::TestRequest_functional::test_from_garbage_file",
"tests/test_request.py::TestRequest_functional::test_from_file_patch",
"tests/test_request.py::TestRequest_functional::test_from_bytes",
"tests/test_request.py::TestRequest_functional::test_from_text",
"tests/test_request.py::TestRequest_functional::test_blank",
"tests/test_request.py::TestRequest_functional::test_post_does_not_reparse",
"tests/test_request.py::TestRequest_functional::test_middleware_body",
"tests/test_request.py::TestRequest_functional::test_body_file_noseek",
"tests/test_request.py::TestRequest_functional::test_cgi_escaping_fix",
"tests/test_request.py::TestRequest_functional::test_content_type_none",
"tests/test_request.py::TestRequest_functional::test_body_file_seekable",
"tests/test_request.py::TestRequest_functional::test_request_init",
"tests/test_request.py::TestRequest_functional::test_request_query_and_POST_vars",
"tests/test_request.py::TestRequest_functional::test_request_put",
"tests/test_request.py::TestRequest_functional::test_request_patch",
"tests/test_request.py::TestRequest_functional::test_call_WSGI_app_204",
"tests/test_request.py::TestRequest_functional::test_call_WSGI_app_no_content_type",
"tests/test_request.py::TestFakeCGIBody::test_encode_multipart_value_type_options",
"tests/test_request.py::TestFakeCGIBody::test_encode_multipart_no_boundary",
"tests/test_request.py::TestFakeCGIBody::test_repr",
"tests/test_request.py::TestFakeCGIBody::test_fileno",
"tests/test_request.py::TestFakeCGIBody::test_iter",
"tests/test_request.py::TestFakeCGIBody::test_readline",
"tests/test_request.py::TestFakeCGIBody::test_read_bad_content_type",
"tests/test_request.py::TestFakeCGIBody::test_read_urlencoded",
"tests/test_request.py::TestFakeCGIBody::test_readable",
"tests/test_request.py::Test_cgi_FieldStorage__repr__patch::test_with_file",
"tests/test_request.py::Test_cgi_FieldStorage__repr__patch::test_without_file",
"tests/test_request.py::TestLimitedLengthFile::test_fileno",
"tests/test_request.py::Test_environ_from_url::test_environ_from_url",
"tests/test_request.py::Test_environ_from_url::test_environ_from_url_highorder_path_info",
"tests/test_request.py::Test_environ_from_url::test_fileupload_mime_type_detection",
"tests/test_request.py::TestRequestMultipart::test_multipart_with_charset",
"tests/test_response.py::test_response",
"tests/test_response.py::test_set_response_status_binary",
"tests/test_response.py::test_set_response_status_str_no_reason",
"tests/test_response.py::test_set_response_status_str_generic_reason",
"tests/test_response.py::test_set_response_status_code",
"tests/test_response.py::test_set_response_status_bad",
"tests/test_response.py::test_set_response_status_code_generic_reason",
"tests/test_response.py::test_content_type",
"tests/test_response.py::test_init_content_type_w_charset",
"tests/test_response.py::test_init_adds_default_charset_when_not_json",
"tests/test_response.py::test_init_no_charset_when_json",
"tests/test_response.py::test_init_keeps_specified_charset_when_json",
"tests/test_response.py::test_init_doesnt_add_default_content_type_with_bodyless_status",
"tests/test_response.py::test_cookies",
"tests/test_response.py::test_unicode_cookies_error_raised",
"tests/test_response.py::test_unicode_cookies_warning_issued",
"tests/test_response.py::test_cookies_warning_issued_backwards_compat",
"tests/test_response.py::test_cookies_raises_typeerror",
"tests/test_response.py::test_http_only_cookie",
"tests/test_response.py::test_headers",
"tests/test_response.py::test_response_copy",
"tests/test_response.py::test_response_copy_content_md5",
"tests/test_response.py::test_HEAD_closes",
"tests/test_response.py::test_HEAD_conditional_response_returns_empty_response",
"tests/test_response.py::test_HEAD_conditional_response_range_empty_response",
"tests/test_response.py::test_conditional_response_if_none_match_false",
"tests/test_response.py::test_conditional_response_if_none_match_true",
"tests/test_response.py::test_conditional_response_if_none_match_weak",
"tests/test_response.py::test_conditional_response_if_modified_since_false",
"tests/test_response.py::test_conditional_response_if_modified_since_true",
"tests/test_response.py::test_conditional_response_range_not_satisfiable_response",
"tests/test_response.py::test_HEAD_conditional_response_range_not_satisfiable_response",
"tests/test_response.py::test_md5_etag",
"tests/test_response.py::test_md5_etag_set_content_md5",
"tests/test_response.py::test_decode_content_defaults_to_identity",
"tests/test_response.py::test_decode_content_with_deflate",
"tests/test_response.py::test_content_length",
"tests/test_response.py::test_app_iter_range",
"tests/test_response.py::test_app_iter_range_inner_method",
"tests/test_response.py::test_has_body",
"tests/test_response.py::test_str_crlf",
"tests/test_response.py::test_from_file",
"tests/test_response.py::test_from_file2",
"tests/test_response.py::test_from_text_file",
"tests/test_response.py::test_from_file_w_leading_space_in_header",
"tests/test_response.py::test_file_bad_header",
"tests/test_response.py::test_from_file_not_unicode_headers",
"tests/test_response.py::test_file_with_http_version",
"tests/test_response.py::test_file_with_http_version_more_status",
"tests/test_response.py::test_set_status",
"tests/test_response.py::test_set_headerlist",
"tests/test_response.py::test_request_uri_no_script_name",
"tests/test_response.py::test_request_uri_https",
"tests/test_response.py::test_app_iter_range_starts_after_iter_end",
"tests/test_response.py::test_resp_write_app_iter_non_list",
"tests/test_response.py::test_response_file_body_writelines",
"tests/test_response.py::test_response_file_body_tell_text",
"tests/test_response.py::test_response_write_non_str",
"tests/test_response.py::test_response_file_body_write_empty_app_iter",
"tests/test_response.py::test_response_file_body_write_empty_body",
"tests/test_response.py::test_response_file_body_close_not_implemented",
"tests/test_response.py::test_response_file_body_repr",
"tests/test_response.py::test_body_get_is_none",
"tests/test_response.py::test_body_get_is_unicode_notverylong",
"tests/test_response.py::test_body_get_is_unicode",
"tests/test_response.py::test_body_set_not_unicode_or_str",
"tests/test_response.py::test_body_set_unicode",
"tests/test_response.py::test_body_set_under_body_doesnt_exist",
"tests/test_response.py::test_body_del",
"tests/test_response.py::test_text_get_no_charset",
"tests/test_response.py::test_text_get_no_default_body_encoding",
"tests/test_response.py::test_unicode_body",
"tests/test_response.py::test_text_get_decode",
"tests/test_response.py::test_text_set_no_charset",
"tests/test_response.py::test_text_set_no_default_body_encoding",
"tests/test_response.py::test_text_set_not_unicode",
"tests/test_response.py::test_text_del",
"tests/test_response.py::test_body_file_del",
"tests/test_response.py::test_write_unicode",
"tests/test_response.py::test_write_unicode_no_charset",
"tests/test_response.py::test_write_text",
"tests/test_response.py::test_app_iter_del",
"tests/test_response.py::test_charset_set_no_content_type_header",
"tests/test_response.py::test_charset_del_no_content_type_header",
"tests/test_response.py::test_content_type_params_get_no_semicolon_in_content_type_header",
"tests/test_response.py::test_content_type_params_get_semicolon_in_content_type_header",
"tests/test_response.py::test_content_type_params_set_value_dict_empty",
"tests/test_response.py::test_content_type_params_set_ok_param_quoting",
"tests/test_response.py::test_charset_delete",
"tests/test_response.py::test_set_cookie_overwrite",
"tests/test_response.py::test_set_cookie_value_is_None",
"tests/test_response.py::test_set_cookie_expires_is_None_and_max_age_is_int",
"tests/test_response.py::test_set_cookie_expires_is_None_and_max_age_is_timedelta",
"tests/test_response.py::test_set_cookie_expires_is_not_None_and_max_age_is_None",
"tests/test_response.py::test_set_cookie_expires_is_timedelta_and_max_age_is_None",
"tests/test_response.py::test_delete_cookie",
"tests/test_response.py::test_delete_cookie_with_path",
"tests/test_response.py::test_delete_cookie_with_domain",
"tests/test_response.py::test_unset_cookie_not_existing_and_not_strict",
"tests/test_response.py::test_unset_cookie_not_existing_and_strict",
"tests/test_response.py::test_unset_cookie_key_in_cookies",
"tests/test_response.py::test_merge_cookies_no_set_cookie",
"tests/test_response.py::test_merge_cookies_resp_is_Response",
"tests/test_response.py::test_merge_cookies_resp_is_wsgi_callable",
"tests/test_response.py::test_body_get_body_is_None_len_app_iter_is_zero",
"tests/test_response.py::test_cache_control_get",
"tests/test_response.py::test_location",
"tests/test_response.py::test_request_uri_http",
"tests/test_response.py::test_request_uri_no_script_name2",
"tests/test_response.py::test_cache_control_object_max_age_ten",
"tests/test_response.py::test_cache_control_set_object_error",
"tests/test_response.py::test_cache_expires_set",
"tests/test_response.py::test_status_code_set",
"tests/test_response.py::test_cache_control_set_dict",
"tests/test_response.py::test_cache_control_set_None",
"tests/test_response.py::test_cache_control_set_unicode",
"tests/test_response.py::test_cache_control_set_control_obj_is_not_None",
"tests/test_response.py::test_cache_control_del",
"tests/test_response.py::test_body_file_get",
"tests/test_response.py::test_body_file_write_no_charset",
"tests/test_response.py::test_body_file_write_unicode_encodes",
"tests/test_response.py::test_repr",
"tests/test_response.py::test_cache_expires_set_timedelta",
"tests/test_response.py::test_cache_expires_set_int",
"tests/test_response.py::test_cache_expires_set_None",
"tests/test_response.py::test_cache_expires_set_zero",
"tests/test_response.py::test_encode_content_unknown",
"tests/test_response.py::test_encode_content_identity",
"tests/test_response.py::test_encode_content_gzip_already_gzipped",
"tests/test_response.py::test_encode_content_gzip_notyet_gzipped",
"tests/test_response.py::test_encode_content_gzip_notyet_gzipped_lazy",
"tests/test_response.py::test_encode_content_gzip_buffer_coverage",
"tests/test_response.py::test_decode_content_identity",
"tests/test_response.py::test_decode_content_weird",
"tests/test_response.py::test_decode_content_gzip",
"tests/test_response.py::test_response_set_body_file1",
"tests/test_response.py::test_response_set_body_file2",
"tests/test_response.py::test_response_json_body",
"tests/test_response.py::test_cache_expires_set_zero_then_nonzero",
"tests/test_response.py::test_default_content_type",
"tests/test_response.py::test_default_charset",
"tests/test_response.py::test_header_list_no_defaults",
"tests/test_response.py::test_explicit_charset",
"tests/test_response.py::test_set_content_type",
"tests/test_response.py::test_raises_no_charset",
"tests/test_response.py::test_raises_none_charset",
"tests/test_response.py::test_content_type_has_charset"
]
| []
| null | 787 | [
"webob/response.py",
"webob/headers.py",
"webob/descriptors.py"
]
| [
"webob/response.py",
"webob/headers.py",
"webob/descriptors.py"
]
|
Pylons__webob-287 | ce1eed59e9101c3732295bc8745ba003c700fd0c | 2016-09-30 06:05:12 | 5ec5ca2e45b70ff4ee9a2c74c77a04b71d6290fd | bertjwregeer: @mmerickel This one's for you! | diff --git a/webob/response.py b/webob/response.py
index a5b7bfd..607ac34 100644
--- a/webob/response.py
+++ b/webob/response.py
@@ -155,12 +155,17 @@ class Response(object):
set to True so that all ``Response`` objects will attempt to check
the original request for conditional response headers. See
:meth:`~Response.conditional_response_app` for more information.
+
+ * ``default_body_encoding`` is set to 'UTF-8' by default, it exists to
+ allow users to get/set the Response object using .text, even if no
+ charset has been set for the Content-Type.
"""
default_content_type = 'text/html'
default_charset = 'UTF-8'
unicode_errors = 'strict'
default_conditional_response = False
+ default_body_encoding = 'UTF-8'
# These two are only around so that when people pass them into the
# constructor they correctly get saved and set, however they are not used
@@ -556,24 +561,30 @@ class Response(object):
def _text__get(self):
"""
- Get/set the text value of the body (using the charset of the
- Content-Type)
+ Get/set the text value of the body using the charset of the
+ Content-Type or the default_body_encoding.
"""
- if not self.charset:
+ if not self.charset and not self.default_body_encoding:
raise AttributeError(
- "You cannot access Response.text unless charset is set")
+ "You cannot access Response.text unless charset or default_body_encoding"
+ " is set"
+ )
+ decoding = self.charset or self.default_body_encoding
body = self.body
- return body.decode(self.charset, self.unicode_errors)
+ return body.decode(decoding, self.unicode_errors)
def _text__set(self, value):
- if not self.charset:
+ if not self.charset and not self.default_body_encoding:
raise AttributeError(
- "You cannot access Response.text unless charset is set")
+ "You cannot access Response.text unless charset or default_body_encoding"
+ " is set"
+ )
if not isinstance(value, text_type):
raise TypeError(
"You can only set Response.text to a unicode string "
"(not %s)" % type(value))
- self.body = value.encode(self.charset)
+ encoding = self.charset or self.default_body_encoding
+ self.body = value.encode(encoding)
def _text__del(self):
del self.body
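A minimal sketch of the behavior the diff above introduces, mirroring the new tests in the record below (it assumes a WebOb build with this patch applied):
```python
from webob import Response

res = Response()
res.charset = None        # Content-Type carries no charset
res.text = u'abc'         # falls back to default_body_encoding ('UTF-8')
assert res.text == u'abc'

# Clearing both knobs restores the old AttributeError behavior.
res.default_body_encoding = None
try:
    res.text = u'abc'
except AttributeError as exc:
    print(exc)
```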
| Add default_body_encoding to Response
Add a new default_body_encoding; it is used for .text when there is no charset on the content-type.
This is a backwards-incompatible change: it breaks the assumption that setting .text on a binary content-type will raise an error.
It will also allow a user to change default_body_encoding on the fly before setting the body via .text, so the encoding can be whatever they wish. | Pylons/webob | diff --git a/tests/test_response.py b/tests/test_response.py
index 1fdcefe..867d433 100644
--- a/tests/test_response.py
+++ b/tests/test_response.py
@@ -693,8 +693,13 @@ def test_body_del():
def test_text_get_no_charset():
res = Response(charset=None)
+ assert '' == res.text
+
+def test_text_get_no_default_body_encoding():
+ res = Response(charset=None)
+ res.default_body_encoding = None
with pytest.raises(AttributeError):
- res.__getattribute__('text')
+ assert '' == res.text
def test_unicode_body():
res = Response()
@@ -717,8 +722,15 @@ def test_text_get_decode():
def test_text_set_no_charset():
res = Response()
res.charset = None
+ res.text = text_('abc')
+ assert res.text == 'abc'
+
+def test_text_set_no_default_body_encoding():
+ res = Response()
+ res.charset = None
+ res.default_body_encoding = None
with pytest.raises(AttributeError):
- res.__setattr__('text', 'abc')
+ res.text = text_('abc')
def test_text_set_not_unicode():
res = Response()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 1.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"coverage"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
-e git+https://github.com/Pylons/webob.git@ce1eed59e9101c3732295bc8745ba003c700fd0c#egg=WebOb
| name: webob
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/webob
| [
"tests/test_response.py::test_text_get_no_charset",
"tests/test_response.py::test_text_set_no_charset"
]
| [
"tests/test_response.py::test_response_file_body_tell"
]
| [
"tests/test_response.py::test_response",
"tests/test_response.py::test_set_response_status_binary",
"tests/test_response.py::test_set_response_status_str_no_reason",
"tests/test_response.py::test_set_response_status_str_generic_reason",
"tests/test_response.py::test_set_response_status_code",
"tests/test_response.py::test_set_response_status_bad",
"tests/test_response.py::test_set_response_status_code_generic_reason",
"tests/test_response.py::test_content_type",
"tests/test_response.py::test_init_content_type_w_charset",
"tests/test_response.py::test_init_adds_default_charset_when_not_json",
"tests/test_response.py::test_init_no_charset_when_json",
"tests/test_response.py::test_init_keeps_specified_charset_when_json",
"tests/test_response.py::test_init_doesnt_add_default_content_type_with_bodyless_status",
"tests/test_response.py::test_cookies",
"tests/test_response.py::test_unicode_cookies_error_raised",
"tests/test_response.py::test_unicode_cookies_warning_issued",
"tests/test_response.py::test_cookies_warning_issued_backwards_compat",
"tests/test_response.py::test_cookies_raises_typeerror",
"tests/test_response.py::test_http_only_cookie",
"tests/test_response.py::test_headers",
"tests/test_response.py::test_response_copy",
"tests/test_response.py::test_response_copy_content_md5",
"tests/test_response.py::test_HEAD_closes",
"tests/test_response.py::test_HEAD_conditional_response_returns_empty_response",
"tests/test_response.py::test_HEAD_conditional_response_range_empty_response",
"tests/test_response.py::test_conditional_response_if_none_match_false",
"tests/test_response.py::test_conditional_response_if_none_match_true",
"tests/test_response.py::test_conditional_response_if_none_match_weak",
"tests/test_response.py::test_conditional_response_if_modified_since_false",
"tests/test_response.py::test_conditional_response_if_modified_since_true",
"tests/test_response.py::test_conditional_response_range_not_satisfiable_response",
"tests/test_response.py::test_HEAD_conditional_response_range_not_satisfiable_response",
"tests/test_response.py::test_md5_etag",
"tests/test_response.py::test_md5_etag_set_content_md5",
"tests/test_response.py::test_decode_content_defaults_to_identity",
"tests/test_response.py::test_decode_content_with_deflate",
"tests/test_response.py::test_content_length",
"tests/test_response.py::test_app_iter_range",
"tests/test_response.py::test_app_iter_range_inner_method",
"tests/test_response.py::test_has_body",
"tests/test_response.py::test_content_type_in_headerlist",
"tests/test_response.py::test_str_crlf",
"tests/test_response.py::test_from_file",
"tests/test_response.py::test_from_file2",
"tests/test_response.py::test_from_text_file",
"tests/test_response.py::test_from_file_w_leading_space_in_header",
"tests/test_response.py::test_file_bad_header",
"tests/test_response.py::test_from_file_not_unicode_headers",
"tests/test_response.py::test_file_with_http_version",
"tests/test_response.py::test_file_with_http_version_more_status",
"tests/test_response.py::test_set_status",
"tests/test_response.py::test_set_headerlist",
"tests/test_response.py::test_request_uri_no_script_name",
"tests/test_response.py::test_request_uri_https",
"tests/test_response.py::test_app_iter_range_starts_after_iter_end",
"tests/test_response.py::test_resp_write_app_iter_non_list",
"tests/test_response.py::test_response_file_body_writelines",
"tests/test_response.py::test_response_file_body_tell_text",
"tests/test_response.py::test_response_write_non_str",
"tests/test_response.py::test_response_file_body_write_empty_app_iter",
"tests/test_response.py::test_response_file_body_write_empty_body",
"tests/test_response.py::test_response_file_body_close_not_implemented",
"tests/test_response.py::test_response_file_body_repr",
"tests/test_response.py::test_body_get_is_none",
"tests/test_response.py::test_body_get_is_unicode_notverylong",
"tests/test_response.py::test_body_get_is_unicode",
"tests/test_response.py::test_body_set_not_unicode_or_str",
"tests/test_response.py::test_body_set_unicode",
"tests/test_response.py::test_body_set_under_body_doesnt_exist",
"tests/test_response.py::test_body_del",
"tests/test_response.py::test_text_get_no_default_body_encoding",
"tests/test_response.py::test_unicode_body",
"tests/test_response.py::test_text_get_decode",
"tests/test_response.py::test_text_set_no_default_body_encoding",
"tests/test_response.py::test_text_set_not_unicode",
"tests/test_response.py::test_text_del",
"tests/test_response.py::test_body_file_del",
"tests/test_response.py::test_write_unicode",
"tests/test_response.py::test_write_unicode_no_charset",
"tests/test_response.py::test_write_text",
"tests/test_response.py::test_app_iter_del",
"tests/test_response.py::test_charset_set_no_content_type_header",
"tests/test_response.py::test_charset_del_no_content_type_header",
"tests/test_response.py::test_content_type_params_get_no_semicolon_in_content_type_header",
"tests/test_response.py::test_content_type_params_get_semicolon_in_content_type_header",
"tests/test_response.py::test_content_type_params_set_value_dict_empty",
"tests/test_response.py::test_content_type_params_set_ok_param_quoting",
"tests/test_response.py::test_charset_delete",
"tests/test_response.py::test_set_cookie_overwrite",
"tests/test_response.py::test_set_cookie_value_is_None",
"tests/test_response.py::test_set_cookie_expires_is_None_and_max_age_is_int",
"tests/test_response.py::test_set_cookie_expires_is_None_and_max_age_is_timedelta",
"tests/test_response.py::test_set_cookie_expires_is_not_None_and_max_age_is_None",
"tests/test_response.py::test_set_cookie_expires_is_timedelta_and_max_age_is_None",
"tests/test_response.py::test_delete_cookie",
"tests/test_response.py::test_delete_cookie_with_path",
"tests/test_response.py::test_delete_cookie_with_domain",
"tests/test_response.py::test_unset_cookie_not_existing_and_not_strict",
"tests/test_response.py::test_unset_cookie_not_existing_and_strict",
"tests/test_response.py::test_unset_cookie_key_in_cookies",
"tests/test_response.py::test_merge_cookies_no_set_cookie",
"tests/test_response.py::test_merge_cookies_resp_is_Response",
"tests/test_response.py::test_merge_cookies_resp_is_wsgi_callable",
"tests/test_response.py::test_body_get_body_is_None_len_app_iter_is_zero",
"tests/test_response.py::test_cache_control_get",
"tests/test_response.py::test_location",
"tests/test_response.py::test_request_uri_http",
"tests/test_response.py::test_request_uri_no_script_name2",
"tests/test_response.py::test_cache_control_object_max_age_ten",
"tests/test_response.py::test_cache_control_set_object_error",
"tests/test_response.py::test_cache_expires_set",
"tests/test_response.py::test_status_code_set",
"tests/test_response.py::test_cache_control_set_dict",
"tests/test_response.py::test_cache_control_set_None",
"tests/test_response.py::test_cache_control_set_unicode",
"tests/test_response.py::test_cache_control_set_control_obj_is_not_None",
"tests/test_response.py::test_cache_control_del",
"tests/test_response.py::test_body_file_get",
"tests/test_response.py::test_body_file_write_no_charset",
"tests/test_response.py::test_body_file_write_unicode_encodes",
"tests/test_response.py::test_repr",
"tests/test_response.py::test_cache_expires_set_timedelta",
"tests/test_response.py::test_cache_expires_set_int",
"tests/test_response.py::test_cache_expires_set_None",
"tests/test_response.py::test_cache_expires_set_zero",
"tests/test_response.py::test_encode_content_unknown",
"tests/test_response.py::test_encode_content_identity",
"tests/test_response.py::test_encode_content_gzip_already_gzipped",
"tests/test_response.py::test_encode_content_gzip_notyet_gzipped",
"tests/test_response.py::test_encode_content_gzip_notyet_gzipped_lazy",
"tests/test_response.py::test_encode_content_gzip_buffer_coverage",
"tests/test_response.py::test_decode_content_identity",
"tests/test_response.py::test_decode_content_weird",
"tests/test_response.py::test_decode_content_gzip",
"tests/test_response.py::test__abs_headerlist_location_with_scheme",
"tests/test_response.py::test__abs_headerlist_location_no_scheme",
"tests/test_response.py::test_response_set_body_file1",
"tests/test_response.py::test_response_set_body_file2",
"tests/test_response.py::test_response_json_body",
"tests/test_response.py::test_cache_expires_set_zero_then_nonzero"
]
| []
| null | 788 | [
"webob/response.py"
]
| [
"webob/response.py"
]
|
zalando-stups__senza-372 | 56e263195218e3fe052e95221b2d9528c4343264 | 2016-09-30 08:21:15 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/aws.py b/senza/aws.py
index 1284ba3..c7f0468 100644
--- a/senza/aws.py
+++ b/senza/aws.py
@@ -55,13 +55,12 @@ def is_status_complete(status: str):
def get_security_group(region: str, sg_name: str):
ec2 = boto3.resource('ec2', region)
try:
- sec_groups = list(ec2.security_groups.filter(
- Filters=[{'Name': 'group-name', 'Values': [sg_name]}]
- ))
- if not sec_groups:
- return None
- # FIXME: What if we have 2 VPC, with a SG with the same name?!
- return sec_groups[0]
+ # first try by tag name then by group-name (cannot be changed)
+ for _filter in [{'Name': 'tag:Name', 'Values': [sg_name]}, {'Name': 'group-name', 'Values': [sg_name]}]:
+ sec_groups = list(ec2.security_groups.filter(Filters=[_filter]))
+ if sec_groups:
+ # FIXME: What if we have 2 VPC, with a SG with the same name?!
+ return sec_groups[0]
except ClientError as e:
error_code = extract_client_error_code(e)
if error_code == 'InvalidGroup.NotFound':
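The lookup order the hunk above implements can be read as this standalone sketch (the helper name `find_security_group` is illustrative; the real function is `senza.aws.get_security_group`, and running it needs AWS credentials):
```python
import boto3

def find_security_group(region, sg_name):
    ec2 = boto3.resource('ec2', region)
    # Try the mutable Name tag first, then the immutable group-name.
    for _filter in ({'Name': 'tag:Name', 'Values': [sg_name]},
                    {'Name': 'group-name', 'Values': [sg_name]}):
        groups = list(ec2.security_groups.filter(Filters=[_filter]))
        if groups:
            return groups[0]
    return None
```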
| Look up SecurityGroups by the tag "Name" rather than GroupName
Both the AWS API and CloudFormation allow referring to a security group by its name if the operation runs in EC2 Classic or the default VPC. Unfortunately this uses the `GroupName` attribute, which is generated automatically by AWS when the SG is part of a CloudFormation stack.
It would be a good idea to extend Senza to look up the SG during the CF template generation phase and to use the _tag_ `Name` instead. The tag can be set by another ("system") Senza stack definition, thus allowing cross-stack references.
Another option would be to support the new cross-stack references recently introduced by Amazon: https://aws.amazon.com/blogs/aws/aws-cloudformation-update-yaml-cross-stack-references-simplified-substitution/ | zalando-stups/senza | diff --git a/tests/test_aws.py b/tests/test_aws.py
index 4ca762a..8dd5b44 100644
--- a/tests/test_aws.py
+++ b/tests/test_aws.py
@@ -14,6 +14,21 @@ def test_get_security_group(monkeypatch):
assert results == get_security_group('myregion', 'group_inexistant')
+def test_get_security_group_by_tag_name(monkeypatch):
+
+ def mock_filter(Filters):
+ if Filters[0]['Name'] == 'tag:Name' and Filters[0]['Values'] == ['my-sg']:
+ sg = MagicMock()
+ sg.id = 'sg-123'
+ return [sg]
+
+ ec2 = MagicMock()
+ ec2.security_groups.filter = mock_filter
+ monkeypatch.setattr('boto3.resource', MagicMock(return_value=ec2))
+
+ assert get_security_group('myregion', 'my-sg').id == 'sg-123'
+
+
def test_resolve_security_groups(monkeypatch):
ec2 = MagicMock()
ec2.security_groups.filter = MagicMock(side_effect=[
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
clickclick==20.10.2
coverage==6.2
dnspython==1.15.0
dnspython3==1.15.0
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
jmespath==0.10.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pystache==0.6.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.1
raven==6.10.0
requests==2.27.1
s3transfer==0.5.2
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@56e263195218e3fe052e95221b2d9528c4343264#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- boto3==1.23.10
- botocore==1.26.10
- charset-normalizer==2.0.12
- click==8.0.4
- clickclick==20.10.2
- coverage==6.2
- dnspython==1.15.0
- dnspython3==1.15.0
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jmespath==0.10.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pystache==0.6.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- raven==6.10.0
- requests==2.27.1
- s3transfer==0.5.2
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/senza
| [
"tests/test_aws.py::test_get_security_group_by_tag_name"
]
| []
| [
"tests/test_aws.py::test_get_security_group",
"tests/test_aws.py::test_resolve_security_groups",
"tests/test_aws.py::test_create",
"tests/test_aws.py::test_encrypt",
"tests/test_aws.py::test_list_kms_keys",
"tests/test_aws.py::test_get_vpc_attribute",
"tests/test_aws.py::test_get_account_id",
"tests/test_aws.py::test_get_account_alias",
"tests/test_aws.py::test_resolve_referenced_resource",
"tests/test_aws.py::test_resolve_referenced_resource_with_update_complete_status",
"tests/test_aws.py::test_resolve_referenced_output_when_stack_is_in_update_complete_status",
"tests/test_aws.py::test_parse_time",
"tests/test_aws.py::test_required_capabilities",
"tests/test_aws.py::test_resolve_topic_arn",
"tests/test_aws.py::test_matches_any",
"tests/test_aws.py::test_get_tag"
]
| []
| Apache License 2.0 | 789 | [
"senza/aws.py"
]
| [
"senza/aws.py"
]
|
|
zalando-stups__senza-374 | 56e263195218e3fe052e95221b2d9528c4343264 | 2016-09-30 08:47:52 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/aws.py b/senza/aws.py
index 1284ba3..c7f0468 100644
--- a/senza/aws.py
+++ b/senza/aws.py
@@ -55,13 +55,12 @@ def is_status_complete(status: str):
def get_security_group(region: str, sg_name: str):
ec2 = boto3.resource('ec2', region)
try:
- sec_groups = list(ec2.security_groups.filter(
- Filters=[{'Name': 'group-name', 'Values': [sg_name]}]
- ))
- if not sec_groups:
- return None
- # FIXME: What if we have 2 VPC, with a SG with the same name?!
- return sec_groups[0]
+ # first try by tag name then by group-name (cannot be changed)
+ for _filter in [{'Name': 'tag:Name', 'Values': [sg_name]}, {'Name': 'group-name', 'Values': [sg_name]}]:
+ sec_groups = list(ec2.security_groups.filter(Filters=[_filter]))
+ if sec_groups:
+ # FIXME: What if we have 2 VPC, with a SG with the same name?!
+ return sec_groups[0]
except ClientError as e:
error_code = extract_client_error_code(e)
if error_code == 'InvalidGroup.NotFound':
diff --git a/senza/subcommands/root.py b/senza/subcommands/root.py
index 9162122..e5dab09 100644
--- a/senza/subcommands/root.py
+++ b/senza/subcommands/root.py
@@ -6,6 +6,7 @@ from typing import Optional
import click
import requests
import senza
+import sys
from clickclick import AliasedGroup, warning
from ..arguments import GLOBAL_OPTIONS, region_option
@@ -81,6 +82,8 @@ def check_senza_version(current_version: str):
Checks if senza is updated and prints a warning with instructions to update
if it's not.
"""
+ if not sys.stdout.isatty():
+ return
current_version = LooseVersion(current_version)
try:
latest_version = get_latest_version()
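The guard added above boils down to the following pattern (an illustrative helper, not Senza code):
```python
import sys

def maybe_warn(message):
    # Only nag interactive users; pipelines such as
    # `senza li -o tsv | awk '{ print $1 }'` keep clean output.
    if not sys.stdout.isatty():
        return
    print('WARNING: ' + message)

maybe_warn('A newer version of senza is available')
```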
| Senza version warning should not be printed for non-TTYs (piping result to awk etc)
The Senza version check currently breaks common shell pipelines such as:
```
senza li -o tsv | tail -n +2 | awk '{ print $1 }'
```
We should only print the warning if `sys.stdout` is a TTY. | zalando-stups/senza | diff --git a/tests/test_aws.py b/tests/test_aws.py
index 4ca762a..8dd5b44 100644
--- a/tests/test_aws.py
+++ b/tests/test_aws.py
@@ -14,6 +14,21 @@ def test_get_security_group(monkeypatch):
assert results == get_security_group('myregion', 'group_inexistant')
+def test_get_security_group_by_tag_name(monkeypatch):
+
+ def mock_filter(Filters):
+ if Filters[0]['Name'] == 'tag:Name' and Filters[0]['Values'] == ['my-sg']:
+ sg = MagicMock()
+ sg.id = 'sg-123'
+ return [sg]
+
+ ec2 = MagicMock()
+ ec2.security_groups.filter = mock_filter
+ monkeypatch.setattr('boto3.resource', MagicMock(return_value=ec2))
+
+ assert get_security_group('myregion', 'my-sg').id == 'sg-123'
+
+
def test_resolve_security_groups(monkeypatch):
ec2 = MagicMock()
ec2.security_groups.filter = MagicMock(side_effect=[
diff --git a/tests/test_subcommands/test_root.py b/tests/test_subcommands/test_root.py
index 796d6c9..86f5a4f 100644
--- a/tests/test_subcommands/test_root.py
+++ b/tests/test_subcommands/test_root.py
@@ -34,8 +34,23 @@ def mock_warning(monkeypatch):
return mock
+@fixture()
+def mock_tty(monkeypatch):
+ # check_senza_version only prints if we have a TTY
+ monkeypatch.setattr('sys.stdout.isatty', lambda: True)
+
+
+def test_check_senza_version_notty(monkeypatch, mock_get_app_dir, mock_get, mock_warning):
+ with TemporaryDirectory() as temp_dir:
+ mock_get_app_dir.return_value = temp_dir
+ monkeypatch.setattr("senza.subcommands.root.__file__",
+ '/home/someuser/pymodules/root.py')
+ check_senza_version("0.40")
+ mock_warning.assert_not_called()
+
+
def test_check_senza_version(monkeypatch,
- mock_get_app_dir, mock_get, mock_warning):
+ mock_get_app_dir, mock_get, mock_warning, mock_tty):
with TemporaryDirectory() as temp_dir_1:
mock_get_app_dir.return_value = temp_dir_1
@@ -72,7 +87,7 @@ def test_check_senza_version(monkeypatch,
)
-def test_check_senza_version_timeout(mock_get_app_dir, mock_get, mock_warning):
+def test_check_senza_version_timeout(mock_get_app_dir, mock_get, mock_warning, mock_tty):
with TemporaryDirectory() as temp_dir:
mock_get_app_dir.return_value = temp_dir
mock_get.side_effect = Timeout
@@ -83,7 +98,8 @@ def test_check_senza_version_timeout(mock_get_app_dir, mock_get, mock_warning):
def test_check_senza_version_outdated_cache(monkeypatch, # noqa: F811
mock_get_app_dir,
mock_get,
- mock_warning):
+ mock_warning,
+ mock_tty):
monkeypatch.setattr("senza.subcommands.root.__file__",
'/usr/pymodules/root.py')
with TemporaryDirectory() as temp_dir:
@@ -106,7 +122,8 @@ def test_check_senza_version_outdated_cache(monkeypatch, # noqa: F811
def test_check_senza_version_exception(monkeypatch,
mock_get_app_dir,
mock_get,
- mock_warning):
+ mock_warning,
+ mock_tty):
mock_sentry = MagicMock()
monkeypatch.setattr("senza.subcommands.root.sentry", mock_sentry)
with TemporaryDirectory() as temp_dir:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
raven==6.10.0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@56e263195218e3fe052e95221b2d9528c4343264#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- raven==6.10.0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_aws.py::test_get_security_group_by_tag_name",
"tests/test_subcommands/test_root.py::test_check_senza_version_notty"
]
| []
| [
"tests/test_aws.py::test_get_security_group",
"tests/test_aws.py::test_resolve_security_groups",
"tests/test_aws.py::test_create",
"tests/test_aws.py::test_encrypt",
"tests/test_aws.py::test_list_kms_keys",
"tests/test_aws.py::test_get_vpc_attribute",
"tests/test_aws.py::test_get_account_id",
"tests/test_aws.py::test_get_account_alias",
"tests/test_aws.py::test_resolve_referenced_resource",
"tests/test_aws.py::test_resolve_referenced_resource_with_update_complete_status",
"tests/test_aws.py::test_resolve_referenced_output_when_stack_is_in_update_complete_status",
"tests/test_aws.py::test_parse_time",
"tests/test_aws.py::test_required_capabilities",
"tests/test_aws.py::test_resolve_topic_arn",
"tests/test_aws.py::test_matches_any",
"tests/test_aws.py::test_get_tag",
"tests/test_subcommands/test_root.py::test_check_senza_version",
"tests/test_subcommands/test_root.py::test_check_senza_version_timeout",
"tests/test_subcommands/test_root.py::test_check_senza_version_outdated_cache",
"tests/test_subcommands/test_root.py::test_check_senza_version_exception",
"tests/test_subcommands/test_root.py::test_version"
]
| []
| Apache License 2.0 | 790 | [
"senza/subcommands/root.py",
"senza/aws.py"
]
| [
"senza/subcommands/root.py",
"senza/aws.py"
]
|
|
falconry__falcon-920 | 0fd502cdfe11942871b823c5c6204a52ef2b8428 | 2016-09-30 16:25:42 | 673bb2e13613f04462a5515ba41f84dbab142970 | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/920?src=pr) is 100% (diff: 100%)
> Merging [#920](https://codecov.io/gh/falconry/falcon/pull/920?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will not change coverage
```diff
@@ master #920 diff @@
====================================
Files 31 31
Lines 1970 1970
Methods 0 0
Messages 0 0
Branches 320 320
====================================
Hits 1970 1970
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [45b4176...c168cd0](https://codecov.io/gh/falconry/falcon/compare/45b4176111bce9e52282c2922c0d60a3214ef9f0...c168cd0a120297021fc692468d442c40f83d84af?src=pr) | diff --git a/falcon/request.py b/falcon/request.py
index 5557fa7..b5fbc68 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -46,8 +46,8 @@ SimpleCookie = http_cookies.SimpleCookie
DEFAULT_ERROR_LOG_FORMAT = (u'{0:%Y-%m-%d %H:%M:%S} [FALCON] [ERROR]'
u' {1} {2}{3} => ')
-TRUE_STRINGS = ('true', 'True', 'yes', '1')
-FALSE_STRINGS = ('false', 'False', 'no', '0')
+TRUE_STRINGS = ('true', 'True', 'yes', '1', 'on')
+FALSE_STRINGS = ('false', 'False', 'no', '0', 'off')
WSGI_CONTENT_HEADERS = ('CONTENT_TYPE', 'CONTENT_LENGTH')
# PERF(kgriffs): Avoid an extra namespace lookup when using these functions
@@ -948,8 +948,8 @@ class Request(object):
The following boolean strings are supported::
- TRUE_STRINGS = ('true', 'True', 'yes', '1')
- FALSE_STRINGS = ('false', 'False', 'no', '0')
+ TRUE_STRINGS = ('true', 'True', 'yes', '1', 'on')
+ FALSE_STRINGS = ('false', 'False', 'no', '0', 'off')
Args:
name (str): Parameter name, case-sensitive (e.g., 'detailed').
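The matching rule the patch extends can be sketched standalone (`param_as_bool` is an illustrative name; Falcon's real `get_param_as_bool` additionally handles stores, blank params, and HTTP errors):
```python
TRUE_STRINGS = ('true', 'True', 'yes', '1', 'on')
FALSE_STRINGS = ('false', 'False', 'no', '0', 'off')

def param_as_bool(value):
    if value in TRUE_STRINGS:
        return True
    if value in FALSE_STRINGS:
        return False
    raise ValueError('not a recognized boolean string: %r' % value)

assert param_as_bool('on') is True    # IE checkbox default
assert param_as_bool('off') is False
```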
| Add additional values to get_param_as_bool, like ON and OFF
Hello,
How about adding the following values to get_param_as_bool:
TRUE_STRINGS = [ ... , "on" ]
FALSE_STRINGS = [ ... , "off" ]
This is related to the default value of checkboxes in Internet Explorer.
Thanks. | falconry/falcon | diff --git a/tests/test_query_params.py b/tests/test_query_params.py
index 0b6a053..3cfe398 100644
--- a/tests/test_query_params.py
+++ b/tests/test_query_params.py
@@ -257,7 +257,8 @@ class _TestQueryParams(testing.TestBase):
def test_boolean(self):
query_string = ('echo=true&doit=false&bogus=bar&bogus2=foo&'
- 't1=True&f1=False&t2=yes&f2=no&blank&one=1&zero=0')
+ 't1=True&f1=False&t2=yes&f2=no&blank&one=1&zero=0&'
+ 'checkbox1=on&checkbox2=off')
self.simulate_request('/', query_string=query_string)
req = self.resource.captured_req
@@ -285,6 +286,9 @@ class _TestQueryParams(testing.TestBase):
self.assertEqual(req.get_param_as_bool('zero'), False)
self.assertEqual(req.get_param('blank'), None)
+ self.assertEqual(req.get_param_as_bool('checkbox1'), True)
+ self.assertEqual(req.get_param_as_bool('checkbox2'), False)
+
store = {}
self.assertEqual(req.get_param_as_bool('echo', store=store), True)
self.assertEqual(store['echo'], True)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@0fd502cdfe11942871b823c5c6204a52ef2b8428#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_query_params.py::_TestQueryParams::test_boolean",
"tests/test_query_params.py::PostQueryParams::test_boolean",
"tests/test_query_params.py::GetQueryParams::test_boolean"
]
| []
| [
"tests/test_query_params.py::_TestQueryParams::test_allowed_names",
"tests/test_query_params.py::_TestQueryParams::test_bad_percentage",
"tests/test_query_params.py::_TestQueryParams::test_blank",
"tests/test_query_params.py::_TestQueryParams::test_boolean_blank",
"tests/test_query_params.py::_TestQueryParams::test_get_date_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_date_store",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid",
"tests/test_query_params.py::_TestQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_store",
"tests/test_query_params.py::_TestQueryParams::test_get_dict_valid",
"tests/test_query_params.py::_TestQueryParams::test_int",
"tests/test_query_params.py::_TestQueryParams::test_int_neg",
"tests/test_query_params.py::_TestQueryParams::test_list_transformer",
"tests/test_query_params.py::_TestQueryParams::test_list_type",
"tests/test_query_params.py::_TestQueryParams::test_list_type_blank",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::_TestQueryParams::test_none",
"tests/test_query_params.py::_TestQueryParams::test_option_auto_parse_qs_csv_complex_false",
"tests/test_query_params.py::_TestQueryParams::test_option_auto_parse_qs_csv_simple_false",
"tests/test_query_params.py::_TestQueryParams::test_option_auto_parse_qs_csv_simple_true",
"tests/test_query_params.py::_TestQueryParams::test_param_property",
"tests/test_query_params.py::_TestQueryParams::test_percent_encoded",
"tests/test_query_params.py::_TestQueryParams::test_required_1_get_param",
"tests/test_query_params.py::_TestQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::_TestQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::_TestQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::_TestQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParams::test_allowed_names",
"tests/test_query_params.py::PostQueryParams::test_bad_percentage",
"tests/test_query_params.py::PostQueryParams::test_blank",
"tests/test_query_params.py::PostQueryParams::test_boolean_blank",
"tests/test_query_params.py::PostQueryParams::test_empty_body",
"tests/test_query_params.py::PostQueryParams::test_empty_body_no_content_length",
"tests/test_query_params.py::PostQueryParams::test_explicitly_disable_auto_parse",
"tests/test_query_params.py::PostQueryParams::test_get_date_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_date_store",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid",
"tests/test_query_params.py::PostQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::PostQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::PostQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::PostQueryParams::test_get_dict_store",
"tests/test_query_params.py::PostQueryParams::test_get_dict_valid",
"tests/test_query_params.py::PostQueryParams::test_int",
"tests/test_query_params.py::PostQueryParams::test_int_neg",
"tests/test_query_params.py::PostQueryParams::test_list_transformer",
"tests/test_query_params.py::PostQueryParams::test_list_type",
"tests/test_query_params.py::PostQueryParams::test_list_type_blank",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::PostQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::PostQueryParams::test_non_ascii",
"tests/test_query_params.py::PostQueryParams::test_none",
"tests/test_query_params.py::PostQueryParams::test_option_auto_parse_qs_csv_complex_false",
"tests/test_query_params.py::PostQueryParams::test_option_auto_parse_qs_csv_simple_false",
"tests/test_query_params.py::PostQueryParams::test_option_auto_parse_qs_csv_simple_true",
"tests/test_query_params.py::PostQueryParams::test_param_property",
"tests/test_query_params.py::PostQueryParams::test_percent_encoded",
"tests/test_query_params.py::PostQueryParams::test_required_1_get_param",
"tests/test_query_params.py::PostQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::PostQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::PostQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::PostQueryParams::test_simple",
"tests/test_query_params.py::GetQueryParams::test_allowed_names",
"tests/test_query_params.py::GetQueryParams::test_bad_percentage",
"tests/test_query_params.py::GetQueryParams::test_blank",
"tests/test_query_params.py::GetQueryParams::test_boolean_blank",
"tests/test_query_params.py::GetQueryParams::test_get_date_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_date_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_date_store",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid",
"tests/test_query_params.py::GetQueryParams::test_get_date_valid_with_format",
"tests/test_query_params.py::GetQueryParams::test_get_dict_invalid",
"tests/test_query_params.py::GetQueryParams::test_get_dict_missing_param",
"tests/test_query_params.py::GetQueryParams::test_get_dict_store",
"tests/test_query_params.py::GetQueryParams::test_get_dict_valid",
"tests/test_query_params.py::GetQueryParams::test_int",
"tests/test_query_params.py::GetQueryParams::test_int_neg",
"tests/test_query_params.py::GetQueryParams::test_list_transformer",
"tests/test_query_params.py::GetQueryParams::test_list_type",
"tests/test_query_params.py::GetQueryParams::test_list_type_blank",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys",
"tests/test_query_params.py::GetQueryParams::test_multiple_form_keys_as_list",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_bool",
"tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_int",
"tests/test_query_params.py::GetQueryParams::test_none",
"tests/test_query_params.py::GetQueryParams::test_option_auto_parse_qs_csv_complex_false",
"tests/test_query_params.py::GetQueryParams::test_option_auto_parse_qs_csv_simple_false",
"tests/test_query_params.py::GetQueryParams::test_option_auto_parse_qs_csv_simple_true",
"tests/test_query_params.py::GetQueryParams::test_param_property",
"tests/test_query_params.py::GetQueryParams::test_percent_encoded",
"tests/test_query_params.py::GetQueryParams::test_required_1_get_param",
"tests/test_query_params.py::GetQueryParams::test_required_2_get_param_as_int",
"tests/test_query_params.py::GetQueryParams::test_required_3_get_param_as_bool",
"tests/test_query_params.py::GetQueryParams::test_required_4_get_param_as_list",
"tests/test_query_params.py::GetQueryParams::test_simple",
"tests/test_query_params.py::PostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default"
]
| []
| Apache License 2.0 | 791 | [
"falcon/request.py"
]
| [
"falcon/request.py"
]
|
kevin1024__vcrpy-278 | ecbc192fc445d3bd91aba43d1a34dd6d9ccb1976 | 2016-10-02 20:25:36 | ecbc192fc445d3bd91aba43d1a34dd6d9ccb1976 | diff --git a/vcr/matchers.py b/vcr/matchers.py
index b54ed2f..8fe334e 100644
--- a/vcr/matchers.py
+++ b/vcr/matchers.py
@@ -49,7 +49,8 @@ def _transform_json(body):
# Request body is always a byte string, but json.loads() wants a text
# string. RFC 7159 says the default encoding is UTF-8 (although UTF-16
# and UTF-32 are also allowed: hmmmmm).
- return json.loads(body.decode('utf-8'))
+ if body:
+ return json.loads(body.decode('utf-8'))
_xml_header_checker = _header_checker('text/xml')
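For illustration, here is a self-contained sketch of the guarded transform applied above. The function name mirrors vcrpy's `_transform_json`, but the snippet stands alone and the sample bodies are invented:

```python
import json

def transform_json(body):
    # Request bodies are byte strings, but may also be None or empty,
    # e.g. a GET request carrying a JSON content type with no payload.
    # Guarding on truthiness avoids calling .decode() on None.
    if body:
        return json.loads(body.decode('utf-8'))
    return None

assert transform_json(None) is None             # previously: AttributeError
assert transform_json(b'') is None              # empty payloads are skipped
assert transform_json(b'{"a": 1}') == {"a": 1}  # normal bodies still parse
```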
| AttributeError: 'NoneType' object has no attribute 'decode' when 'body' in match_on, fixed with 'raw_body'
When using vcr with the following configuration, with 'body' in `match_on`, there are errors loading the JSON body -- it could be due to a blank response or a blank payload. When I use 'raw_body' instead, it works perfectly. Here's the stack trace (relevant sections):
```bash
File "/home/vagrant/venv/local/lib/python2.7/site-packages/requests/sessions.py", line 473, in get
return self.request('GET', url, **kwargs)
File "/home/vagrant/venv/local/lib/python2.7/site-packages/requests/sessions.py", line 461, in request
resp = self.send(prep, **send_kwargs)
File "/home/vagrant/venv/local/lib/python2.7/site-packages/requests/sessions.py", line 573, in send
r = adapter.send(request, **kwargs)
File "/home/vagrant/venv/local/lib/python2.7/site-packages/requests/adapters.py", line 370, in send
timeout=timeout
File "/home/vagrant/venv/local/lib/python2.7/site-packages/requests/packages/urllib3/connectionpool.py", line 518, in urlopen
body=body, headers=headers)
File "/home/vagrant/venv/local/lib/python2.7/site-packages/requests/packages/urllib3/connectionpool.py", line 353, in _make_request
httplib_response = conn.getresponse(buffering=True)
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/stubs/__init__.py", line 219, in getresponse
if self.cassette.can_play_response_for(self._vcr_request):
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/cassette.py", line 232, in can_play_response_for
return request and request in self and \
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/cassette.py", line 303, in __contains__
for index, response in self._responses(request):
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/cassette.py", line 227, in _responses
if requests_match(request, stored_request, self._match_on):
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/matchers.py", line 97, in requests_match
matches = [(m(r1, r2), m) for m in matchers]
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/matchers.py", line 80, in body
return transformer(read_body(r1)) == transformer(read_body(r2))
File "/home/vagrant/venv/local/lib/python2.7/site-packages/vcr/matchers.py", line 52, in _transform_json
return json.loads(body.decode('utf-8'))
AttributeError: 'NoneType' object has no attribute 'decode'
``` | kevin1024/vcrpy | diff --git a/tests/integration/test_requests.py b/tests/integration/test_requests.py
index 84a992b..804ab2e 100644
--- a/tests/integration/test_requests.py
+++ b/tests/integration/test_requests.py
@@ -38,6 +38,18 @@ def test_body(tmpdir, httpbin_both):
assert content == requests.get(url).content
+def test_get_empty_content_type_json(tmpdir, httpbin_both):
+ '''Ensure GET with application/json content-type and empty request body doesn't crash'''
+ url = httpbin_both + '/status/200'
+ headers = {'Content-Type': 'application/json'}
+
+ with vcr.use_cassette(str(tmpdir.join('get_empty_json.yaml')), match_on=('body',)):
+ status = requests.get(url, headers=headers).status_code
+
+ with vcr.use_cassette(str(tmpdir.join('get_empty_json.yaml')), match_on=('body',)):
+ assert status == requests.get(url, headers=headers).status_code
+
+
def test_effective_url(tmpdir, httpbin_both):
'''Ensure that the effective_url is captured'''
url = httpbin_both.url + '/redirect-to?url=/html'
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 1.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-httpbin",
"requests",
"httplib2",
"urllib3",
"tornado",
"boto",
"boto3",
"aiohttp"
],
"pre_install": [
"apt-get update",
"apt-get install -y libyaml-dev"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs==2.6.1
aiohttp==3.11.14
aiosignal==1.3.2
async-timeout==5.0.1
attrs==25.3.0
blinker==1.9.0
boto==2.49.0
boto3==1.37.23
botocore==1.37.23
brotlicffi==1.1.0.0
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
decorator==5.2.1
exceptiongroup==1.2.2
flasgger==0.9.7.1
Flask==3.1.0
frozenlist==1.5.0
greenlet==2.0.2
httpbin==0.10.2
httplib2==0.22.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
itsdangerous==2.2.0
Jinja2==3.1.6
jmespath==1.0.1
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
MarkupSafe==3.0.2
mistune==3.1.3
multidict==6.2.0
packaging==24.2
pluggy==1.5.0
propcache==0.3.1
pycparser==2.22
pyparsing==3.2.3
pytest==8.3.5
pytest-httpbin==2.1.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
s3transfer==0.11.4
six==1.17.0
tomli==2.2.1
tornado==6.4.2
typing_extensions==4.13.0
urllib3==1.26.20
-e git+https://github.com/kevin1024/vcrpy.git@ecbc192fc445d3bd91aba43d1a34dd6d9ccb1976#egg=vcrpy
Werkzeug==3.1.3
wrapt==1.17.2
yarl==1.18.3
zipp==3.21.0
| name: vcrpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohappyeyeballs==2.6.1
- aiohttp==3.11.14
- aiosignal==1.3.2
- async-timeout==5.0.1
- attrs==25.3.0
- blinker==1.9.0
- boto==2.49.0
- boto3==1.37.23
- botocore==1.37.23
- brotlicffi==1.1.0.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- decorator==5.2.1
- exceptiongroup==1.2.2
- flasgger==0.9.7.1
- flask==3.1.0
- frozenlist==1.5.0
- greenlet==2.0.2
- httpbin==0.10.2
- httplib2==0.22.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- itsdangerous==2.2.0
- jinja2==3.1.6
- jmespath==1.0.1
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- markupsafe==3.0.2
- mistune==3.1.3
- multidict==6.2.0
- packaging==24.2
- pluggy==1.5.0
- propcache==0.3.1
- pycparser==2.22
- pyparsing==3.2.3
- pytest==8.3.5
- pytest-httpbin==2.1.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- s3transfer==0.11.4
- six==1.17.0
- tomli==2.2.1
- tornado==6.4.2
- typing-extensions==4.13.0
- urllib3==1.26.20
- werkzeug==3.1.3
- wrapt==1.17.2
- yarl==1.18.3
- zipp==3.21.0
prefix: /opt/conda/envs/vcrpy
| [
"tests/integration/test_requests.py::test_get_empty_content_type_json[http]"
]
| [
"tests/integration/test_requests.py::test_status_code[https]",
"tests/integration/test_requests.py::test_headers[https]",
"tests/integration/test_requests.py::test_body[https]",
"tests/integration/test_requests.py::test_get_empty_content_type_json[https]",
"tests/integration/test_requests.py::test_effective_url[https]",
"tests/integration/test_requests.py::test_auth[https]",
"tests/integration/test_requests.py::test_auth_failed[https]",
"tests/integration/test_requests.py::test_post[https]",
"tests/integration/test_requests.py::test_post_chunked_binary[https]",
"tests/integration/test_requests.py::test_redirects[https]",
"tests/integration/test_requests.py::test_gzip[https]",
"tests/integration/test_requests.py::test_session_created_before_use_cassette_is_patched[https]",
"tests/integration/test_requests.py::test_nested_cassettes_with_session_created_before_nesting[https]",
"tests/integration/test_requests.py::test_post_file[https]",
"tests/integration/test_requests.py::test_filter_post_params[https]",
"tests/integration/test_requests.py::test_cross_scheme"
]
| [
"tests/integration/test_requests.py::test_status_code[http]",
"tests/integration/test_requests.py::test_headers[http]",
"tests/integration/test_requests.py::test_body[http]",
"tests/integration/test_requests.py::test_effective_url[http]",
"tests/integration/test_requests.py::test_auth[http]",
"tests/integration/test_requests.py::test_auth_failed[http]",
"tests/integration/test_requests.py::test_post[http]",
"tests/integration/test_requests.py::test_post_chunked_binary[http]",
"tests/integration/test_requests.py::test_redirects[http]",
"tests/integration/test_requests.py::test_gzip[http]",
"tests/integration/test_requests.py::test_session_created_before_use_cassette_is_patched[http]",
"tests/integration/test_requests.py::test_nested_cassettes_with_session_created_before_nesting[http]",
"tests/integration/test_requests.py::test_post_file[http]",
"tests/integration/test_requests.py::test_filter_post_params[http]",
"tests/integration/test_requests.py::test_session_and_connection_close",
"tests/integration/test_requests.py::test_https_with_cert_validation_disabled",
"tests/integration/test_requests.py::test_session_can_make_requests_after_requests_unpatched"
]
| []
| MIT License | 792 | [
"vcr/matchers.py"
]
| [
"vcr/matchers.py"
]
|
|
simphony__tornado-webapi-19 | 95ad403cd70109fb94727a119545567593d8203d | 2016-10-03 12:47:29 | 95ad403cd70109fb94727a119545567593d8203d | diff --git a/tornadowebapi/exceptions.py b/tornadowebapi/exceptions.py
index a5f3dd3..a2999b5 100644
--- a/tornadowebapi/exceptions.py
+++ b/tornadowebapi/exceptions.py
@@ -44,7 +44,7 @@ class NotFound(WebAPIException):
return None
-class BadRequest(WebAPIException):
+class BadRepresentation(WebAPIException):
"""Exception raised when the resource representation is
invalid or does not contain the appropriate keys.
Raise this exception in your handlers when the received
@@ -53,6 +53,10 @@ class BadRequest(WebAPIException):
http_code = httpstatus.BAD_REQUEST
+# Deprecated. Kept for compatibility
+BadRequest = BadRepresentation
+
+
class Unable(WebAPIException):
"""Exception raised when the request cannot be performed
for whatever reason that is not dependent on the client.
diff --git a/tox.ini b/tox.ini
index 1147235..debc066 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,2 +1,2 @@
[flake8]
-exclude = build/*,venv/*,doc/source/conf.py
+exclude = build/*,venv*/*,doc/source/conf.py
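As a quick illustration of the compatibility alias introduced above, the sketch below shows that code raising the old name keeps working; the constructor arguments follow the project's own test suite:

```python
from tornadowebapi import exceptions

# The old name resolves to the new class, so existing handlers
# that raise BadRequest continue to work unchanged.
assert exceptions.BadRequest is exceptions.BadRepresentation

try:
    raise exceptions.BadRequest("unprocessable", foo="bar")
except exceptions.BadRepresentation as exc:
    # The serialized error "type" now reports the new class name.
    print(type(exc).__name__)  # BadRepresentation
```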
| Change BadRequest error to BadRepresentation
BadRequest is an HTTP concept. What the CRUD methods actually handle is technically a bad representation, and the exception name should reflect that.
Migrated from https://github.com/simphony/simphony-remote/issues/181 | simphony/tornado-webapi | diff --git a/tornadowebapi/tests/test_webapi.py b/tornadowebapi/tests/test_webapi.py
index 3a3c8e5..e3f3b8b 100644
--- a/tornadowebapi/tests/test_webapi.py
+++ b/tornadowebapi/tests/test_webapi.py
@@ -74,19 +74,19 @@ class UnsupportAll(Resource):
class Unprocessable(Resource):
@gen.coroutine
def create(self, representation):
- raise exceptions.BadRequest("unprocessable", foo="bar")
+ raise exceptions.BadRepresentation("unprocessable", foo="bar")
@gen.coroutine
def update(self, identifier, representation):
- raise exceptions.BadRequest("unprocessable", foo="bar")
+ raise exceptions.BadRepresentation("unprocessable", foo="bar")
@gen.coroutine
def retrieve(self, identifier):
- raise exceptions.BadRequest("unprocessable", foo="bar")
+ raise exceptions.BadRepresentation("unprocessable", foo="bar")
@gen.coroutine
def items(self):
- raise exceptions.BadRequest("unprocessable", foo="bar")
+ raise exceptions.BadRepresentation("unprocessable", foo="bar")
class UnsupportsCollection(Resource):
@@ -328,7 +328,7 @@ class TestREST(AsyncHTTPTestCase):
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
self.assertEqual(res.headers["Content-Type"], 'application/json')
self.assertEqual(escape.json_decode(res.body), {
- "type": "BadRequest",
+ "type": "BadRepresentation",
"message": "unprocessable",
"foo": "bar",
})
@@ -340,7 +340,7 @@ class TestREST(AsyncHTTPTestCase):
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
self.assertEqual(res.headers["Content-Type"], 'application/json')
self.assertEqual(escape.json_decode(res.body), {
- "type": "BadRequest",
+ "type": "BadRepresentation",
"message": "unprocessable",
"foo": "bar",
})
@@ -353,7 +353,7 @@ class TestREST(AsyncHTTPTestCase):
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
self.assertEqual(res.headers["Content-Type"], 'application/json')
self.assertEqual(escape.json_decode(res.body), {
- "type": "BadRequest",
+ "type": "BadRepresentation",
"message": "unprocessable",
"foo": "bar",
})
@@ -365,7 +365,7 @@ class TestREST(AsyncHTTPTestCase):
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
self.assertEqual(res.headers["Content-Type"], 'application/json')
self.assertEqual(escape.json_decode(res.body), {
- "type": "BadRequest",
+ "type": "BadRepresentation",
"message": "unprocessable",
"foo": "bar",
})
@@ -378,7 +378,7 @@ class TestREST(AsyncHTTPTestCase):
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
self.assertEqual(res.headers["Content-Type"], 'application/json')
self.assertEqual(escape.json_decode(res.body), {
- "type": "BadRequest",
+ "type": "BadRepresentation",
"message": "unprocessable",
"foo": "bar",
})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
tomli==2.2.1
tornado==6.4.2
-e git+https://github.com/simphony/tornado-webapi.git@95ad403cd70109fb94727a119545567593d8203d#egg=tornadowebapi
| name: tornado-webapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- tomli==2.2.1
- tornado==6.4.2
prefix: /opt/conda/envs/tornado-webapi
| [
"tornadowebapi/tests/test_webapi.py::TestREST::test_unprocessable"
]
| []
| [
"tornadowebapi/tests/test_webapi.py::TestREST::test_broken",
"tornadowebapi/tests/test_webapi.py::TestREST::test_create",
"tornadowebapi/tests/test_webapi.py::TestREST::test_delete",
"tornadowebapi/tests/test_webapi.py::TestREST::test_items",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_non_json",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_on_resource",
"tornadowebapi/tests/test_webapi.py::TestREST::test_retrieve",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unexistent_resource_type",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupported_methods",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupports_collections",
"tornadowebapi/tests/test_webapi.py::TestREST::test_update",
"tornadowebapi/tests/test_webapi.py::TestRESTFunctions::test_api_handlers",
"tornadowebapi/tests/test_webapi.py::TestNonGlobalRegistry::test_non_global_registry"
]
| []
| BSD 3-Clause "New" or "Revised" License | 793 | [
"tox.ini",
"tornadowebapi/exceptions.py"
]
| [
"tox.ini",
"tornadowebapi/exceptions.py"
]
|
|
OpenKMIP__PyKMIP-194 | 40919468a64046349191c946991e0415fe768a0b | 2016-10-03 15:15:44 | 5c50ec73fef849ed0250ed53acb5f6be04ed1398 | diff --git a/kmip/services/server/engine.py b/kmip/services/server/engine.py
index c90d355..5787b30 100644
--- a/kmip/services/server/engine.py
+++ b/kmip/services/server/engine.py
@@ -249,7 +249,10 @@ class KmipEngine(object):
)
# Process the authentication credentials
- auth_credentials = header.authentication.credential
+ if header.authentication:
+ auth_credentials = header.authentication.credential
+ else:
+ auth_credentials = None
self._verify_credential(auth_credentials, credential)
# Process the batch error continuation option
| Install/Run Instructions and SQLAlchemy Issue
I have installed the latest version of pykmip-server (0.5.0) but have some questions.
I noticed that the instructions on the Wiki mention running the "run_server.py" script, but it appears as though the newer versions of PyKMIP support installing pykmip-server using the easy-install/pip method (python setup.py build/install). Is easy-install/pip now the preferred installation method?
When I start the server (0.5.0) using the pykmip-server script in /bin, I am receiving the following message:
/usr/lib/python3.4/site-packages/SQLAlchemy-1.1.0b3-py3.4-linux-x86_64.egg/sqlalchemy/orm/mapper.py:1034: SAWarning: Reassigning polymorphic association for identity 'Certificate' from <Mapper at 0x7f2fa55a99b0; Certificate> to <Mapper at 0x7f2fa55a9eb8; X509Certificate>: Check for duplicate use of 'Certificate' as value for polymorphic_identity
I have generated certs and they are signed by a CA. I even tried using the demo certs that were included. It appears as though the server still runs after this message is displayed, but I wasn't sure if it is just a warning or is causing other issues. I confirmed that I am binding to port 5696 on a public interface and have enabled logging. I'm not currently able to connect from my client.
The message I receive in the log when a client attempts to connect is:
2016-09-21 16:08:35,948 - kmip.server.session.00000001 - WARNING - An unexpected error occurred while processing request.
2016-09-21 16:08:35,948 - kmip.server.session.00000001 - ERROR - 'NoneType' object has no attribute 'credential'
Traceback (most recent call last):
File "/usr/lib/python3.4/site-packages/PyKMIP-0.5.0-py3.4.egg/kmip/services/server/session.py", line 106, in _handle_message_loop
request
File "/usr/lib/python3.4/site-packages/PyKMIP-0.5.0-py3.4.egg/kmip/services/server/engine.py", line 142, in decorator
return function(self, *args, **kwargs)
File "/usr/lib/python3.4/site-packages/PyKMIP-0.5.0-py3.4.egg/kmip/services/server/engine.py", line 252, in process_request
auth_credentials = header.authentication.credential
AttributeError: 'NoneType' object has no attribute 'credential'
Last question: I tried going back to a previous version of PyKMIP (such as 0.4.1), since it predates the use of SQLAlchemy. When I install it, there is no pykmip-server in /bin and I see no run_server.py. I tried running the kmip/services/server/server.py script, but it runs and then exits right away. What is the preferred way to run the server in version 0.4.1?
Sorry for all of the questions and thanks for your hard work. I would greatly appreciate any help you can provide.
| OpenKMIP/PyKMIP | diff --git a/kmip/tests/unit/services/server/test_engine.py b/kmip/tests/unit/services/server/test_engine.py
index f969557..d13a893 100644
--- a/kmip/tests/unit/services/server/test_engine.py
+++ b/kmip/tests/unit/services/server/test_engine.py
@@ -391,6 +391,41 @@ class TestKmipEngine(testtools.TestCase):
*args
)
+ def test_process_request_missing_credential(self):
+ """
+ Test that the engine does not immediately error out when retrieving
+ a non-existent credential from the request.
+ """
+ e = engine.KmipEngine()
+ e._logger = mock.MagicMock()
+
+ protocol = contents.ProtocolVersion.create(1, 1)
+ header = messages.RequestHeader(
+ protocol_version=protocol,
+ authentication=None,
+ batch_error_cont_option=contents.BatchErrorContinuationOption(
+ enums.BatchErrorContinuationOption.STOP
+ ),
+ batch_order_option=contents.BatchOrderOption(True),
+ time_stamp=contents.TimeStamp(int(time.time())),
+ batch_count=contents.BatchCount(1)
+ )
+ payload = discover_versions.DiscoverVersionsRequestPayload()
+ batch = list([
+ messages.RequestBatchItem(
+ operation=contents.Operation(
+ enums.Operation.DISCOVER_VERSIONS
+ ),
+ request_payload=payload
+ )
+ ])
+ request = messages.RequestMessage(
+ request_header=header,
+ batch_items=batch
+ )
+
+ e.process_request(request)
+
def test_build_error_response(self):
"""
Test that a bare bones response containing a single error result can
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"pytest",
"flake8",
"testtools",
"fixtures",
"testresources",
"mock",
"testscenarios",
"testrepository",
"sphinx",
"bandit"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"pip install tox bandit codecov coveralls"
],
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
bandit==1.7.1
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
coveralls==3.3.1
cryptography==40.0.2
distlib==0.3.9
docopt==0.6.2
docutils==0.17.1
enum34==1.1.10
extras==1.0.0
filelock==3.4.1
fixtures==4.0.0
flake8==5.0.4
gitdb==4.0.9
GitPython==3.1.18
greenlet==2.0.2
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
iso8601==1.1.0
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pbr==6.1.1
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
Pygments==2.14.0
-e git+https://github.com/OpenKMIP/PyKMIP.git@40919468a64046349191c946991e0415fe768a0b#egg=PyKMIP
pyparsing==3.1.4
pytest==7.0.1
python-subunit==1.4.2
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
smmap==5.0.0
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.54
stevedore==3.5.2
testrepository==0.0.21
testresources==2.0.1
testscenarios==0.5.0
testtools==2.6.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
zipp==3.6.0
| name: PyKMIP
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- bandit==1.7.1
- cffi==1.15.1
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- coveralls==3.3.1
- cryptography==40.0.2
- distlib==0.3.9
- docopt==0.6.2
- docutils==0.17.1
- enum34==1.1.10
- extras==1.0.0
- filelock==3.4.1
- fixtures==4.0.0
- flake8==5.0.4
- gitdb==4.0.9
- gitpython==3.1.18
- greenlet==2.0.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- iso8601==1.1.0
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pbr==6.1.1
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-subunit==1.4.2
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- smmap==5.0.0
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sqlalchemy==1.4.54
- stevedore==3.5.2
- testrepository==0.0.21
- testresources==2.0.1
- testscenarios==0.5.0
- testtools==2.6.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/PyKMIP
| [
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request_missing_credential"
]
| [
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_destroy",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get_object_type",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get_object_type_unsupported_type",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get_with_key_format_type"
]
| [
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_build_core_object",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_build_core_object_unsupported_type",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_build_error_response",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_get_destroy",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_key_pair",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_key_pair_get_destroy",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_key_pair_mismatched_attributes",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_key_pair_omitting_attributes",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_omitting_attributes",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_create_unsupported_object_type",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_discover_versions",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get_object_type_missing_object",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get_object_type_multiple_objects",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_get_with_unsupported_features",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_init",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_batch",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_batch_expected_error",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_batch_missing_batch_id",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_batch_unexpected_error",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_multibatch",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request_future_timestamp",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request_stale_timestamp",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request_unsupported_async_indicator",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request_unsupported_batch_option",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_request_unsupported_version",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_template_attribute",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_process_template_attribute_unsupported_features",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_query",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_register",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_register_get_destroy",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_register_unsupported_object_type",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_request_omitting_secret",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_set_attribute_on_managed_object",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_set_attribute_on_managed_object_unsupported_features",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_set_attributes_on_managed_object",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_set_attributes_on_managed_object_attribute_mismatch",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_supported_operation",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_unsupported_operation",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_version_operation_match",
"kmip/tests/unit/services/server/test_engine.py::TestKmipEngine::test_version_operation_mismatch"
]
| []
| Apache License 2.0 | 794 | [
"kmip/services/server/engine.py"
]
| [
"kmip/services/server/engine.py"
]
|
|
scrapy__scrapy-2306 | 7b49b9c0f53396ac89cbd74930bc4c6e41d41901 | 2016-10-05 15:26:13 | dfe6d3d59aa3de7a96c1883d0f3f576ba5994aa9 | codecov-io: ## [Current coverage](https://codecov.io/gh/scrapy/scrapy/pull/2306?src=pr) is 83.41% (diff: 95.34%)
> Merging [#2306](https://codecov.io/gh/scrapy/scrapy/pull/2306?src=pr) into [master](https://codecov.io/gh/scrapy/scrapy/branch/master?src=pr) will increase coverage by **3.45%**
```diff
@@ master #2306 diff @@
==========================================
Files 161 161
Lines 8719 8804 +85
Methods 0 0
Messages 0 0
Branches 1284 1295 +11
==========================================
+ Hits 6972 7344 +372
+ Misses 1496 1207 -289
- Partials 251 253 +2
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [3235bfe...9f9af6c](https://codecov.io/gh/scrapy/scrapy/compare/3235bfeb1ea46f4e99269469cecbb715c93b747e...9f9af6c7d03dce2006e5869881c4e6844f40fcff?src=pr)
redapple: I'm adding 2 policies that appeared in [the working draft](https://www.w3.org/TR/2016/WD-referrer-policy-20161222/):
- [`"strict-origin"`](https://www.w3.org/TR/2016/WD-referrer-policy-20161222/#referrer-policy-strict-origin)
- [`"strict-origin-when-cross-origin"`](https://www.w3.org/TR/2016/WD-referrer-policy-20161222/#referrer-policy-strict-origin-when-cross-origin)
redapple: Alright, @kmike , @eliasdorneles , @lopuhin , @dangra , I think this is ready for review.
Thanks in advance!
redapple: (I'm not sure why I use `ftps` in some tests. It's not supported by Scrapy)
redapple: @kmike , I rebased, updated the docs with `autoclass` directives and renamed the setting to `REFERRER_POLICY`.
redapple: Another thing could be to move the policies to scrapy/extensions, similar to HTTP cache policies (with `scrapy/scrapy/extensions/httpcache.py`). What do you think?
kmike: @redapple I think current code structure is fine; policies are not Scrapy extensions | diff --git a/docs/topics/request-response.rst b/docs/topics/request-response.rst
index 3d110b02d..67f8ec285 100644
--- a/docs/topics/request-response.rst
+++ b/docs/topics/request-response.rst
@@ -307,6 +307,7 @@ Those are:
* :reqmeta:`proxy`
* ``ftp_user`` (See :setting:`FTP_USER` for more info)
* ``ftp_password`` (See :setting:`FTP_PASSWORD` for more info)
+* :reqmeta:`referrer_policy`
.. reqmeta:: bindaddress
diff --git a/docs/topics/spider-middleware.rst b/docs/topics/spider-middleware.rst
index 8360827e8..9a0ccd0c1 100644
--- a/docs/topics/spider-middleware.rst
+++ b/docs/topics/spider-middleware.rst
@@ -95,7 +95,7 @@ following methods:
it has processed the response.
:meth:`process_spider_output` must return an iterable of
- :class:`~scrapy.http.Request`, dict or :class:`~scrapy.item.Item`
+ :class:`~scrapy.http.Request`, dict or :class:`~scrapy.item.Item`
objects.
:param response: the response which generated this output from the
@@ -328,6 +328,90 @@ Default: ``True``
Whether to enable referer middleware.
+.. setting:: REFERRER_POLICY
+
+REFERRER_POLICY
+^^^^^^^^^^^^^^^
+
+.. versionadded:: 1.4
+
+Default: ``'scrapy.spidermiddlewares.referer.DefaultReferrerPolicy'``
+
+.. reqmeta:: referrer_policy
+
+`Referrer Policy`_ to apply when populating Request "Referer" header.
+
+.. note::
+ You can also set the Referrer Policy per request,
+ using the special ``"referrer_policy"`` :ref:`Request.meta <topics-request-meta>` key,
+ with the same acceptable values as for the ``REFERRER_POLICY`` setting.
+
+Acceptable values for REFERRER_POLICY
+*************************************
+
+- either a path to a ``scrapy.spidermiddlewares.referer.ReferrerPolicy``
+ subclass — a custom policy or one of the built-in ones (see classes below),
+- or one of the standard W3C-defined string values,
+- or the special ``"scrapy-default"``.
+
+======================================= ========================================================================
+String value Class name (as a string)
+======================================= ========================================================================
+``"scrapy-default"`` (default) :class:`scrapy.spidermiddlewares.referer.DefaultReferrerPolicy`
+`"no-referrer"`_ :class:`scrapy.spidermiddlewares.referer.NoReferrerPolicy`
+`"no-referrer-when-downgrade"`_ :class:`scrapy.spidermiddlewares.referer.NoReferrerWhenDowngradePolicy`
+`"same-origin"`_ :class:`scrapy.spidermiddlewares.referer.SameOriginPolicy`
+`"origin"`_ :class:`scrapy.spidermiddlewares.referer.OriginPolicy`
+`"strict-origin"`_ :class:`scrapy.spidermiddlewares.referer.StrictOriginPolicy`
+`"origin-when-cross-origin"`_ :class:`scrapy.spidermiddlewares.referer.OriginWhenCrossOriginPolicy`
+`"strict-origin-when-cross-origin"`_ :class:`scrapy.spidermiddlewares.referer.StrictOriginWhenCrossOriginPolicy`
+`"unsafe-url"`_ :class:`scrapy.spidermiddlewares.referer.UnsafeUrlPolicy`
+======================================= ========================================================================
+
+.. autoclass:: DefaultReferrerPolicy
+.. warning::
+ Scrapy's default referrer policy — just like `"no-referrer-when-downgrade"`_,
+ the W3C-recommended value for browsers — will send a non-empty
+ "Referer" header from any ``http(s)://`` to any ``https://`` URL,
+ even if the domain is different.
+
+ `"same-origin"`_ may be a better choice if you want to remove referrer
+ information for cross-domain requests.
+
+.. autoclass:: NoReferrerPolicy
+
+.. autoclass:: NoReferrerWhenDowngradePolicy
+.. note::
+ "no-referrer-when-downgrade" policy is the W3C-recommended default,
+ and is used by major web browsers.
+
+ However, it is NOT Scrapy's default referrer policy (see :class:`DefaultReferrerPolicy`).
+
+.. autoclass:: SameOriginPolicy
+
+.. autoclass:: OriginPolicy
+
+.. autoclass:: StrictOriginPolicy
+
+.. autoclass:: OriginWhenCrossOriginPolicy
+
+.. autoclass:: StrictOriginWhenCrossOriginPolicy
+
+.. autoclass:: UnsafeUrlPolicy
+.. warning::
+ "unsafe-url" policy is NOT recommended.
+
+.. _Referrer Policy: https://www.w3.org/TR/referrer-policy
+.. _"no-referrer": https://www.w3.org/TR/referrer-policy/#referrer-policy-no-referrer
+.. _"no-referrer-when-downgrade": https://www.w3.org/TR/referrer-policy/#referrer-policy-no-referrer-when-downgrade
+.. _"same-origin": https://www.w3.org/TR/referrer-policy/#referrer-policy-same-origin
+.. _"origin": https://www.w3.org/TR/referrer-policy/#referrer-policy-origin
+.. _"strict-origin": https://www.w3.org/TR/referrer-policy/#referrer-policy-strict-origin
+.. _"origin-when-cross-origin": https://www.w3.org/TR/referrer-policy/#referrer-policy-origin-when-cross-origin
+.. _"strict-origin-when-cross-origin": https://www.w3.org/TR/referrer-policy/#referrer-policy-strict-origin-when-cross-origin
+.. _"unsafe-url": https://www.w3.org/TR/referrer-policy/#referrer-policy-unsafe-url
+
+
UrlLengthMiddleware
-------------------
diff --git a/scrapy/settings/default_settings.py b/scrapy/settings/default_settings.py
index a5931a3d5..35d9844a7 100644
--- a/scrapy/settings/default_settings.py
+++ b/scrapy/settings/default_settings.py
@@ -234,6 +234,7 @@ REDIRECT_MAX_TIMES = 20 # uses Firefox default setting
REDIRECT_PRIORITY_ADJUST = +2
REFERER_ENABLED = True
+REFERRER_POLICY = 'scrapy.spidermiddlewares.referer.DefaultReferrerPolicy'
RETRY_ENABLED = True
RETRY_TIMES = 2 # initial response + 2 retries = 3 requests
diff --git a/scrapy/spidermiddlewares/referer.py b/scrapy/spidermiddlewares/referer.py
index 6a8c46543..b444e34bb 100644
--- a/scrapy/spidermiddlewares/referer.py
+++ b/scrapy/spidermiddlewares/referer.py
@@ -2,22 +2,355 @@
RefererMiddleware: populates Request referer field, based on the Response which
originated it.
"""
+from six.moves.urllib.parse import urlparse
+import warnings
-from scrapy.http import Request
+from w3lib.url import safe_url_string
+
+from scrapy.http import Request, Response
from scrapy.exceptions import NotConfigured
+from scrapy import signals
+from scrapy.utils.python import to_native_str
+from scrapy.utils.httpobj import urlparse_cached
+from scrapy.utils.misc import load_object
+from scrapy.utils.url import strip_url
+
+
+LOCAL_SCHEMES = ('about', 'blob', 'data', 'filesystem',)
+
+POLICY_NO_REFERRER = "no-referrer"
+POLICY_NO_REFERRER_WHEN_DOWNGRADE = "no-referrer-when-downgrade"
+POLICY_SAME_ORIGIN = "same-origin"
+POLICY_ORIGIN = "origin"
+POLICY_STRICT_ORIGIN = "strict-origin"
+POLICY_ORIGIN_WHEN_CROSS_ORIGIN = "origin-when-cross-origin"
+POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN = "strict-origin-when-cross-origin"
+POLICY_UNSAFE_URL = "unsafe-url"
+POLICY_SCRAPY_DEFAULT = "scrapy-default"
+
+
+class ReferrerPolicy(object):
+
+ NOREFERRER_SCHEMES = LOCAL_SCHEMES
+
+ def referrer(self, response_url, request_url):
+ raise NotImplementedError()
+
+ def stripped_referrer(self, url):
+ if urlparse(url).scheme not in self.NOREFERRER_SCHEMES:
+ return self.strip_url(url)
+
+ def origin_referrer(self, url):
+ if urlparse(url).scheme not in self.NOREFERRER_SCHEMES:
+ return self.origin(url)
+
+ def strip_url(self, url, origin_only=False):
+ """
+ https://www.w3.org/TR/referrer-policy/#strip-url
+
+ If url is null, return no referrer.
+ If url's scheme is a local scheme, then return no referrer.
+ Set url's username to the empty string.
+ Set url's password to null.
+ Set url's fragment to null.
+ If the origin-only flag is true, then:
+ Set url's path to null.
+ Set url's query to null.
+ Return url.
+ """
+ if not url:
+ return None
+ return strip_url(url,
+ strip_credentials=True,
+ strip_fragment=True,
+ strip_default_port=True,
+ origin_only=origin_only)
+
+ def origin(self, url):
+ """Return serialized origin (scheme, host, path) for a request or response URL."""
+ return self.strip_url(url, origin_only=True)
+
+ def potentially_trustworthy(self, url):
+ # Note: this does not follow https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy
+ parsed_url = urlparse(url)
+ if parsed_url.scheme in ('data',):
+ return False
+ return self.tls_protected(url)
+
+ def tls_protected(self, url):
+ return urlparse(url).scheme in ('https', 'ftps')
+
+
+class NoReferrerPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-no-referrer
+
+ The simplest policy is "no-referrer", which specifies that no referrer information
+ is to be sent along with requests made from a particular request client to any origin.
+ The header will be omitted entirely.
+ """
+ name = POLICY_NO_REFERRER
+
+ def referrer(self, response_url, request_url):
+ return None
+
+
+class NoReferrerWhenDowngradePolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-no-referrer-when-downgrade
+
+ The "no-referrer-when-downgrade" policy sends a full URL along with requests
+ from a TLS-protected environment settings object to a potentially trustworthy URL,
+ and requests from clients which are not TLS-protected to any origin.
+
+ Requests from TLS-protected clients to non-potentially trustworthy URLs,
+ on the other hand, will contain no referrer information.
+ A Referer HTTP header will not be sent.
+
+ This is a user agent's default behavior, if no policy is otherwise specified.
+ """
+ name = POLICY_NO_REFERRER_WHEN_DOWNGRADE
+
+ def referrer(self, response_url, request_url):
+ if not self.tls_protected(response_url) or self.tls_protected(request_url):
+ return self.stripped_referrer(response_url)
+
+
+class SameOriginPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-same-origin
+
+ The "same-origin" policy specifies that a full URL, stripped for use as a referrer,
+ is sent as referrer information when making same-origin requests from a particular request client.
+
+ Cross-origin requests, on the other hand, will contain no referrer information.
+ A Referer HTTP header will not be sent.
+ """
+ name = POLICY_SAME_ORIGIN
+
+ def referrer(self, response_url, request_url):
+ if self.origin(response_url) == self.origin(request_url):
+ return self.stripped_referrer(response_url)
+
+
+class OriginPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-origin
+
+ The "origin" policy specifies that only the ASCII serialization
+ of the origin of the request client is sent as referrer information
+ when making both same-origin requests and cross-origin requests
+ from a particular request client.
+ """
+ name = POLICY_ORIGIN
+
+ def referrer(self, response_url, request_url):
+ return self.origin_referrer(response_url)
+
+
+class StrictOriginPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-strict-origin
+
+ The "strict-origin" policy sends the ASCII serialization
+ of the origin of the request client when making requests:
+ - from a TLS-protected environment settings object to a potentially trustworthy URL, and
+ - from non-TLS-protected environment settings objects to any origin.
+
+ Requests from TLS-protected request clients to non- potentially trustworthy URLs,
+ on the other hand, will contain no referrer information.
+ A Referer HTTP header will not be sent.
+ """
+ name = POLICY_STRICT_ORIGIN
+
+ def referrer(self, response_url, request_url):
+ if ((self.tls_protected(response_url) and
+ self.potentially_trustworthy(request_url))
+ or not self.tls_protected(response_url)):
+ return self.origin_referrer(response_url)
+
+
+class OriginWhenCrossOriginPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-origin-when-cross-origin
+
+ The "origin-when-cross-origin" policy specifies that a full URL,
+ stripped for use as a referrer, is sent as referrer information
+ when making same-origin requests from a particular request client,
+ and only the ASCII serialization of the origin of the request client
+ is sent as referrer information when making cross-origin requests
+ from a particular request client.
+ """
+ name = POLICY_ORIGIN_WHEN_CROSS_ORIGIN
+
+ def referrer(self, response_url, request_url):
+ origin = self.origin(response_url)
+ if origin == self.origin(request_url):
+ return self.stripped_referrer(response_url)
+ else:
+ return origin
+
+
+class StrictOriginWhenCrossOriginPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-strict-origin-when-cross-origin
+
+ The "strict-origin-when-cross-origin" policy specifies that a full URL,
+ stripped for use as a referrer, is sent as referrer information
+ when making same-origin requests from a particular request client,
+ and only the ASCII serialization of the origin of the request client
+ when making cross-origin requests:
+
+ - from a TLS-protected environment settings object to a potentially trustworthy URL, and
+ - from non-TLS-protected environment settings objects to any origin.
+
+ Requests from TLS-protected clients to non- potentially trustworthy URLs,
+ on the other hand, will contain no referrer information.
+ A Referer HTTP header will not be sent.
+ """
+ name = POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN
+
+ def referrer(self, response_url, request_url):
+ origin = self.origin(response_url)
+ if origin == self.origin(request_url):
+ return self.stripped_referrer(response_url)
+ elif ((self.tls_protected(response_url) and
+ self.potentially_trustworthy(request_url))
+ or not self.tls_protected(response_url)):
+ return self.origin_referrer(response_url)
+
+
+class UnsafeUrlPolicy(ReferrerPolicy):
+ """
+ https://www.w3.org/TR/referrer-policy/#referrer-policy-unsafe-url
+
+ The "unsafe-url" policy specifies that a full URL, stripped for use as a referrer,
+ is sent along with both cross-origin requests
+ and same-origin requests made from a particular request client.
+
+ Note: The policy's name doesn't lie; it is unsafe.
+ This policy will leak origins and paths from TLS-protected resources
+ to insecure origins.
+ Carefully consider the impact of setting such a policy for potentially sensitive documents.
+ """
+ name = POLICY_UNSAFE_URL
+
+ def referrer(self, response_url, request_url):
+ return self.stripped_referrer(response_url)
+
+
+class DefaultReferrerPolicy(NoReferrerWhenDowngradePolicy):
+ """
+ A variant of "no-referrer-when-downgrade",
+ with the addition that "Referer" is not sent if the parent request was
+ using ``file://`` or ``s3://`` scheme.
+ """
+ NOREFERRER_SCHEMES = LOCAL_SCHEMES + ('file', 's3')
+ name = POLICY_SCRAPY_DEFAULT
+
+
+_policy_classes = {p.name: p for p in (
+ NoReferrerPolicy,
+ NoReferrerWhenDowngradePolicy,
+ SameOriginPolicy,
+ OriginPolicy,
+ StrictOriginPolicy,
+ OriginWhenCrossOriginPolicy,
+ StrictOriginWhenCrossOriginPolicy,
+ UnsafeUrlPolicy,
+ DefaultReferrerPolicy,
+)}
+
+
+def _load_policy_class(policy, warning_only=False):
+ """
+ Expect a string for the path to the policy class,
+ otherwise try to interpret the string as a standard value
+ from https://www.w3.org/TR/referrer-policy/#referrer-policies
+ """
+ try:
+ return load_object(policy)
+ except ValueError:
+ try:
+ return _policy_classes[policy.lower()]
+ except KeyError:
+ msg = "Could not load referrer policy %r" % policy
+ if not warning_only:
+ raise RuntimeError(msg)
+ else:
+ warnings.warn(msg, RuntimeWarning)
+ return None
+
class RefererMiddleware(object):
+ def __init__(self, settings=None):
+ self.default_policy = DefaultReferrerPolicy
+ if settings is not None:
+ self.default_policy = _load_policy_class(
+ settings.get('REFERRER_POLICY'))
+
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('REFERER_ENABLED'):
raise NotConfigured
- return cls()
+ mw = cls(crawler.settings)
+
+ # Note: this hook is a bit of a hack to intercept redirections
+ crawler.signals.connect(mw.request_scheduled, signal=signals.request_scheduled)
+
+ return mw
+
+ def policy(self, resp_or_url, request):
+ """
+ Determine Referrer-Policy to use from a parent Response (or URL),
+ and a Request to be sent.
+
+ - if a valid policy is set in Request meta, it is used.
+ - if the policy is set in meta but is wrong (e.g. a typo error),
+ the policy from settings is used
+ - if the policy is not set in Request meta,
+ but there is a Referrer-policy header in the parent response,
+ it is used if valid
+ - otherwise, the policy from settings is used.
+ """
+ policy_name = request.meta.get('referrer_policy')
+ if policy_name is None:
+ if isinstance(resp_or_url, Response):
+ policy_name = to_native_str(
+ resp_or_url.headers.get('Referrer-Policy', '').decode('latin1'))
+ if policy_name is None:
+ return self.default_policy()
+
+ cls = _load_policy_class(policy_name, warning_only=True)
+ return cls() if cls else self.default_policy()
def process_spider_output(self, response, result, spider):
def _set_referer(r):
if isinstance(r, Request):
- r.headers.setdefault('Referer', response.url)
+ referrer = self.policy(response, r).referrer(response.url, r.url)
+ if referrer is not None:
+ r.headers.setdefault('Referer', referrer)
return r
return (_set_referer(r) for r in result or ())
+ def request_scheduled(self, request, spider):
+ # check redirected request to patch "Referer" header if necessary
+ redirected_urls = request.meta.get('redirect_urls', [])
+ if redirected_urls:
+ request_referrer = request.headers.get('Referer')
+ # we don't patch the referrer value if there is none
+ if request_referrer is not None:
+ # the request's referrer header value acts as a surrogate
+ # for the parent response URL
+ #
+ # Note: if the 3xx response contained a Referrer-Policy header,
+ # the information is not available using this hook
+ parent_url = safe_url_string(request_referrer)
+ policy_referrer = self.policy(parent_url, request).referrer(
+ parent_url, request.url)
+ if policy_referrer != request_referrer:
+ if policy_referrer is None:
+ request.headers.pop('Referer')
+ else:
+ request.headers['Referer'] = policy_referrer
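For reference, the two configuration hooks added by this patch, the `REFERRER_POLICY` setting and the `referrer_policy` meta key, can be combined as in the sketch below; the spider name and URLs are placeholders:

```python
import scrapy

class RefererPolicyDemo(scrapy.Spider):
    name = 'referer-policy-demo'
    # Spider-wide policy: a W3C string value or a class path both work.
    custom_settings = {'REFERRER_POLICY': 'same-origin'}
    start_urls = ['https://example.com/']

    def parse(self, response):
        # Per-request override through the special meta key.
        yield scrapy.Request(
            'https://external.example/page',
            meta={'referrer_policy': 'no-referrer'},
            callback=self.parse_external,
        )

    def parse_external(self, response):
        self.logger.info('Referer sent: %s',
                         response.request.headers.get('Referer'))
```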
diff --git a/scrapy/utils/url.py b/scrapy/utils/url.py
index dc1cce4ac..8eed31060 100644
--- a/scrapy/utils/url.py
+++ b/scrapy/utils/url.py
@@ -7,7 +7,7 @@ to the w3lib.url module. Always import those from there instead.
"""
import posixpath
import re
-from six.moves.urllib.parse import (ParseResult, urldefrag, urlparse)
+from six.moves.urllib.parse import (ParseResult, urldefrag, urlparse, urlunparse)
# scrapy.utils.url was moved to w3lib.url and import * ensures this
# move doesn't break old code
@@ -103,3 +103,34 @@ def guess_scheme(url):
return any_to_uri(url)
else:
return add_http_if_no_scheme(url)
+
+
+def strip_url(url, strip_credentials=True, strip_default_port=True, origin_only=False, strip_fragment=True):
+
+ """Strip URL string from some of its components:
+
+ - `strip_credentials` removes "user:password@"
+ - `strip_default_port` removes ":80" (resp. ":443", ":21")
+ from http:// (resp. https://, ftp://) URLs
+ - `origin_only` replaces path component with "/", also dropping
+ query and fragment components ; it also strips credentials
+ - `strip_fragment` drops any #fragment component
+ """
+
+ parsed_url = urlparse(url)
+ netloc = parsed_url.netloc
+ if (strip_credentials or origin_only) and (parsed_url.username or parsed_url.password):
+ netloc = netloc.split('@')[-1]
+ if strip_default_port and parsed_url.port:
+ if (parsed_url.scheme, parsed_url.port) in (('http', 80),
+ ('https', 443),
+ ('ftp', 21)):
+ netloc = netloc.replace(':{p.port}'.format(p=parsed_url), '')
+ return urlunparse((
+ parsed_url.scheme,
+ netloc,
+ '/' if origin_only else parsed_url.path,
+ '' if origin_only else parsed_url.params,
+ '' if origin_only else parsed_url.query,
+ '' if strip_fragment else parsed_url.fragment
+ ))
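A few example calls against `strip_url` as defined above; the expected values follow directly from the docstring and from the referrer-policy test scenarios below:

```python
from scrapy.utils.url import strip_url  # added by this patch

# Credentials, the default port and the fragment are all dropped.
print(strip_url('http://user:[email protected]:80/page.html#frag'))
# -> 'http://example.com/page.html'

# origin_only keeps only scheme and host, with a bare '/' path.
print(strip_url('https://example.com/page.html?q=1', origin_only=True))
# -> 'https://example.com/'

# Non-default ports are preserved.
print(strip_url('https://example.com:444/page.html'))
# -> 'https://example.com:444/page.html'
```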
| Scrapy uses `file://` URL as referer
If I use a local file in a request, e.g.
``` python
def start_requests(self):
yield Request(path_to_file_uri(filename))
def parse(self, response):
for url in json.loads(response.text):
yield Request(url)
```
and then make an HTTP request in the callback, Scrapy uses the file URL as the `Referer` value in the HTTP header.
I think `file://` URLs (or generally URLs with any protocol other than http/s) should not be used as the `Referer` value, because
1. it is not part of the normal hyperlink navigation
2. it exposes the local file structure to the remote site
| scrapy/scrapy | diff --git a/tests/test_spidermiddleware_referer.py b/tests/test_spidermiddleware_referer.py
index bd7673efb..b1c815876 100644
--- a/tests/test_spidermiddleware_referer.py
+++ b/tests/test_spidermiddleware_referer.py
@@ -1,21 +1,867 @@
+from six.moves.urllib.parse import urlparse
from unittest import TestCase
+import warnings
+from scrapy.exceptions import NotConfigured
from scrapy.http import Response, Request
+from scrapy.settings import Settings
from scrapy.spiders import Spider
-from scrapy.spidermiddlewares.referer import RefererMiddleware
+from scrapy.downloadermiddlewares.redirect import RedirectMiddleware
+from scrapy.spidermiddlewares.referer import RefererMiddleware, \
+ POLICY_NO_REFERRER, POLICY_NO_REFERRER_WHEN_DOWNGRADE, \
+ POLICY_SAME_ORIGIN, POLICY_ORIGIN, POLICY_ORIGIN_WHEN_CROSS_ORIGIN, \
+ POLICY_SCRAPY_DEFAULT, POLICY_UNSAFE_URL, \
+ POLICY_STRICT_ORIGIN, POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN, \
+ DefaultReferrerPolicy, \
+ NoReferrerPolicy, NoReferrerWhenDowngradePolicy, \
+ OriginWhenCrossOriginPolicy, OriginPolicy, \
+ StrictOriginWhenCrossOriginPolicy, StrictOriginPolicy, \
+ SameOriginPolicy, UnsafeUrlPolicy, ReferrerPolicy
class TestRefererMiddleware(TestCase):
+ req_meta = {}
+ resp_headers = {}
+ settings = {}
+ scenarii = [
+ ('http://scrapytest.org', 'http://scrapytest.org/', b'http://scrapytest.org'),
+ ]
+
def setUp(self):
self.spider = Spider('foo')
- self.mw = RefererMiddleware()
+ settings = Settings(self.settings)
+ self.mw = RefererMiddleware(settings)
+
+ def get_request(self, target):
+ return Request(target, meta=self.req_meta)
+
+ def get_response(self, origin):
+ return Response(origin, headers=self.resp_headers)
+
+ def test(self):
+
+ for origin, target, referrer in self.scenarii:
+ response = self.get_response(origin)
+ request = self.get_request(target)
+ out = list(self.mw.process_spider_output(response, [request], self.spider))
+ self.assertEquals(out[0].headers.get('Referer'), referrer)
+
+
+class MixinDefault(object):
+ """
+ Based on https://www.w3.org/TR/referrer-policy/#referrer-policy-no-referrer-when-downgrade
+
+ with some additional filtering of s3://
+ """
+ scenarii = [
+ ('https://example.com/', 'https://scrapy.org/', b'https://example.com/'),
+ ('http://example.com/', 'http://scrapy.org/', b'http://example.com/'),
+ ('http://example.com/', 'https://scrapy.org/', b'http://example.com/'),
+ ('https://example.com/', 'http://scrapy.org/', None),
+
+ # no credentials leak
+ ('http://user:[email protected]/', 'https://scrapy.org/', b'http://example.com/'),
+
+ # no referrer leak for local schemes
+ ('file:///home/path/to/somefile.html', 'https://scrapy.org/', None),
+ ('file:///home/path/to/somefile.html', 'http://scrapy.org/', None),
+
+ # no referrer leak for s3 origins
+ ('s3://mybucket/path/to/data.csv', 'https://scrapy.org/', None),
+ ('s3://mybucket/path/to/data.csv', 'http://scrapy.org/', None),
+ ]
+
+
+class MixinNoReferrer(object):
+ scenarii = [
+ ('https://example.com/page.html', 'https://example.com/', None),
+ ('http://www.example.com/', 'https://scrapy.org/', None),
+ ('http://www.example.com/', 'http://scrapy.org/', None),
+ ('https://www.example.com/', 'http://scrapy.org/', None),
+ ('file:///home/path/to/somefile.html', 'http://scrapy.org/', None),
+ ]
+
+
+class MixinNoReferrerWhenDowngrade(object):
+ scenarii = [
+ # TLS to TLS: send non-empty referrer
+ ('https://example.com/page.html', 'https://not.example.com/', b'https://example.com/page.html'),
+ ('https://example.com/page.html', 'https://scrapy.org/', b'https://example.com/page.html'),
+ ('https://example.com:443/page.html', 'https://scrapy.org/', b'https://example.com/page.html'),
+ ('https://example.com:444/page.html', 'https://scrapy.org/', b'https://example.com:444/page.html'),
+ ('ftps://example.com/urls.zip', 'https://scrapy.org/', b'ftps://example.com/urls.zip'),
+
+ # TLS to non-TLS: do not send referrer
+ ('https://example.com/page.html', 'http://not.example.com/', None),
+ ('https://example.com/page.html', 'http://scrapy.org/', None),
+ ('ftps://example.com/urls.zip', 'http://scrapy.org/', None),
+
+ # non-TLS to TLS or non-TLS: send referrer
+ ('http://example.com/page.html', 'https://not.example.com/', b'http://example.com/page.html'),
+ ('http://example.com/page.html', 'https://scrapy.org/', b'http://example.com/page.html'),
+ ('http://example.com:8080/page.html', 'https://scrapy.org/', b'http://example.com:8080/page.html'),
+ ('http://example.com:80/page.html', 'http://not.example.com/', b'http://example.com/page.html'),
+ ('http://example.com/page.html', 'http://scrapy.org/', b'http://example.com/page.html'),
+ ('http://example.com:443/page.html', 'http://scrapy.org/', b'http://example.com:443/page.html'),
+ ('ftp://example.com/urls.zip', 'http://scrapy.org/', b'ftp://example.com/urls.zip'),
+ ('ftp://example.com/urls.zip', 'https://scrapy.org/', b'ftp://example.com/urls.zip'),
+
+ # test for user/password stripping
+ ('http://user:[email protected]/page.html', 'https://not.example.com/', b'http://example.com/page.html'),
+ ]
+
+
+class MixinSameOrigin(object):
+ scenarii = [
+ # Same origin (protocol, host, port): send referrer
+ ('https://example.com/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('http://example.com/page.html', 'http://example.com/not-page.html', b'http://example.com/page.html'),
+ ('https://example.com:443/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('http://example.com:80/page.html', 'http://example.com/not-page.html', b'http://example.com/page.html'),
+ ('http://example.com/page.html', 'http://example.com:80/not-page.html', b'http://example.com/page.html'),
+ ('http://example.com:8888/page.html', 'http://example.com:8888/not-page.html', b'http://example.com:8888/page.html'),
+
+ # Different host: do NOT send referrer
+ ('https://example.com/page.html', 'https://not.example.com/otherpage.html', None),
+ ('http://example.com/page.html', 'http://not.example.com/otherpage.html', None),
+ ('http://example.com/page.html', 'http://www.example.com/otherpage.html', None),
+
+ # Different port: do NOT send referrer
+ ('https://example.com:444/page.html', 'https://example.com/not-page.html', None),
+ ('http://example.com:81/page.html', 'http://example.com/not-page.html', None),
+ ('http://example.com/page.html', 'http://example.com:81/not-page.html', None),
+
+        # Different protocols: do NOT send referrer
+ ('https://example.com/page.html', 'http://example.com/not-page.html', None),
+ ('https://example.com/page.html', 'http://not.example.com/', None),
+ ('ftps://example.com/urls.zip', 'https://example.com/not-page.html', None),
+ ('ftp://example.com/urls.zip', 'http://example.com/not-page.html', None),
+ ('ftps://example.com/urls.zip', 'https://example.com/not-page.html', None),
+
+ # test for user/password stripping
+ ('https://user:[email protected]/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('https://user:[email protected]/page.html', 'http://example.com/not-page.html', None),
+ ]
+
+
+class MixinOrigin(object):
+ scenarii = [
+ # TLS or non-TLS to TLS or non-TLS: referrer origin is sent (yes, even for downgrades)
+ ('https://example.com/page.html', 'https://example.com/not-page.html', b'https://example.com/'),
+ ('https://example.com/page.html', 'https://scrapy.org', b'https://example.com/'),
+ ('https://example.com/page.html', 'http://scrapy.org', b'https://example.com/'),
+ ('http://example.com/page.html', 'http://scrapy.org', b'http://example.com/'),
+
+ # test for user/password stripping
+ ('https://user:[email protected]/page.html', 'http://scrapy.org', b'https://example.com/'),
+ ]
+
+
+class MixinStrictOrigin(object):
+ scenarii = [
+ # TLS or non-TLS to TLS or non-TLS: referrer origin is sent but not for downgrades
+ ('https://example.com/page.html', 'https://example.com/not-page.html', b'https://example.com/'),
+ ('https://example.com/page.html', 'https://scrapy.org', b'https://example.com/'),
+ ('http://example.com/page.html', 'http://scrapy.org', b'http://example.com/'),
+
+ # downgrade: send nothing
+ ('https://example.com/page.html', 'http://scrapy.org', None),
+
+ # upgrade: send origin
+ ('http://example.com/page.html', 'https://scrapy.org', b'http://example.com/'),
+
+ # test for user/password stripping
+ ('https://user:[email protected]/page.html', 'https://scrapy.org', b'https://example.com/'),
+ ('https://user:[email protected]/page.html', 'http://scrapy.org', None),
+ ]
+
+
+class MixinOriginWhenCrossOrigin(object):
+ scenarii = [
+ # Same origin (protocol, host, port): send referrer
+ ('https://example.com/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('http://example.com/page.html', 'http://example.com/not-page.html', b'http://example.com/page.html'),
+ ('https://example.com:443/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('http://example.com:80/page.html', 'http://example.com/not-page.html', b'http://example.com/page.html'),
+ ('http://example.com/page.html', 'http://example.com:80/not-page.html', b'http://example.com/page.html'),
+ ('http://example.com:8888/page.html', 'http://example.com:8888/not-page.html', b'http://example.com:8888/page.html'),
+
+ # Different host: send origin as referrer
+ ('https://example2.com/page.html', 'https://scrapy.org/otherpage.html', b'https://example2.com/'),
+ ('https://example2.com/page.html', 'https://not.example2.com/otherpage.html', b'https://example2.com/'),
+ ('http://example2.com/page.html', 'http://not.example2.com/otherpage.html', b'http://example2.com/'),
+ # exact match required
+ ('http://example2.com/page.html', 'http://www.example2.com/otherpage.html', b'http://example2.com/'),
+
+ # Different port: send origin as referrer
+ ('https://example3.com:444/page.html', 'https://example3.com/not-page.html', b'https://example3.com:444/'),
+ ('http://example3.com:81/page.html', 'http://example3.com/not-page.html', b'http://example3.com:81/'),
+
+ # Different protocols: send origin as referrer
+ ('https://example4.com/page.html', 'http://example4.com/not-page.html', b'https://example4.com/'),
+ ('https://example4.com/page.html', 'http://not.example4.com/', b'https://example4.com/'),
+ ('ftps://example4.com/urls.zip', 'https://example4.com/not-page.html', b'ftps://example4.com/'),
+ ('ftp://example4.com/urls.zip', 'http://example4.com/not-page.html', b'ftp://example4.com/'),
+ ('ftps://example4.com/urls.zip', 'https://example4.com/not-page.html', b'ftps://example4.com/'),
+
+ # test for user/password stripping
+ ('https://user:[email protected]/page.html', 'https://example5.com/not-page.html', b'https://example5.com/page.html'),
+ # TLS to non-TLS downgrade: send origin
+ ('https://user:[email protected]/page.html', 'http://example5.com/not-page.html', b'https://example5.com/'),
+ ]
+
+
+class MixinStrictOriginWhenCrossOrigin(object):
+ scenarii = [
+ # Same origin (protocol, host, port): send referrer
+ ('https://example.com/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('http://example.com/page.html', 'http://example.com/not-page.html', b'http://example.com/page.html'),
+ ('https://example.com:443/page.html', 'https://example.com/not-page.html', b'https://example.com/page.html'),
+ ('http://example.com:80/page.html', 'http://example.com/not-page.html', b'http://example.com/page.html'),
+ ('http://example.com/page.html', 'http://example.com:80/not-page.html', b'http://example.com/page.html'),
+ ('http://example.com:8888/page.html', 'http://example.com:8888/not-page.html', b'http://example.com:8888/page.html'),
+
+ # Different host: send origin as referrer
+ ('https://example2.com/page.html', 'https://scrapy.org/otherpage.html', b'https://example2.com/'),
+ ('https://example2.com/page.html', 'https://not.example2.com/otherpage.html', b'https://example2.com/'),
+ ('http://example2.com/page.html', 'http://not.example2.com/otherpage.html', b'http://example2.com/'),
+ # exact match required
+ ('http://example2.com/page.html', 'http://www.example2.com/otherpage.html', b'http://example2.com/'),
+
+ # Different port: send origin as referrer
+ ('https://example3.com:444/page.html', 'https://example3.com/not-page.html', b'https://example3.com:444/'),
+ ('http://example3.com:81/page.html', 'http://example3.com/not-page.html', b'http://example3.com:81/'),
+
+ # downgrade
+ ('https://example4.com/page.html', 'http://example4.com/not-page.html', None),
+ ('https://example4.com/page.html', 'http://not.example4.com/', None),
+
+ # non-TLS to non-TLS
+ ('ftp://example4.com/urls.zip', 'http://example4.com/not-page.html', b'ftp://example4.com/'),
+
+ # upgrade
+ ('http://example4.com/page.html', 'https://example4.com/not-page.html', b'http://example4.com/'),
+ ('http://example4.com/page.html', 'https://not.example4.com/', b'http://example4.com/'),
+
+ # Different protocols: send origin as referrer
+ ('ftps://example4.com/urls.zip', 'https://example4.com/not-page.html', b'ftps://example4.com/'),
+ ('ftps://example4.com/urls.zip', 'https://example4.com/not-page.html', b'ftps://example4.com/'),
+
+ # test for user/password stripping
+ ('https://user:[email protected]/page.html', 'https://example5.com/not-page.html', b'https://example5.com/page.html'),
+
+ # TLS to non-TLS downgrade: send nothing
+ ('https://user:[email protected]/page.html', 'http://example5.com/not-page.html', None),
+ ]
+
+
+class MixinUnsafeUrl(object):
+ scenarii = [
+ # TLS to TLS: send referrer
+ ('https://example.com/sekrit.html', 'http://not.example.com/', b'https://example.com/sekrit.html'),
+ ('https://example1.com/page.html', 'https://not.example1.com/', b'https://example1.com/page.html'),
+ ('https://example1.com/page.html', 'https://scrapy.org/', b'https://example1.com/page.html'),
+ ('https://example1.com:443/page.html', 'https://scrapy.org/', b'https://example1.com/page.html'),
+ ('https://example1.com:444/page.html', 'https://scrapy.org/', b'https://example1.com:444/page.html'),
+ ('ftps://example1.com/urls.zip', 'https://scrapy.org/', b'ftps://example1.com/urls.zip'),
+
+ # TLS to non-TLS: send referrer (yes, it's unsafe)
+ ('https://example2.com/page.html', 'http://not.example2.com/', b'https://example2.com/page.html'),
+ ('https://example2.com/page.html', 'http://scrapy.org/', b'https://example2.com/page.html'),
+ ('ftps://example2.com/urls.zip', 'http://scrapy.org/', b'ftps://example2.com/urls.zip'),
+
+ # non-TLS to TLS or non-TLS: send referrer (yes, it's unsafe)
+ ('http://example3.com/page.html', 'https://not.example3.com/', b'http://example3.com/page.html'),
+ ('http://example3.com/page.html', 'https://scrapy.org/', b'http://example3.com/page.html'),
+ ('http://example3.com:8080/page.html', 'https://scrapy.org/', b'http://example3.com:8080/page.html'),
+ ('http://example3.com:80/page.html', 'http://not.example3.com/', b'http://example3.com/page.html'),
+ ('http://example3.com/page.html', 'http://scrapy.org/', b'http://example3.com/page.html'),
+ ('http://example3.com:443/page.html', 'http://scrapy.org/', b'http://example3.com:443/page.html'),
+ ('ftp://example3.com/urls.zip', 'http://scrapy.org/', b'ftp://example3.com/urls.zip'),
+ ('ftp://example3.com/urls.zip', 'https://scrapy.org/', b'ftp://example3.com/urls.zip'),
+
+ # test for user/password stripping
+ ('http://user:[email protected]/page.html', 'https://not.example4.com/', b'http://example4.com/page.html'),
+ ('https://user:[email protected]/page.html', 'http://scrapy.org/', b'https://example4.com/page.html'),
+ ]
+
+
+class TestRefererMiddlewareDefault(MixinDefault, TestRefererMiddleware):
+ pass
+
+
+# --- Tests using settings to set policy using class path
+class TestSettingsNoReferrer(MixinNoReferrer, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.NoReferrerPolicy'}
+
+
+class TestSettingsNoReferrerWhenDowngrade(MixinNoReferrerWhenDowngrade, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.NoReferrerWhenDowngradePolicy'}
+
+
+class TestSettingsSameOrigin(MixinSameOrigin, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.SameOriginPolicy'}
+
+
+class TestSettingsOrigin(MixinOrigin, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.OriginPolicy'}
+
+
+class TestSettingsStrictOrigin(MixinStrictOrigin, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.StrictOriginPolicy'}
+
+
+class TestSettingsOriginWhenCrossOrigin(MixinOriginWhenCrossOrigin, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.OriginWhenCrossOriginPolicy'}
+
+
+class TestSettingsStrictOriginWhenCrossOrigin(MixinStrictOriginWhenCrossOrigin, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.StrictOriginWhenCrossOriginPolicy'}
+
+
+class TestSettingsUnsafeUrl(MixinUnsafeUrl, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.UnsafeUrlPolicy'}
+
+
+class CustomPythonOrgPolicy(ReferrerPolicy):
+ """
+ A dummy policy that returns referrer as http(s)://python.org
+ depending on the scheme of the target URL.
+ """
+ def referrer(self, response, request):
+ scheme = urlparse(request).scheme
+ if scheme == 'https':
+ return b'https://python.org/'
+ elif scheme == 'http':
+ return b'http://python.org/'
+
+
+class TestSettingsCustomPolicy(TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'tests.test_spidermiddleware_referer.CustomPythonOrgPolicy'}
+ scenarii = [
+ ('https://example.com/', 'https://scrapy.org/', b'https://python.org/'),
+ ('http://example.com/', 'http://scrapy.org/', b'http://python.org/'),
+ ('http://example.com/', 'https://scrapy.org/', b'https://python.org/'),
+ ('https://example.com/', 'http://scrapy.org/', b'http://python.org/'),
+ ('file:///home/path/to/somefile.html', 'https://scrapy.org/', b'https://python.org/'),
+ ('file:///home/path/to/somefile.html', 'http://scrapy.org/', b'http://python.org/'),
+
+ ]
+
+# --- Tests using Request meta dict to set policy
+class TestRequestMetaDefault(MixinDefault, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_SCRAPY_DEFAULT}
+
+
+class TestRequestMetaNoReferrer(MixinNoReferrer, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_NO_REFERRER}
+
+
+class TestRequestMetaNoReferrerWhenDowngrade(MixinNoReferrerWhenDowngrade, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_NO_REFERRER_WHEN_DOWNGRADE}
+
+
+class TestRequestMetaSameOrigin(MixinSameOrigin, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_SAME_ORIGIN}
+
+
+class TestRequestMetaOrigin(MixinOrigin, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_ORIGIN}
+
+
+class TestRequestMetaSrictOrigin(MixinStrictOrigin, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_STRICT_ORIGIN}
+
+
+class TestRequestMetaOriginWhenCrossOrigin(MixinOriginWhenCrossOrigin, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_ORIGIN_WHEN_CROSS_ORIGIN}
+
+
+class TestRequestMetaStrictOriginWhenCrossOrigin(MixinStrictOriginWhenCrossOrigin, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN}
+
+
+class TestRequestMetaUnsafeUrl(MixinUnsafeUrl, TestRefererMiddleware):
+ req_meta = {'referrer_policy': POLICY_UNSAFE_URL}
+
+
+class TestRequestMetaPredecence001(MixinUnsafeUrl, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.SameOriginPolicy'}
+ req_meta = {'referrer_policy': POLICY_UNSAFE_URL}
+
+
+class TestRequestMetaPredecence002(MixinNoReferrer, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.NoReferrerWhenDowngradePolicy'}
+ req_meta = {'referrer_policy': POLICY_NO_REFERRER}
+
+
+class TestRequestMetaPredecence003(MixinUnsafeUrl, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.OriginWhenCrossOriginPolicy'}
+ req_meta = {'referrer_policy': POLICY_UNSAFE_URL}
+
+
+class TestRequestMetaSettingFallback(TestCase):
+
+ params = [
+ (
+ # When an unknown policy is referenced in Request.meta
+ # (here, a typo error),
+ # the policy defined in settings takes precedence
+ {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.OriginWhenCrossOriginPolicy'},
+ {},
+ {'referrer_policy': 'ssscrapy-default'},
+ OriginWhenCrossOriginPolicy,
+ True
+ ),
+ (
+ # same as above but with string value for settings policy
+ {'REFERRER_POLICY': 'origin-when-cross-origin'},
+ {},
+ {'referrer_policy': 'ssscrapy-default'},
+ OriginWhenCrossOriginPolicy,
+ True
+ ),
+ (
+ # request meta references a wrong policy but it is set,
+ # so the Referrer-Policy header in response is not used,
+ # and the settings' policy is applied
+ {'REFERRER_POLICY': 'origin-when-cross-origin'},
+ {'Referrer-Policy': 'unsafe-url'},
+ {'referrer_policy': 'ssscrapy-default'},
+ OriginWhenCrossOriginPolicy,
+ True
+ ),
+ (
+ # here, request meta does not set the policy
+ # so response headers take precedence
+ {'REFERRER_POLICY': 'origin-when-cross-origin'},
+ {'Referrer-Policy': 'unsafe-url'},
+ {},
+ UnsafeUrlPolicy,
+ False
+ ),
+ (
+ # here, request meta does not set the policy,
+ # but response headers also use an unknown policy,
+ # so the settings' policy is used
+ {'REFERRER_POLICY': 'origin-when-cross-origin'},
+ {'Referrer-Policy': 'unknown'},
+ {},
+ OriginWhenCrossOriginPolicy,
+ True
+ )
+ ]
+
+ def test(self):
+
+ origin = 'http://www.scrapy.org'
+ target = 'http://www.example.com'
+
+        for settings, response_headers, request_meta, policy_class, check_warning in self.params:
+ spider = Spider('foo')
+ mw = RefererMiddleware(Settings(settings))
+
+ response = Response(origin, headers=response_headers)
+ request = Request(target, meta=request_meta)
+
+ with warnings.catch_warnings(record=True) as w:
+ policy = mw.policy(response, request)
+ self.assertIsInstance(policy, policy_class)
+
+ if check_warning:
+ self.assertEqual(len(w), 1)
+ self.assertEqual(w[0].category, RuntimeWarning, w[0].message)
+
+
+class TestSettingsPolicyByName(TestCase):
+
+ def test_valid_name(self):
+ for s, p in [
+ (POLICY_SCRAPY_DEFAULT, DefaultReferrerPolicy),
+ (POLICY_NO_REFERRER, NoReferrerPolicy),
+ (POLICY_NO_REFERRER_WHEN_DOWNGRADE, NoReferrerWhenDowngradePolicy),
+ (POLICY_SAME_ORIGIN, SameOriginPolicy),
+ (POLICY_ORIGIN, OriginPolicy),
+ (POLICY_STRICT_ORIGIN, StrictOriginPolicy),
+ (POLICY_ORIGIN_WHEN_CROSS_ORIGIN, OriginWhenCrossOriginPolicy),
+ (POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN, StrictOriginWhenCrossOriginPolicy),
+ (POLICY_UNSAFE_URL, UnsafeUrlPolicy),
+ ]:
+ settings = Settings({'REFERRER_POLICY': s})
+ mw = RefererMiddleware(settings)
+ self.assertEquals(mw.default_policy, p)
+
+ def test_valid_name_casevariants(self):
+ for s, p in [
+ (POLICY_SCRAPY_DEFAULT, DefaultReferrerPolicy),
+ (POLICY_NO_REFERRER, NoReferrerPolicy),
+ (POLICY_NO_REFERRER_WHEN_DOWNGRADE, NoReferrerWhenDowngradePolicy),
+ (POLICY_SAME_ORIGIN, SameOriginPolicy),
+ (POLICY_ORIGIN, OriginPolicy),
+ (POLICY_STRICT_ORIGIN, StrictOriginPolicy),
+ (POLICY_ORIGIN_WHEN_CROSS_ORIGIN, OriginWhenCrossOriginPolicy),
+ (POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN, StrictOriginWhenCrossOriginPolicy),
+ (POLICY_UNSAFE_URL, UnsafeUrlPolicy),
+ ]:
+ settings = Settings({'REFERRER_POLICY': s.upper()})
+ mw = RefererMiddleware(settings)
+ self.assertEquals(mw.default_policy, p)
+
+ def test_invalid_name(self):
+ settings = Settings({'REFERRER_POLICY': 'some-custom-unknown-policy'})
+ with self.assertRaises(RuntimeError):
+ mw = RefererMiddleware(settings)
+
+
+class TestPolicyHeaderPredecence001(MixinUnsafeUrl, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.SameOriginPolicy'}
+ resp_headers = {'Referrer-Policy': POLICY_UNSAFE_URL.upper()}
+
+class TestPolicyHeaderPredecence002(MixinNoReferrer, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.NoReferrerWhenDowngradePolicy'}
+ resp_headers = {'Referrer-Policy': POLICY_NO_REFERRER.swapcase()}
+
+class TestPolicyHeaderPredecence003(MixinNoReferrerWhenDowngrade, TestRefererMiddleware):
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.OriginWhenCrossOriginPolicy'}
+ resp_headers = {'Referrer-Policy': POLICY_NO_REFERRER_WHEN_DOWNGRADE.title()}
+
+
+class TestReferrerOnRedirect(TestRefererMiddleware):
+
+ settings = {'REFERRER_POLICY': 'scrapy.spidermiddlewares.referer.UnsafeUrlPolicy'}
+ scenarii = [
+ ( 'http://scrapytest.org/1', # parent
+ 'http://scrapytest.org/2', # target
+ (
+ # redirections: code, URL
+ (301, 'http://scrapytest.org/3'),
+ (301, 'http://scrapytest.org/4'),
+ ),
+ b'http://scrapytest.org/1', # expected initial referer
+ b'http://scrapytest.org/1', # expected referer for the redirection request
+ ),
+ ( 'https://scrapytest.org/1',
+ 'https://scrapytest.org/2',
+ (
+ # redirecting to non-secure URL
+ (301, 'http://scrapytest.org/3'),
+ ),
+ b'https://scrapytest.org/1',
+ b'https://scrapytest.org/1',
+ ),
+ ( 'https://scrapytest.org/1',
+ 'https://scrapytest.com/2',
+ (
+ # redirecting to non-secure URL: different origin
+ (301, 'http://scrapytest.com/3'),
+ ),
+ b'https://scrapytest.org/1',
+ b'https://scrapytest.org/1',
+ ),
+ ]
+
+ def setUp(self):
+ self.spider = Spider('foo')
+ settings = Settings(self.settings)
+ self.referrermw = RefererMiddleware(settings)
+ self.redirectmw = RedirectMiddleware(settings)
+
+ def test(self):
+
+ for parent, target, redirections, init_referrer, final_referrer in self.scenarii:
+ response = self.get_response(parent)
+ request = self.get_request(target)
+
+ out = list(self.referrermw.process_spider_output(response, [request], self.spider))
+ self.assertEquals(out[0].headers.get('Referer'), init_referrer)
+
+ for status, url in redirections:
+ response = Response(request.url, headers={'Location': url}, status=status)
+ request = self.redirectmw.process_response(request, response, self.spider)
+ self.referrermw.request_scheduled(request, self.spider)
+
+ assert isinstance(request, Request)
+ self.assertEquals(request.headers.get('Referer'), final_referrer)
+
+
+class TestReferrerOnRedirectNoReferrer(TestReferrerOnRedirect):
+ """
+ No Referrer policy never sets the "Referer" header.
+ HTTP redirections should not change that.
+ """
+ settings = {'REFERRER_POLICY': 'no-referrer'}
+ scenarii = [
+ ( 'http://scrapytest.org/1', # parent
+ 'http://scrapytest.org/2', # target
+ (
+ # redirections: code, URL
+ (301, 'http://scrapytest.org/3'),
+ (301, 'http://scrapytest.org/4'),
+ ),
+ None, # expected initial "Referer"
+ None, # expected "Referer" for the redirection request
+ ),
+ ( 'https://scrapytest.org/1',
+ 'https://scrapytest.org/2',
+ (
+ (301, 'http://scrapytest.org/3'),
+ ),
+ None,
+ None,
+ ),
+ ( 'https://scrapytest.org/1',
+ 'https://example.com/2', # different origin
+ (
+ (301, 'http://scrapytest.com/3'),
+ ),
+ None,
+ None,
+ ),
+ ]
+
+
+class TestReferrerOnRedirectSameOrigin(TestReferrerOnRedirect):
+ """
+ Same Origin policy sends the full URL as "Referer" if the target origin
+ is the same as the parent response (same protocol, same domain, same port).
+
+ HTTP redirections to a different domain or a lower secure level
+ should have the "Referer" removed.
+ """
+ settings = {'REFERRER_POLICY': 'same-origin'}
+ scenarii = [
+ ( 'http://scrapytest.org/101', # origin
+ 'http://scrapytest.org/102', # target
+ (
+ # redirections: code, URL
+ (301, 'http://scrapytest.org/103'),
+ (301, 'http://scrapytest.org/104'),
+ ),
+ b'http://scrapytest.org/101', # expected initial "Referer"
+ b'http://scrapytest.org/101', # expected referer for the redirection request
+ ),
+ ( 'https://scrapytest.org/201',
+ 'https://scrapytest.org/202',
+ (
+ # redirecting from secure to non-secure URL == different origin
+ (301, 'http://scrapytest.org/203'),
+ ),
+ b'https://scrapytest.org/201',
+ None,
+ ),
+ ( 'https://scrapytest.org/301',
+ 'https://scrapytest.org/302',
+ (
+ # different domain == different origin
+ (301, 'http://example.com/303'),
+ ),
+ b'https://scrapytest.org/301',
+ None,
+ ),
+ ]
+
+
+class TestReferrerOnRedirectStrictOrigin(TestReferrerOnRedirect):
+ """
+ Strict Origin policy will always send the "origin" as referrer
+ (think of it as the parent URL without the path part),
+ unless the security level is lower and no "Referer" is sent.
+
+ Redirections from secure to non-secure URLs should have the
+ "Referrer" header removed if necessary.
+ """
+ settings = {'REFERRER_POLICY': POLICY_STRICT_ORIGIN}
+ scenarii = [
+ ( 'http://scrapytest.org/101',
+ 'http://scrapytest.org/102',
+ (
+ (301, 'http://scrapytest.org/103'),
+ (301, 'http://scrapytest.org/104'),
+ ),
+ b'http://scrapytest.org/', # send origin
+ b'http://scrapytest.org/', # redirects to same origin: send origin
+ ),
+ ( 'https://scrapytest.org/201',
+ 'https://scrapytest.org/202',
+ (
+ # redirecting to non-secure URL: no referrer
+ (301, 'http://scrapytest.org/203'),
+ ),
+ b'https://scrapytest.org/',
+ None,
+ ),
+ ( 'https://scrapytest.org/301',
+ 'https://scrapytest.org/302',
+ (
+ # redirecting to non-secure URL (different domain): no referrer
+ (301, 'http://example.com/303'),
+ ),
+ b'https://scrapytest.org/',
+ None,
+ ),
+ ( 'http://scrapy.org/401',
+ 'http://example.com/402',
+ (
+ (301, 'http://scrapytest.org/403'),
+ ),
+ b'http://scrapy.org/',
+ b'http://scrapy.org/',
+ ),
+ ( 'https://scrapy.org/501',
+ 'https://example.com/502',
+ (
+ # HTTPS all along, so origin referrer is kept as-is
+ (301, 'https://google.com/503'),
+ (301, 'https://facebook.com/504'),
+ ),
+ b'https://scrapy.org/',
+ b'https://scrapy.org/',
+ ),
+ ( 'https://scrapytest.org/601',
+ 'http://scrapytest.org/602', # TLS to non-TLS: no referrer
+ (
+ (301, 'https://scrapytest.org/603'), # TLS URL again: (still) no referrer
+ ),
+ None,
+ None,
+ ),
+ ]
+
+
+class TestReferrerOnRedirectOriginWhenCrossOrigin(TestReferrerOnRedirect):
+ """
+ Origin When Cross-Origin policy sends the full URL as "Referer",
+ unless the target's origin is different (different domain, different protocol)
+ in which case only the origin is sent.
+
+ Redirections to a different origin should strip the "Referer"
+ to the parent origin.
+ """
+ settings = {'REFERRER_POLICY': POLICY_ORIGIN_WHEN_CROSS_ORIGIN}
+ scenarii = [
+ ( 'http://scrapytest.org/101', # origin
+ 'http://scrapytest.org/102', # target + redirection
+ (
+ # redirections: code, URL
+ (301, 'http://scrapytest.org/103'),
+ (301, 'http://scrapytest.org/104'),
+ ),
+ b'http://scrapytest.org/101', # expected initial referer
+ b'http://scrapytest.org/101', # expected referer for the redirection request
+ ),
+ ( 'https://scrapytest.org/201',
+ 'https://scrapytest.org/202',
+ (
+ # redirecting to non-secure URL: send origin
+ (301, 'http://scrapytest.org/203'),
+ ),
+ b'https://scrapytest.org/201',
+ b'https://scrapytest.org/',
+ ),
+ ( 'https://scrapytest.org/301',
+ 'https://scrapytest.org/302',
+ (
+ # redirecting to non-secure URL (different domain): send origin
+ (301, 'http://example.com/303'),
+ ),
+ b'https://scrapytest.org/301',
+ b'https://scrapytest.org/',
+ ),
+ ( 'http://scrapy.org/401',
+ 'http://example.com/402',
+ (
+ (301, 'http://scrapytest.org/403'),
+ ),
+ b'http://scrapy.org/',
+ b'http://scrapy.org/',
+ ),
+ ( 'https://scrapy.org/501',
+ 'https://example.com/502',
+ (
+ # all different domains: send origin
+ (301, 'https://google.com/503'),
+ (301, 'https://facebook.com/504'),
+ ),
+ b'https://scrapy.org/',
+ b'https://scrapy.org/',
+ ),
+ ( 'https://scrapytest.org/301',
+ 'http://scrapytest.org/302', # TLS to non-TLS: send origin
+ (
+ (301, 'https://scrapytest.org/303'), # TLS URL again: send origin (also)
+ ),
+ b'https://scrapytest.org/',
+ b'https://scrapytest.org/',
+ ),
+ ]
- def test_process_spider_output(self):
- res = Response('http://scrapytest.org')
- reqs = [Request('http://scrapytest.org/')]
- out = list(self.mw.process_spider_output(res, reqs, self.spider))
- self.assertEquals(out[0].headers.get('Referer'),
- b'http://scrapytest.org')
+class TestReferrerOnRedirectStrictOriginWhenCrossOrigin(TestReferrerOnRedirect):
+ """
+ Strict Origin When Cross-Origin policy sends the full URL as "Referer",
+ unless the target's origin is different (different domain, different protocol)
+ in which case only the origin is sent...
+ Unless there's also a downgrade in security and then the "Referer" header
+ is not sent.
+ Redirections to a different origin should strip the "Referer" to the parent origin,
+ and from https:// to http:// will remove the "Referer" header.
+ """
+ settings = {'REFERRER_POLICY': POLICY_STRICT_ORIGIN_WHEN_CROSS_ORIGIN}
+ scenarii = [
+ ( 'http://scrapytest.org/101', # origin
+ 'http://scrapytest.org/102', # target + redirection
+ (
+ # redirections: code, URL
+ (301, 'http://scrapytest.org/103'),
+ (301, 'http://scrapytest.org/104'),
+ ),
+ b'http://scrapytest.org/101', # expected initial referer
+ b'http://scrapytest.org/101', # expected referer for the redirection request
+ ),
+ ( 'https://scrapytest.org/201',
+ 'https://scrapytest.org/202',
+ (
+ # redirecting to non-secure URL: do not send the "Referer" header
+ (301, 'http://scrapytest.org/203'),
+ ),
+ b'https://scrapytest.org/201',
+ None,
+ ),
+ ( 'https://scrapytest.org/301',
+ 'https://scrapytest.org/302',
+ (
+            # redirecting to non-secure URL (different domain): do not send the "Referer" header
+ (301, 'http://example.com/303'),
+ ),
+ b'https://scrapytest.org/301',
+ None,
+ ),
+ ( 'http://scrapy.org/401',
+ 'http://example.com/402',
+ (
+ (301, 'http://scrapytest.org/403'),
+ ),
+ b'http://scrapy.org/',
+ b'http://scrapy.org/',
+ ),
+ ( 'https://scrapy.org/501',
+ 'https://example.com/502',
+ (
+ # all different domains: send origin
+ (301, 'https://google.com/503'),
+ (301, 'https://facebook.com/504'),
+ ),
+ b'https://scrapy.org/',
+ b'https://scrapy.org/',
+ ),
+ ( 'https://scrapytest.org/601',
+ 'http://scrapytest.org/602', # TLS to non-TLS: do not send "Referer"
+ (
+ (301, 'https://scrapytest.org/603'), # TLS URL again: (still) send nothing
+ ),
+ None,
+ None,
+ ),
+ ]
diff --git a/tests/test_utils_url.py b/tests/test_utils_url.py
index f46d1d927..c2b9fc176 100644
--- a/tests/test_utils_url.py
+++ b/tests/test_utils_url.py
@@ -6,7 +6,8 @@ from six.moves.urllib.parse import urlparse
from scrapy.spiders import Spider
from scrapy.utils.url import (url_is_from_any_domain, url_is_from_spider,
- add_http_if_no_scheme, guess_scheme, parse_url)
+ add_http_if_no_scheme, guess_scheme,
+ parse_url, strip_url)
__doctests__ = ['scrapy.utils.url']
@@ -241,5 +242,171 @@ for k, args in enumerate ([
setattr (GuessSchemeTest, t_method.__name__, t_method)
+class StripUrl(unittest.TestCase):
+
+ def test_noop(self):
+ self.assertEqual(strip_url(
+ 'http://www.example.com/index.html'),
+ 'http://www.example.com/index.html')
+
+ def test_noop_query_string(self):
+ self.assertEqual(strip_url(
+ 'http://www.example.com/index.html?somekey=somevalue'),
+ 'http://www.example.com/index.html?somekey=somevalue')
+
+ def test_fragments(self):
+ self.assertEqual(strip_url(
+ 'http://www.example.com/index.html?somekey=somevalue#section', strip_fragment=False),
+ 'http://www.example.com/index.html?somekey=somevalue#section')
+
+ def test_path(self):
+ for input_url, origin, output_url in [
+ ('http://www.example.com/',
+ False,
+ 'http://www.example.com/'),
+
+ ('http://www.example.com',
+ False,
+ 'http://www.example.com'),
+
+ ('http://www.example.com',
+ True,
+ 'http://www.example.com/'),
+ ]:
+ self.assertEqual(strip_url(input_url, origin_only=origin), output_url)
+
+ def test_credentials(self):
+ for i, o in [
+ ('http://[email protected]/index.html?somekey=somevalue#section',
+ 'http://www.example.com/index.html?somekey=somevalue'),
+
+ ('https://username:@www.example.com/index.html?somekey=somevalue#section',
+ 'https://www.example.com/index.html?somekey=somevalue'),
+
+ ('ftp://username:[email protected]/index.html?somekey=somevalue#section',
+ 'ftp://www.example.com/index.html?somekey=somevalue'),
+ ]:
+ self.assertEqual(strip_url(i, strip_credentials=True), o)
+
+ def test_credentials_encoded_delims(self):
+ for i, o in [
+ # user: "username@"
+ # password: none
+ ('http://username%[email protected]/index.html?somekey=somevalue#section',
+ 'http://www.example.com/index.html?somekey=somevalue'),
+
+ # user: "username:pass"
+ # password: ""
+ ('https://username%3Apass:@www.example.com/index.html?somekey=somevalue#section',
+ 'https://www.example.com/index.html?somekey=somevalue'),
+
+ # user: "me"
+ # password: "[email protected]"
+ ('ftp://me:user%[email protected]/index.html?somekey=somevalue#section',
+ 'ftp://www.example.com/index.html?somekey=somevalue'),
+ ]:
+ self.assertEqual(strip_url(i, strip_credentials=True), o)
+
+ def test_default_ports_creds_off(self):
+ for i, o in [
+ ('http://username:[email protected]:80/index.html?somekey=somevalue#section',
+ 'http://www.example.com/index.html?somekey=somevalue'),
+
+ ('http://username:[email protected]:8080/index.html#section',
+ 'http://www.example.com:8080/index.html'),
+
+ ('http://username:[email protected]:443/index.html?somekey=somevalue&someotherkey=sov#section',
+ 'http://www.example.com:443/index.html?somekey=somevalue&someotherkey=sov'),
+
+ ('https://username:[email protected]:443/index.html',
+ 'https://www.example.com/index.html'),
+
+ ('https://username:[email protected]:442/index.html',
+ 'https://www.example.com:442/index.html'),
+
+ ('https://username:[email protected]:80/index.html',
+ 'https://www.example.com:80/index.html'),
+
+ ('ftp://username:[email protected]:21/file.txt',
+ 'ftp://www.example.com/file.txt'),
+
+ ('ftp://username:[email protected]:221/file.txt',
+ 'ftp://www.example.com:221/file.txt'),
+ ]:
+ self.assertEqual(strip_url(i), o)
+
+ def test_default_ports(self):
+ for i, o in [
+ ('http://username:[email protected]:80/index.html',
+ 'http://username:[email protected]/index.html'),
+
+ ('http://username:[email protected]:8080/index.html',
+ 'http://username:[email protected]:8080/index.html'),
+
+ ('http://username:[email protected]:443/index.html',
+ 'http://username:[email protected]:443/index.html'),
+
+ ('https://username:[email protected]:443/index.html',
+ 'https://username:[email protected]/index.html'),
+
+ ('https://username:[email protected]:442/index.html',
+ 'https://username:[email protected]:442/index.html'),
+
+ ('https://username:[email protected]:80/index.html',
+ 'https://username:[email protected]:80/index.html'),
+
+ ('ftp://username:[email protected]:21/file.txt',
+ 'ftp://username:[email protected]/file.txt'),
+
+ ('ftp://username:[email protected]:221/file.txt',
+ 'ftp://username:[email protected]:221/file.txt'),
+ ]:
+ self.assertEqual(strip_url(i, strip_default_port=True, strip_credentials=False), o)
+
+ def test_default_ports_keep(self):
+ for i, o in [
+ ('http://username:[email protected]:80/index.html?somekey=somevalue&someotherkey=sov#section',
+ 'http://username:[email protected]:80/index.html?somekey=somevalue&someotherkey=sov'),
+
+ ('http://username:[email protected]:8080/index.html?somekey=somevalue&someotherkey=sov#section',
+ 'http://username:[email protected]:8080/index.html?somekey=somevalue&someotherkey=sov'),
+
+ ('http://username:[email protected]:443/index.html',
+ 'http://username:[email protected]:443/index.html'),
+
+ ('https://username:[email protected]:443/index.html',
+ 'https://username:[email protected]:443/index.html'),
+
+ ('https://username:[email protected]:442/index.html',
+ 'https://username:[email protected]:442/index.html'),
+
+ ('https://username:[email protected]:80/index.html',
+ 'https://username:[email protected]:80/index.html'),
+
+ ('ftp://username:[email protected]:21/file.txt',
+ 'ftp://username:[email protected]:21/file.txt'),
+
+ ('ftp://username:[email protected]:221/file.txt',
+ 'ftp://username:[email protected]:221/file.txt'),
+ ]:
+ self.assertEqual(strip_url(i, strip_default_port=False, strip_credentials=False), o)
+
+ def test_origin_only(self):
+ for i, o in [
+ ('http://username:[email protected]/index.html',
+ 'http://www.example.com/'),
+
+ ('http://username:[email protected]:80/foo/bar?query=value#somefrag',
+ 'http://www.example.com/'),
+
+ ('http://username:[email protected]:8008/foo/bar?query=value#somefrag',
+ 'http://www.example.com:8008/'),
+
+ ('https://username:[email protected]:443/index.html',
+ 'https://www.example.com/'),
+ ]:
+ self.assertEqual(strip_url(i, origin_only=True), o)
+
+
if __name__ == "__main__":
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 5
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@7b49b9c0f53396ac89cbd74930bc4c6e41d41901#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_spidermiddleware_referer.py::TestRefererMiddleware::test",
"tests/test_spidermiddleware_referer.py::TestRefererMiddlewareDefault::test",
"tests/test_spidermiddleware_referer.py::TestSettingsNoReferrer::test",
"tests/test_spidermiddleware_referer.py::TestSettingsNoReferrerWhenDowngrade::test",
"tests/test_spidermiddleware_referer.py::TestSettingsSameOrigin::test",
"tests/test_spidermiddleware_referer.py::TestSettingsOrigin::test",
"tests/test_spidermiddleware_referer.py::TestSettingsStrictOrigin::test",
"tests/test_spidermiddleware_referer.py::TestSettingsOriginWhenCrossOrigin::test",
"tests/test_spidermiddleware_referer.py::TestSettingsStrictOriginWhenCrossOrigin::test",
"tests/test_spidermiddleware_referer.py::TestSettingsUnsafeUrl::test",
"tests/test_spidermiddleware_referer.py::TestSettingsCustomPolicy::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaDefault::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaNoReferrer::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaNoReferrerWhenDowngrade::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaSameOrigin::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaOrigin::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaSrictOrigin::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaOriginWhenCrossOrigin::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaStrictOriginWhenCrossOrigin::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaUnsafeUrl::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaPredecence001::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaPredecence002::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaPredecence003::test",
"tests/test_spidermiddleware_referer.py::TestRequestMetaSettingFallback::test",
"tests/test_spidermiddleware_referer.py::TestSettingsPolicyByName::test_invalid_name",
"tests/test_spidermiddleware_referer.py::TestSettingsPolicyByName::test_valid_name",
"tests/test_spidermiddleware_referer.py::TestSettingsPolicyByName::test_valid_name_casevariants",
"tests/test_spidermiddleware_referer.py::TestPolicyHeaderPredecence001::test",
"tests/test_spidermiddleware_referer.py::TestPolicyHeaderPredecence002::test",
"tests/test_spidermiddleware_referer.py::TestPolicyHeaderPredecence003::test",
"tests/test_spidermiddleware_referer.py::TestReferrerOnRedirect::test",
"tests/test_spidermiddleware_referer.py::TestReferrerOnRedirectNoReferrer::test",
"tests/test_spidermiddleware_referer.py::TestReferrerOnRedirectSameOrigin::test",
"tests/test_spidermiddleware_referer.py::TestReferrerOnRedirectStrictOrigin::test",
"tests/test_spidermiddleware_referer.py::TestReferrerOnRedirectOriginWhenCrossOrigin::test",
"tests/test_spidermiddleware_referer.py::TestReferrerOnRedirectStrictOriginWhenCrossOrigin::test",
"tests/test_utils_url.py::UrlUtilsTest::test_url_is_from_any_domain",
"tests/test_utils_url.py::UrlUtilsTest::test_url_is_from_spider",
"tests/test_utils_url.py::UrlUtilsTest::test_url_is_from_spider_class_attributes",
"tests/test_utils_url.py::UrlUtilsTest::test_url_is_from_spider_with_allowed_domains",
"tests/test_utils_url.py::UrlUtilsTest::test_url_is_from_spider_with_allowed_domains_class_attributes",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_add_scheme",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_complete_url",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_fragment",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_path",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_port",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_ftp",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_complete_url",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_fragment",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_path",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_port",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_query",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_username_password",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_http_without_subdomain",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_preserve_https",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_complete_url",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_fragment",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_path",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_port",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_query",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_username_password",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_protocol_relative_without_subdomain",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_query",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_username_password",
"tests/test_utils_url.py::AddHttpIfNoScheme::test_without_subdomain",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_001",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_002",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_003",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_004",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_005",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_006",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_007",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_008",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_009",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_010",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_011",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_012",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_013",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_014",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_015",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_016",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_017",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_018",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_019",
"tests/test_utils_url.py::GuessSchemeTest::test_uri_020",
"tests/test_utils_url.py::StripUrl::test_credentials",
"tests/test_utils_url.py::StripUrl::test_credentials_encoded_delims",
"tests/test_utils_url.py::StripUrl::test_default_ports",
"tests/test_utils_url.py::StripUrl::test_default_ports_creds_off",
"tests/test_utils_url.py::StripUrl::test_default_ports_keep",
"tests/test_utils_url.py::StripUrl::test_fragments",
"tests/test_utils_url.py::StripUrl::test_noop",
"tests/test_utils_url.py::StripUrl::test_noop_query_string",
"tests/test_utils_url.py::StripUrl::test_origin_only",
"tests/test_utils_url.py::StripUrl::test_path"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 795 | [
"scrapy/utils/url.py",
"docs/topics/spider-middleware.rst",
"docs/topics/request-response.rst",
"scrapy/settings/default_settings.py",
"scrapy/spidermiddlewares/referer.py"
]
| [
"scrapy/utils/url.py",
"docs/topics/spider-middleware.rst",
"docs/topics/request-response.rst",
"scrapy/settings/default_settings.py",
"scrapy/spidermiddlewares/referer.py"
]
|
rapidpro__rapidpro-python-38 | 3c9417903b2426c57d1b355cdf24ca9f26660c7c | 2016-10-05 17:59:48 | 3c9417903b2426c57d1b355cdf24ca9f26660c7c | diff --git a/temba_client/utils.py b/temba_client/utils.py
index 39a3a2a..2489884 100644
--- a/temba_client/utils.py
+++ b/temba_client/utils.py
@@ -33,8 +33,11 @@ def parse_iso8601(value):
def format_iso8601(value):
"""
- Formats a datetime as a UTC ISO8601 date
+ Formats a datetime as a UTC ISO8601 date or returns None if value is None
"""
+ if value is None:
+ return None
+
_format = ISO8601_DATETIME_FORMAT + '.%f'
return six.text_type(value.astimezone(pytz.UTC).strftime(_format))
| Null DatetimeField raises an exception upon serialisation request
Objects with DatetimeField attributes cannot be properly serialised when any of those attributes is not set (i.e. null in the JSON response). This leads to the following failure scenario:
1. fetch data from the server (e.g. TembaClient(...).get_runs()) - the data is internally deserialised by rapidpro-python
2. feed the result of this deserialisation back to rapidpro-python to have it (re)serialised
3. an AttributeError is raised (see the repro snippet and example traceback below)
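The failure reduces to the datetime formatting helper itself; a minimal repro that hits the same code path without any server access:
```
from temba_client.utils import format_iso8601

format_iso8601(None)  # AttributeError: 'NoneType' object has no attribute 'astimezone'
```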
```
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/ego/.virtualenvs/rapidpro-python/local/lib/python2.7/site-packages/temba_client/serialization.py", line 63, in serialize
field_value = field.serialize(attr_value)
File "/home/ego/.virtualenvs/rapidpro-python/local/lib/python2.7/site-packages/temba_client/serialization.py", line 148, in serialize
return [self.item_class.serialize(item) for item in value]
File "/home/ego/.virtualenvs/rapidpro-python/local/lib/python2.7/site-packages/temba_client/serialization.py", line 63, in serialize
field_value = field.serialize(attr_value)
File "/home/ego/.virtualenvs/rapidpro-python/local/lib/python2.7/site-packages/temba_client/serialization.py", line 122, in serialize
return format_iso8601(value)
File "/home/ego/.virtualenvs/rapidpro-python/local/lib/python2.7/site-packages/temba_client/utils.py", line 40, in format_iso8601
return six.text_type(value.astimezone(pytz.UTC).strftime(_format))
AttributeError: 'NoneType' object has no attribute 'astimezone'
```
 | rapidpro/rapidpro-python | diff --git a/temba_client/tests.py b/temba_client/tests.py
index d8386f7..e24f86a 100644
--- a/temba_client/tests.py
+++ b/temba_client/tests.py
@@ -72,6 +72,9 @@ class UtilsTest(TembaTest):
d = datetime.datetime(2014, 1, 2, 3, 4, 5, 6, UtilsTest.TestTZ())
self.assertEqual(format_iso8601(d), '2014-01-02T08:04:05.000006')
+ def test_format_iso8601_should_return_none_when_no_datetime_given(self):
+ self.assertIs(format_iso8601(None), None)
+
def test_parse_iso8601(self):
dt = datetime.datetime(2014, 1, 2, 3, 4, 5, 0, pytz.UTC)
self.assertEqual(parse_iso8601('2014-01-02T03:04:05.000000Z'), dt)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt",
"requirements/tests.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
mock==5.2.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
pytz==2025.2
-e git+https://github.com/rapidpro/rapidpro-python.git@3c9417903b2426c57d1b355cdf24ca9f26660c7c#egg=rapidpro_python
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: rapidpro-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- pytz==2025.2
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/rapidpro-python
| [
"temba_client/tests.py::UtilsTest::test_format_iso8601_should_return_none_when_no_datetime_given"
]
| []
| [
"temba_client/tests.py::UtilsTest::test_format_iso8601",
"temba_client/tests.py::UtilsTest::test_parse_iso8601",
"temba_client/tests.py::FieldsTest::test_boolean",
"temba_client/tests.py::FieldsTest::test_integer",
"temba_client/tests.py::FieldsTest::test_object_list",
"temba_client/tests.py::TembaObjectTest::test_create",
"temba_client/tests.py::TembaObjectTest::test_deserialize",
"temba_client/tests.py::TembaObjectTest::test_serialize",
"temba_client/tests.py::BaseClientTest::test_init"
]
| []
| BSD 3-Clause "New" or "Revised" License | 796 | [
"temba_client/utils.py"
]
| [
"temba_client/utils.py"
]
|
|
abh1nav__gnippy-25 | ce2968934c3d25aabda033a544e3abcc0e80775e | 2016-10-06 11:03:51 | ce2968934c3d25aabda033a544e3abcc0e80775e | diff --git a/gnippy/rules.py b/gnippy/rules.py
index 37877f8..be59fcc 100644
--- a/gnippy/rules.py
+++ b/gnippy/rules.py
@@ -31,7 +31,7 @@ def _check_rules_list(rules_list):
if not isinstance(rules_list, list):
fail()
- expected = ("value", "tag")
+ expected = ("value", "tag", "id")
for r in rules_list:
if not isinstance(r, dict):
fail()
@@ -77,6 +77,7 @@ def _post(conf, built_rules):
error_text = "HTTP Response Code: %s, Text: '%s'" % (str(r.status_code), r.text)
raise RuleAddFailedException(error_text)
+
def _generate_delete_url(conf):
"""
Generate the Rules URL for a DELETE request.
@@ -89,6 +90,7 @@ def _generate_delete_url(conf):
else:
return rules_url + "?_method=delete"
+
def _delete(conf, built_rules):
"""
Generate the Delete Rules URL and make a POST request.
@@ -112,6 +114,7 @@ def _delete(conf, built_rules):
error_text = "HTTP Response Code: %s, Text: '%s'" % (str(r.status_code), r.text)
raise RuleDeleteFailedException(error_text)
+
def build(rule_string, tag=None):
"""
Takes a rule string and optional tag and turns it into a "built_rule" that looks like:
| New rule format breaks delete example
The examples in the README for deleting rules are broken due to the addition of the `id` field in the rules returned by GNIP.
```
from gnippy import rules
from gnippy.errors import RuleDeleteFailedException, RulesGetFailedException

try:
    rules_list = rules.get_rules()
    # Suppose I want to delete the first rule in the list
    rules.delete_rule(rules_list[0])
    # OR ... I want to delete ALL rules
    rules.delete_rules(rules_list)
except (RuleDeleteFailedException, RulesGetFailedException):
    pass
```
At the moment a user needs to either manually remove the `id` field
```
del rule['id']
rules.delete_rule(rule)
```
or rebuild the rule using `rules.build`:
```
rules.delete_rule(rules.build(rule['value'], rule.get('tag')))
```
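Ideally the rule dicts returned by `get_rules()` would validate as-is, so the round trip just works. A minimal sketch of the desired usage (rule contents are whatever the API returns; nothing needs to be stripped or rebuilt):
```
from gnippy import rules

# Rules fetched from GNIP may now carry an "id" key alongside "value"/"tag"...
rules_list = rules.get_rules()

# ...and can be passed straight back for deletion
rules.delete_rules(rules_list)
```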
| abh1nav/gnippy | diff --git a/gnippy/test/test_rules.py b/gnippy/test/test_rules.py
index cfa1ee4..cbb4202 100644
--- a/gnippy/test/test_rules.py
+++ b/gnippy/test/test_rules.py
@@ -42,7 +42,8 @@ def good_get_no_rules(url, auth):
def good_get_one_rule(url, auth):
- return test_utils.GoodResponse(json={"rules":[{"value": "Hello", "tag": "mytag"}]})
+ return test_utils.GoodResponse(
+ json={"rules": [{"value": "Hello", "tag": "mytag"}]})
def bad_delete(url, auth, data):
@@ -54,7 +55,6 @@ def good_delete(url, auth, data):
class RulesTestCase(unittest.TestCase):
-
rule_string = "Hello OR World"
tag = "my_tag"
@@ -79,45 +79,53 @@ class RulesTestCase(unittest.TestCase):
def test_check_one_rule_ok(self):
""" Check list of one rule. """
- l = [ { "value": "hello" } ]
+ l = [{"value": "hello"}]
rules._check_rules_list(l)
def test_check_many_rules_ok(self):
""" Check list of many rules. """
- l = [ { "value": "hello" }, { "value": "h", "tag": "w" }]
+ l = [
+ {"value": "hello", "id": 3},
+ {"value": "goodbye", "tag": "w", "id": 4},
+ {"value": "hi again", "tag": "x"},
+ {"value": "bye again"}
+ ]
rules._check_rules_list(l)
def test_check_rule_tag_none(self):
""" Check list of rules both with tag and without. """
- l = [ { "value": "hello", "tag": None }, { "value": "h", "tag": "w" }]
+ l = [{"value": "hello", "tag": None}, {"value": "h", "tag": "w"}]
rules._check_rules_list(l)
def test_check_one_rule_typo_values(self):
""" Make sure incorectly formatted rule values fail. """
- l = [ { "values": "hello" } ]
+ l = [{"values": "hello"}]
try:
rules._check_rules_list(l)
except RulesListFormatException:
return
- self.fail("_check_rules_list was supposed to throw a RuleFormatException")
+ self.fail(
+ "_check_rules_list was supposed to throw a RuleFormatException")
def test_check_one_rule_typo_tag(self):
""" Make sure incorrectly formatted rule tags fail. """
- l = [ { "value": "hello", "tags": "t" } ]
+ l = [{"value": "hello", "tags": "t"}]
try:
rules._check_rules_list(l)
except RulesListFormatException:
return
- self.fail("_check_rules_list was supposed to throw a RuleFormatException")
+ self.fail(
+ "_check_rules_list was supposed to throw a RuleFormatException")
def test_check_one_rule_extra_stuff_in_rule(self):
""" Make sure rules with unexpected keys fail. """
- l = [ { "value": "hello", "wat": "man" } ]
+ l = [{"value": "hello", "wat": "man"}]
try:
rules._check_rules_list(l)
except RulesListFormatException:
return
- self.fail("_check_rules_list was supposed to throw a RuleFormatException")
+ self.fail(
+ "_check_rules_list was supposed to throw a RuleFormatException")
def test_build_rule_bad_args(self):
""" Make sure building rules with unexpected args fail. """
@@ -125,7 +133,8 @@ class RulesTestCase(unittest.TestCase):
rules.build(None)
except BadArgumentException:
return
- self.fail("rules.build_rule was supposed to throw a BadArgumentException")
+ self.fail(
+ "rules.build_rule was supposed to throw a BadArgumentException")
def test_build_rule_without_tag(self):
""" Build rule without tag. """
@@ -149,18 +158,21 @@ class RulesTestCase(unittest.TestCase):
rules.add_rule(self.rule_string, self.tag)
except ConfigFileNotFoundException:
return
- self.fail("Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
+ self.fail(
+ "Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
@mock.patch('requests.post', good_post)
def test_add_one_rule_ok(self):
"""Add one rule with config. """
- rules.add_rule(self.rule_string, self.tag, config_file_path=test_utils.test_config_path)
+ rules.add_rule(self.rule_string, self.tag,
+ config_file_path=test_utils.test_config_path)
@mock.patch('requests.post', bad_post)
def test_add_one_rule_not_ok(self):
"""Add one rule with exception thrown. """
try:
- rules.add_rule(self.rule_string, self.tag, config_file_path=test_utils.test_config_path)
+ rules.add_rule(self.rule_string, self.tag,
+ config_file_path=test_utils.test_config_path)
except RuleAddFailedException:
return
self.fail("Rule Add was supposed to fail and throw a RuleAddException")
@@ -173,20 +185,23 @@ class RulesTestCase(unittest.TestCase):
rules.add_rule(self.rule_string, self.tag)
except ConfigFileNotFoundException:
return
- self.fail("Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
+ self.fail(
+ "Rule Add was supposed to fail and throw a ConfigFileNotFoundException")
@mock.patch('requests.post', good_post)
def test_add_many_rules_ok(self):
""" Add many rules. """
rules_list = self._generate_rules_list()
- rules.add_rules(rules_list, config_file_path=test_utils.test_config_path)
+ rules.add_rules(rules_list,
+ config_file_path=test_utils.test_config_path)
@mock.patch('requests.post', bad_post)
def test_add_many_rules_not_ok(self):
""" Add many rules with exception thrown. """
try:
rules_list = self._generate_rules_list()
- rules.add_rules(rules_list, config_file_path=test_utils.test_config_path)
+ rules.add_rules(rules_list,
+ config_file_path=test_utils.test_config_path)
except RuleAddFailedException:
return
self.fail("Rule Add was supposed to fail and throw a RuleAddException")
@@ -226,7 +241,8 @@ class RulesTestCase(unittest.TestCase):
try:
r = rules.get_rules(config_file_path=test_utils.test_config_path)
except RulesGetFailedException as e:
- self.assertTrue("GNIP API response did not return a rules object" in str(e))
+ self.assertTrue(
+ "GNIP API response did not return a rules object" in str(e))
return
self.fail("rules.get() was supposed to throw a RulesGetFailedException")
@@ -245,13 +261,15 @@ class RulesTestCase(unittest.TestCase):
@mock.patch('requests.post', good_delete)
def test_delete_rules_single(self):
""" Delete one rule. """
- rules.delete_rule({"value": "Hello World"}, config_file_path=test_utils.test_config_path)
+ rules.delete_rule({"value": "Hello World"},
+ config_file_path=test_utils.test_config_path)
@mock.patch('requests.post', good_delete)
def test_delete_rules_multiple(self):
""" Delete multiple rules. """
rules_list = [
- { "value": "Hello World" },
- { "value": "Hello", "tag": "mytag" }
+ {"value": "Hello World"},
+ {"value": "Hello", "tag": "mytag"}
]
- rules.delete_rules(rules_list, config_file_path=test_utils.test_config_path)
\ No newline at end of file
+ rules.delete_rules(rules_list,
+ config_file_path=test_utils.test_config_path)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
-e git+https://github.com/abh1nav/gnippy.git@ce2968934c3d25aabda033a544e3abcc0e80775e#egg=gnippy
idna==3.10
iniconfig==2.1.0
mock==1.0.1
nose==1.3.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pyproject-api==1.9.0
pytest==8.3.5
requests==2.32.3
six==1.10.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
| name: gnippy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- idna==3.10
- iniconfig==2.1.0
- mock==1.0.1
- nose==1.3.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pyproject-api==1.9.0
- pytest==8.3.5
- requests==2.32.3
- six==1.10.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/gnippy
| [
"gnippy/test/test_rules.py::RulesTestCase::test_check_many_rules_ok"
]
| []
| [
"gnippy/test/test_rules.py::RulesTestCase::test_add_many_rules_no_creds",
"gnippy/test/test_rules.py::RulesTestCase::test_add_many_rules_not_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_add_many_rules_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_add_one_rule_no_creds",
"gnippy/test/test_rules.py::RulesTestCase::test_add_one_rule_not_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_add_one_rule_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_build_post_object",
"gnippy/test/test_rules.py::RulesTestCase::test_build_rule_bad_args",
"gnippy/test/test_rules.py::RulesTestCase::test_build_rule_with_tag",
"gnippy/test/test_rules.py::RulesTestCase::test_build_rule_without_tag",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_extra_stuff_in_rule",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_ok",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_typo_tag",
"gnippy/test/test_rules.py::RulesTestCase::test_check_one_rule_typo_values",
"gnippy/test/test_rules.py::RulesTestCase::test_check_rule_tag_none",
"gnippy/test/test_rules.py::RulesTestCase::test_delete_rules_multiple",
"gnippy/test/test_rules.py::RulesTestCase::test_delete_rules_single",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_bad_json",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_bad_status_code",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_no_rules_field_json",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_requests_get_exception",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_success_no_rules",
"gnippy/test/test_rules.py::RulesTestCase::test_get_rules_success_one_rule"
]
| []
| Apache License 2.0 | 797 | [
"gnippy/rules.py"
]
| [
"gnippy/rules.py"
]
|
|
openmrslab__suspect-22 | 973a03f749fd6af336f968dd2e99366586ac59d2 | 2016-10-07 01:18:04 | 964f2460e46378c29e78d280f999128f34e829df | diff --git a/docs/index.rst b/docs/index.rst
index 86af541..4726e2a 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -26,4 +26,5 @@ API Reference
:maxdepth: 1
:glob:
- mrs_data_api.rst
+ suspect_api.rst
+ mrs_data_api.rst
\ No newline at end of file
diff --git a/docs/suspect_api.rst b/docs/suspect_api.rst
new file mode 100644
index 0000000..0e12fb5
--- /dev/null
+++ b/docs/suspect_api.rst
@@ -0,0 +1,5 @@
+Suspect API Reference
+=====================
+
+.. automodule:: suspect
+ :members: adjust_phase
diff --git a/suspect/__init__.py b/suspect/__init__.py
index bb1d5fd..659b4c4 100644
--- a/suspect/__init__.py
+++ b/suspect/__init__.py
@@ -7,3 +7,4 @@ from . import fitting
from . import image
from . import viz
from ._version import __version__
+from .core import adjust_phase
diff --git a/suspect/core.py b/suspect/core.py
new file mode 100644
index 0000000..ffbde62
--- /dev/null
+++ b/suspect/core.py
@@ -0,0 +1,25 @@
+from .mrsdata import MRSData
+
+
+def adjust_phase(data, zero_phase, first_phase=0, fixed_frequency=0):
+ """
+ Adjust the phase of an MRSData object
+
+ Parameters
+ ----------
+ data : MRSData
+ The MRSData object to be phased
+ zero_phase : scalar
+ The change to the zero order phase, in radians
+ first_phase : scalar, optional
+ The change to the first order phase, in radians per Hz
+ fixed_frequency : scalar, optional
+ The frequency, in Hz, which is unchanged by the first order
+ phase shift
+
+ Returns
+ -------
+ out : MRSData
+ A new MRSData object with adjusted phase.
+ """
+ return data.adjust_phase(zero_phase, first_phase, fixed_frequency)
diff --git a/suspect/mrsdata.py b/suspect/mrsdata.py
index 0be660d..3d251e7 100644
--- a/suspect/mrsdata.py
+++ b/suspect/mrsdata.py
@@ -246,4 +246,26 @@ class MRSData(numpy.ndarray):
transformed_point = numpy.linalg.inv(self.transform) * numpy.matrix([x, y, z, 1]).T
- return numpy.squeeze(numpy.asarray(transformed_point))[0:3]
\ No newline at end of file
+ return numpy.squeeze(numpy.asarray(transformed_point))[0:3]
+
+ def adjust_phase(self, zero_phase, first_phase=0, fixed_frequency=0):
+ """
+ Adjust the phases of the signal.
+
+ Refer to suspect.adjust_phase for full documentation.
+
+ See Also
+ --------
+ suspect.adjust_phase : equivalent function
+ """
+ # easiest to apply the phase shift in the frequency domain
+ # TODO when MRSSpectrum is a real class, this function can delegate
+ # to that one.
+ spectrum = self.spectrum()
+ phase_ramp = numpy.linspace(-self.sw / 2,
+ self.sw / 2,
+ self.np,
+ endpoint=False)
+ phase_shift = zero_phase + first_phase * (fixed_frequency + phase_ramp)
+ phased_spectrum = spectrum * numpy.exp(1j * phase_shift)
+ return self.inherit(numpy.fft.ifft(numpy.fft.ifftshift(phased_spectrum, axes=-1), axis=-1))
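A minimal usage sketch of the API added by this patch; the constructor arguments mirror the new tests, and the `first_phase` value is an arbitrary illustration:

```
import numpy
import suspect

data = suspect.MRSData(numpy.ones(10), 1e-3, 123)  # arguments as in the new tests

# module-level function ...
phased = suspect.adjust_phase(data, numpy.pi / 2)
# ... or the equivalent method on the data object itself
phased = data.adjust_phase(numpy.pi / 2, first_phase=0.01, fixed_frequency=0)
```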
| Add adjust_phase function
Add a function which can be passed an MRSData object and a zeroth (and optional first) order phase, and returns a new MRSData object which has had the phase shifts applied. This should also be callable with a method on the original MRSData object, by analogy with methods like np.ndarray.clip() | openmrslab/suspect | diff --git a/tests/test_mrs/test_core.py b/tests/test_mrs/test_core.py
new file mode 100644
index 0000000..97a9f0e
--- /dev/null
+++ b/tests/test_mrs/test_core.py
@@ -0,0 +1,17 @@
+import suspect
+
+import numpy
+
+
+def test_adjust_zero_phase():
+ data = suspect.MRSData(numpy.ones(10), 1e-3, 123)
+ phased_data = suspect.adjust_phase(data, numpy.pi / 2)
+ numpy.testing.assert_allclose(phased_data, 1j * numpy.ones(10))
+
+
+def test_adjust_first_phase():
+ data = suspect.MRSData(numpy.fft.ifft(numpy.ones(10)), 1e-1, 123)
+ phased_data = suspect.adjust_phase(data, 0, numpy.pi / 10).spectrum()
+ numpy.testing.assert_almost_equal(phased_data[0], -1j)
+ numpy.testing.assert_almost_equal(phased_data[-1], numpy.exp(1j * 0.4 * numpy.pi))
+ numpy.testing.assert_almost_equal(phased_data[5], 1)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 3
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
asteval==1.0.6
asttokens==3.0.0
attrs==25.3.0
babel==2.17.0
beautifulsoup4==4.13.3
bleach==6.2.0
certifi==2025.1.31
charset-normalizer==3.4.1
comm==0.2.2
debugpy==1.8.13
decorator==5.2.1
defusedxml==0.7.1
dill==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
executing==2.2.0
fastjsonschema==2.21.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
ipykernel==6.29.5
ipython==8.18.1
jedi==0.19.2
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterlab_pygments==0.3.0
lmfit==1.3.3
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
mistune==3.1.3
mock==5.2.0
nbclient==0.10.2
nbconvert==7.16.6
nbformat==5.10.4
nbsphinx==0.9.7
nest-asyncio==1.6.0
numpy==2.0.2
packaging==24.2
pandocfilters==1.5.1
Parsley==1.3
parso==0.8.4
pexpect==4.9.0
platformdirs==4.3.7
pluggy==1.5.0
prompt_toolkit==3.0.50
psutil==7.0.0
ptyprocess==0.7.0
pure_eval==0.2.3
pydicom==2.4.4
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
PyWavelets==1.6.0
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
scipy==1.13.1
six==1.17.0
snowballstemmer==2.2.0
soupsieve==2.6
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
stack-data==0.6.3
-e git+https://github.com/openmrslab/suspect.git@973a03f749fd6af336f968dd2e99366586ac59d2#egg=suspect
tinycss2==1.4.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
typing_extensions==4.13.0
uncertainties==3.2.2
urllib3==2.3.0
wcwidth==0.2.13
webencodings==0.5.1
zipp==3.21.0
| name: suspect
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- asteval==1.0.6
- asttokens==3.0.0
- attrs==25.3.0
- babel==2.17.0
- beautifulsoup4==4.13.3
- bleach==6.2.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- comm==0.2.2
- debugpy==1.8.13
- decorator==5.2.1
- defusedxml==0.7.1
- dill==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- executing==2.2.0
- fastjsonschema==2.21.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipykernel==6.29.5
- ipython==8.18.1
- jedi==0.19.2
- jinja2==3.1.6
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyterlab-pygments==0.3.0
- lmfit==1.3.3
- markupsafe==3.0.2
- matplotlib-inline==0.1.7
- mistune==3.1.3
- mock==5.2.0
- nbclient==0.10.2
- nbconvert==7.16.6
- nbformat==5.10.4
- nbsphinx==0.9.7
- nest-asyncio==1.6.0
- numpy==2.0.2
- packaging==24.2
- pandocfilters==1.5.1
- parsley==1.3
- parso==0.8.4
- pexpect==4.9.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- psutil==7.0.0
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pydicom==2.4.4
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pywavelets==1.6.0
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- scipy==1.13.1
- six==1.17.0
- snowballstemmer==2.2.0
- soupsieve==2.6
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- stack-data==0.6.3
- tinycss2==1.4.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- typing-extensions==4.13.0
- uncertainties==3.2.2
- urllib3==2.3.0
- wcwidth==0.2.13
- webencodings==0.5.1
- zipp==3.21.0
prefix: /opt/conda/envs/suspect
| [
"tests/test_mrs/test_core.py::test_adjust_zero_phase",
"tests/test_mrs/test_core.py::test_adjust_first_phase"
]
| []
| []
| []
| MIT License | 798 | [
"docs/suspect_api.rst",
"suspect/core.py",
"suspect/mrsdata.py",
"suspect/__init__.py",
"docs/index.rst"
]
| [
"docs/suspect_api.rst",
"suspect/core.py",
"suspect/mrsdata.py",
"suspect/__init__.py",
"docs/index.rst"
]
|
|
napjon__krisk-47 | f873a68da47b99677a73ebea2d92698db482a6d0 | 2016-10-07 06:54:05 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/47?src=pr) is 93.40% (diff: 100%)
> Merging [#47](https://codecov.io/gh/napjon/krisk/pull/47?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will increase coverage by **0.31%**
```diff
@@ 0.2-develop #47 diff @@
=============================================
Files 10 9 -1
Lines 333 349 +16
Methods 0 0
Messages 0 0
Branches 48 49 +1
=============================================
+ Hits 310 326 +16
Misses 14 14
Partials 9 9
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [f873a68...37ff13d](https://codecov.io/gh/napjon/krisk/compare/f873a68da47b99677a73ebea2d92698db482a6d0...37ff13d06b37388e99685b3af43d00c8ff0eb429?src=pr) | diff --git a/krisk/chart.py b/krisk/chart.py
index b5d17a1..beb692d 100644
--- a/krisk/chart.py
+++ b/krisk/chart.py
@@ -3,20 +3,37 @@
import uuid
import json
from copy import deepcopy
-from krisk.template import *
from krisk.connections import get_paths
from IPython.display import Javascript
-from krisk.util import get_content
-
+from krisk.util import get_content, join_current_dir
+
+JS_TEMPLATE_PATH = 'static/krisk.js'
+EVENT_TEMPLATE_PATH = 'static/on_event.js'
+HTML_TEMPLATE_PATH = 'static/template.html'
+
+APPEND_ELEMENT = """
+$('#{id}').attr('id','{id}'+'_old');
+element.append('<div id="{id}" style="width: {width}px;height:{height}px;"></div>');"""
+
+OPTION_TEMPLATE = {
+ 'title': {
+ 'text': ''
+ },
+ 'tooltip': {'axisPointer': {'type': ''}},
+ 'legend': {
+ 'data': []
+ },
+ 'xAxis': {
+ 'data': []
+ },
+ 'yAxis': {},
+ 'series': []
+}
class Chart(object):
"""Chart Object"""
- JS_TEMPLATE_PATH = 'static/krisk.js'
- EVENT_TEMPLATE_PATH = 'static/on_event.js'
-
-
def __init__(self, **kwargs):
"""Constructor"""
# Currently, there are three type of data structure.
@@ -363,6 +380,65 @@ class Chart(object):
'yAxis'], self.option['xAxis']
return self
+ def _set_label_axes(self, xy, **kwargs):
+ """Set label axes name and other customization"""
+ assert xy in ['x','y']
+ self.option[xy + 'Axis'].update(**kwargs)
+ return self
+
+ def set_xlabel(self, name, axis_position='middle', axis_gap=30, rotation=0, font_size=16):
+ """Set x-axis label and other type of customization.
+
+ Parameters
+ ----------
+ name: the label of axes
+ axis_position: {start, middle, end}, default middle
+ horizontal alignment of label. start will position the label at leftmost position
+ axis_gap: int default 30
+ vertical alignment position of label. zero start from x-axis and going further away
+ rotation: int default 0
+ the rotation of the label
+ font_size: int default 16
+ the font size of the label
+
+ Return
+ ------
+ Chart object
+ """
+ label_kwargs = dict(name=name,
+ nameLocation=axis_position,
+ nameGap=axis_gap,
+ nameTextStyle={'fontSize':font_size},
+ nameRotate=rotation)
+ return self._set_label_axes('x', **label_kwargs)
+
+ def set_ylabel(self, name, axis_position='middle', axis_gap=30, rotation=90, font_size=16):
+ """Set y-axis label and other type of customization.
+
+ Parameters
+ ----------
+ name: the label of axes
+ axis_position: {start, middle, end}, default middle
+ vertical alignment of label. start will position the label at bottom position
+ axis_gap: int default 30
+ horizontal alignment position of label. zero start from y-axis and going further
+ away
+ rotation: int default 90
+ the rotation of the label
+ font_size: int default 16
+ the font size of the label
+
+ Return
+ ------
+ Chart object
+ """
+ label_kwargs = dict(name=name,
+ nameLocation=axis_position,
+ nameGap=axis_gap,
+ nameTextStyle={'fontSize':font_size},
+ nameRotate=rotation)
+ return self._set_label_axes('y', **label_kwargs)
+
# ------------------------------------------------------------------------------------------------
# Events
def on_event(self, event, handler):
@@ -436,8 +512,8 @@ class Chart(object):
def _get_resync_option_strings(self, option):
"""Resync Chart option"""
- js_template = get_content(self.JS_TEMPLATE_PATH)
- event_template = get_content(self.EVENT_TEMPLATE_PATH)
+ js_template = get_content(JS_TEMPLATE_PATH)
+ event_template = get_content(EVENT_TEMPLATE_PATH)
events = [event_template.format(
event=e, function=self._events[e]) for e in self._events]
@@ -474,4 +550,12 @@ class Chart(object):
def to_html(self, path):
"Save full html file"
# TODO: Optional add open new tab as soon as it save the html file
- save_html(self._repr_javascript_(), path)
+ from jinja2 import Template
+
+ script = self._repr_javascript_()
+ script = script.replace('element', '$("body")')
+
+ html_template = Template(get_content(HTML_TEMPLATE_PATH))
+ html_content = html_template.render(SCRIPT=script)
+ with open(path, 'w') as f:
+ f.write(html_content)
diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index ce77d49..ea90926 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -39,6 +39,10 @@ def make_chart(df, **kwargs):
chart = Chart(**kwargs)
chart._kwargs_chart_['data_columns'] = df.columns
+ chart.set_xlabel(kwargs['x'])
+ if kwargs.get('y', None):
+ chart.set_ylabel(kwargs['y'])
+
if kwargs['type'] in ['bar', 'line', 'hist']:
set_bar_line_chart(chart, df, **kwargs)
diff --git a/krisk/template.py b/krisk/template.py
deleted file mode 100644
index b1b2e19..0000000
--- a/krisk/template.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from krisk.util import join_current_dir
-
-
-def save_html(script, path):
-
- from jinja2 import Template
-
- html_template = open(join_current_dir('static/template.html'), 'r')
- script = script.replace('element', '$("body")')
- f = open(path, 'w')
- f.write(Template(html_template.read()).render(SCRIPT=script))
- f.close()
- html_template.close()
-
-
-APPEND_ELEMENT = """
-$('#{id}').attr('id','{id}'+'_old');
-element.append('<div id="{id}" style="width: {width}px;height:{height}px;"></div>');"""
-
-OPTION_TEMPLATE = {
- 'title': {
- 'text': ''
- },
- 'tooltip': {'axisPointer': {'type': ''}},
- 'legend': {
- 'data': []
- },
- 'xAxis': {
- 'data': []
- },
- 'yAxis': {},
- 'series': []
-}
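A usage sketch of the new label methods; the DataFrame contents are invented, and `kk` is the alias the test suite uses for `krisk.plot`:

```
import pandas as pd
import krisk.plot as kk

df = pd.DataFrame({'year': [2000, 2000, 2005], 'pop': [1.0, 2.0, 3.0]})

chart = kk.bar(df, 'year', y='pop', how='mean')  # labels now default to 'year'/'pop'
chart.set_xlabel('Year', axis_gap=35, font_size=14)
chart.set_ylabel('Mean population', rotation=90)
```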
| Set Style for y and x axis | napjon/krisk | diff --git a/krisk/tests/conftest.py b/krisk/tests/conftest.py
index 4d132f1..bbf3e2e 100644
--- a/krisk/tests/conftest.py
+++ b/krisk/tests/conftest.py
@@ -21,6 +21,14 @@ def gapminder():
DATA_DIR + '/gapminderDataFiveYear.txt', sep='\t').groupby(
['year', 'continent'], as_index=False).first())
[email protected](scope="module")
+def decl_chart():
+ "Declarative Chart"
+ from krisk.chart import Chart
+ chart = Chart()
+ chart.option['series'] = [{'data': [10, 3, 7, 4, 5], 'name': 'continent', 'type': 'bar'}]
+ chart.option['xAxis'] = {'data': ['Americas', 'Asia', 'Africa', 'Oceania', 'Europe']}
+ return chart
@pytest.fixture
def gap_chart(gapminder):
diff --git a/krisk/tests/test_chart.py b/krisk/tests/test_chart.py
index 88c1d9f..9376e6f 100644
--- a/krisk/tests/test_chart.py
+++ b/krisk/tests/test_chart.py
@@ -48,3 +48,19 @@ def test_color(bar_simple):
background='green', palette=['purple']).get_option()
assert colored['backgroundColor'] == 'green'
assert colored['color'] == ['purple']
+
+def test_label_axes(decl_chart):
+ decl_chart.set_xlabel('xlabel')
+ decl_chart.set_ylabel('ylabel')
+ assert decl_chart.option['xAxis'] == {'data': ['Americas', 'Asia', 'Africa', 'Oceania', 'Europe'],
+ 'name': 'xlabel',
+ 'nameGap': 30,
+ 'nameLocation': 'middle',
+ 'nameRotate': 0,
+ 'nameTextStyle': {'fontSize': 16}}
+
+ assert decl_chart.option['yAxis'] == {'name': 'ylabel',
+ 'nameGap': 30,
+ 'nameLocation': 'middle',
+ 'nameRotate': 90,
+ 'nameTextStyle': {'fontSize': 16}}
\ No newline at end of file
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 08cf413..ebbd7f5 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -5,6 +5,16 @@ DATA_DIR = 'krisk/tests/data'
read_option_tests = lambda f: json.load(open(DATA_DIR + '/' + f, 'r'))
+
+# Will delete this in 0.3, https://github.com/napjon/krisk/issues/46
+def remove_name_label(chart):
+ remove_key = lambda s,prefix: dict(filter(lambda t: not t[0].startswith(prefix),
+ chart.option[s].items()))
+ chart.option['xAxis'] = remove_key('xAxis', 'name')
+ chart.option['yAxis'] = remove_key('yAxis', 'name')
+ return chart
+
+
def test_bar(gapminder):
#Bar
@@ -16,12 +26,12 @@ def test_bar(gapminder):
how='mean',
stacked=True,
annotate=True)
- assert p.get_option() == true_option
+ assert remove_name_label(p).get_option() == true_option
#Bar with x-axis and category
true_option = read_option_tests('bar_x_c.json')
p = kk.bar(gapminder,'year',c='continent',stacked=True)
- assert p.get_option() == true_option
+ assert remove_name_label(p).get_option() == true_option
# Bar Annotate All
@@ -33,10 +43,10 @@ def test_bar(gapminder):
how='mean',
stacked=True,
annotate='all')
- assert p.get_option() == true_option
+ assert remove_name_label(p).get_option() == true_option
p = kk.bar(gapminder,'continent',y='gdpPercap',how='mean')
- assert p.get_option() == {'legend': {'data': []},
+ assert remove_name_label(p).get_option() == {'legend': {'data': []},
'series': [{'data': [4426.026, 8955.554, 802.675, 3255.367, 19980.596],
'name': 'continent',
'type': 'bar'}],
@@ -48,10 +58,10 @@ def test_bar(gapminder):
def test_trendline(gapminder):
p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True)
- assert p.get_option() == read_option_tests('bar_year_pop_mean_trendline.json')
+ assert remove_name_label(p).get_option() == read_option_tests('bar_year_pop_mean_trendline.json')
p = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent',stacked=True)
- assert p.get_option() == read_option_tests('bar_year_pop_mean_continent_trendline.json')
+ assert remove_name_label(p).get_option() == read_option_tests('bar_year_pop_mean_continent_trendline.json')
try:
kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent')
@@ -71,7 +81,7 @@ def test_line(gapminder):
area=True,
annotate='all')
- assert p.get_option() == true_option
+ assert remove_name_label(p).get_option() == true_option
def test_smooth_line(gapminder):
@@ -91,14 +101,14 @@ def test_full_bar_line(gapminder):
true_option = read_option_tests('full_bar_line.json')
- assert bar.option == line.option == true_option
+ assert remove_name_label(bar).option == remove_name_label(line).option == true_option
def test_hist(gapminder):
true_option = read_option_tests('hist_x.json')
p = kk.hist(gapminder,'lifeExp',bins=10)
- assert p.get_option() == true_option
+ assert remove_name_label(p).get_option() == true_option
true_option = read_option_tests('hist.json')
@@ -110,12 +120,13 @@ def test_hist(gapminder):
normed=True,
stacked=True)
- assert p.get_option() == true_option
+ assert remove_name_label(p).get_option() == true_option
def test_density(gapminder):
- option = kk.hist(gapminder,'lifeExp',density=True).get_option()
+ chart1 = kk.hist(gapminder,'lifeExp',density=True)
+ option = remove_name_label(chart1).get_option()
assert option['series'][0]['data'] == [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6]
assert option['series'][-1] == {'data': [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6, 0],
@@ -127,7 +138,8 @@ def test_density(gapminder):
'data': [0, 28, 34, 39, 44, 49, 55, 60, 65, 70, 75, 81, 0]}
true_option = read_option_tests('hist_lifeExp_b10_continent_density.json')
- option2 = kk.hist(gapminder,'lifeExp',bins=10,c='continent',stacked=True,density=True).get_option()
+ chart = kk.hist(gapminder,'lifeExp',bins=10,c='continent',stacked=True,density=True)
+ option2 = remove_name_label(chart).get_option()
assert true_option == option2
try:
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@f873a68da47b99677a73ebea2d92698db482a6d0#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_chart.py::test_label_axes"
]
| []
| [
"krisk/tests/test_chart.py::test_replot_and_resync",
"krisk/tests/test_chart.py::test_flip",
"krisk/tests/test_chart.py::test_read_df",
"krisk/tests/test_chart.py::test_color",
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_trendline",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_smooth_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_density",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| BSD 3-Clause "New" or "Revised" License | 799 | [
"krisk/plot/make_chart.py",
"krisk/chart.py",
"krisk/template.py"
]
| [
"krisk/plot/make_chart.py",
"krisk/chart.py",
"krisk/template.py"
]
|
cwacek__python-jsonschema-objects-56 | f6e2f9ef5d99f2cfb707c121828d2a0fa3d9252d | 2016-10-08 18:18:37 | 03be1567ef25edc27fe36675444bd14da93b0f15 | diff --git a/python_jsonschema_objects/__init__.py b/python_jsonschema_objects/__init__.py
index 04ccf1d..39aeb54 100644
--- a/python_jsonschema_objects/__init__.py
+++ b/python_jsonschema_objects/__init__.py
@@ -47,7 +47,6 @@ class ObjectBuilder(object):
resolver=self.resolver)
self._classes = None
- self._resolved = None
@property
def classes(self):
@@ -55,12 +54,6 @@ class ObjectBuilder(object):
self._classes = self.build_classes()
return self._classes
- def get_class(self, uri):
- if self._resolved is None:
- self._classes = self.build_classes()
- return self._resolved.get(uri, None)
-
-
def memory_resolver(self, uri):
return self.mem_resolved[uri[7:]]
@@ -95,10 +88,8 @@ class ObjectBuilder(object):
nm = self.schema['title'] if 'title' in self.schema else self.schema['id']
nm = inflection.parameterize(six.text_type(nm), '_')
-
kw = {"strict" : strict}
builder.construct(nm, self.schema,**kw)
- self._resolved = builder.resolved
return (
util.Namespace.from_mapping(dict(
diff --git a/python_jsonschema_objects/classbuilder.py b/python_jsonschema_objects/classbuilder.py
index f75b039..086a5c3 100644
--- a/python_jsonschema_objects/classbuilder.py
+++ b/python_jsonschema_objects/classbuilder.py
@@ -255,8 +255,8 @@ class ProtocolBase(collections.MutableMapping):
propname = lambda x: self.__prop_names__[x]
missing = [x for x in self.__required__
- if propname(x) not in self._properties
- or self._properties[propname(x)] is None]
+ if propname(x) not in self._properties or
+ self._properties[propname(x)] is None]
if len(missing) > 0:
raise validators.ValidationError(
@@ -305,6 +305,7 @@ class TypeProxy(object):
self.__class__, klass))
try:
obj = klass(*a, **kw)
+ obj.validate()
except TypeError as e:
validation_errors.append((klass, e))
except validators.ValidationError as e:
diff --git a/python_jsonschema_objects/validators.py b/python_jsonschema_objects/validators.py
index facb0c6..6aafcb3 100644
--- a/python_jsonschema_objects/validators.py
+++ b/python_jsonschema_objects/validators.py
@@ -166,6 +166,20 @@ class ArrayValidator(object):
raise TypeError("Invalid value given to array validator: {0}"
.format(ary))
+ @classmethod
+ def from_json(cls, jsonmsg):
+ import json
+ msg = json.loads(jsonmsg)
+ obj = cls(msg)
+ obj.validate()
+ return obj
+
+ @classmethod
+ def serialize(self):
+ self.validate()
+ enc = util.ProtocolJSONEncoder()
+ return enc.encode(self)
+
def validate(self):
converted = self.validate_items()
self.validate_length()
@@ -238,10 +252,12 @@ class ArrayValidator(object):
val = elem
val.validate()
typed_elems.append(val)
+
elif util.safe_issubclass(typ, ArrayValidator):
val = typ(elem)
val.validate()
typed_elems.append(val)
+
elif isinstance(typ, classbuilder.TypeProxy):
try:
if isinstance(elem, (six.string_types, six.integer_types, float)):
@@ -251,8 +267,9 @@ class ArrayValidator(object):
except TypeError as e:
raise ValidationError("'{0}' is not a valid value for '{1}': {2}"
.format(elem, typ, e))
- val.validate()
- typed_elems.append(val)
+ else:
+ val.validate()
+ typed_elems.append(val)
return typed_elems
@@ -314,11 +331,32 @@ class ArrayValidator(object):
item_constraint = klassbuilder.resolved[uri]
- elif isdict and item_constraint['type'] == 'array':
+ elif isdict and item_constraint.get('type') == 'array':
+ # We need to create a sub-array validator.
item_constraint = ArrayValidator.create(name + "#sub",
item_constraint=item_constraint[
'items'],
addl_constraints=item_constraint)
+ elif isdict and 'oneOf' in item_constraint:
+ # We need to create a TypeProxy validator
+ uri = "{0}_{1}".format(name, "<anonymous_list_type>")
+ type_array = []
+ for i, item_detail in enumerate(item_constraint['oneOf']):
+ if '$ref' in item_detail:
+ subtype = klassbuilder.construct(
+ util.resolve_ref_uri(
+ klassbuilder.resolver.resolution_scope,
+ item_detail['$ref']),
+ item_detail)
+ else:
+ subtype = klassbuilder.construct(
+ uri + "_%s" % i, item_detail)
+
+ type_array.append(subtype)
+
+ item_constraint = classbuilder.TypeProxy(type_array)
+
+
props['__itemtype__'] = item_constraint
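The essence of the fix is TypeProxy dispatch over the `oneOf` subtypes: each candidate class is tried in turn and kept only if it both constructs and validates. A stripped-down sketch of that control flow (illustrative, not the library's exact code):

```
def coerce_one_of(elem, candidate_classes):
    errors = []
    for klass in candidate_classes:
        try:
            obj = klass(**elem)   # the real code also handles scalar elements
            obj.validate()        # the patch adds this validate() call
            return obj
        except Exception as exc:  # TypeError / ValidationError in the real code
            errors.append((klass, exc))
    raise ValueError("no oneOf subtype matched %r: %r" % (elem, errors))
```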
| Bad handling of arrays of mixed types objects
(or at least inconsistent with `jsonschema`)
Here is a minimal test case:
```
schema_good = {
'title': 'example',
'type': 'array',
'items': {
'oneOf': [
{
'type': 'object',
'properties': {
'a': {
'type': 'string',
}
},
'required': [ 'a' ],
},
{
'type': 'object',
'properties': {
'b': {
'type': 'string',
}
},
'required': [ 'b' ],
},
]
},
}
instance = [{'a': ''}, {'b': ''}]
```
`jsonschema` validates our instance but `build_classes()` crashes:
```
Traceback (most recent call last):
File "schema.py", line 70, in <module>
pjs.ObjectBuilder(schema_bad).build_classes()
File ".../python_jsonschema_objects/__init__.py", line 83, in build_classes
builder.construct(nm, self.schema)
File ".../python_jsonschema_objects/classbuilder.py", line 418, in construct
ret = self._construct(uri, *args, **kw)
File ".../python_jsonschema_objects/classbuilder.py", line 474, in _construct
**clsdata_copy)
File ".../python-jsonschema-objects/python_jsonschema_objects/validators.py", line 317, in create
elif isdict and item_constraint['type'] == 'array':
KeyError: 'type'
```
If we remove the `required` arrays in both objects, `build_classes()` terminates but `jsonschema` is unable to validate the instance anymore:
```
jsonschema.exceptions.ValidationError: {'a': ''} is valid under each of {'properties': {'b': {'type': 'string'}}, 'type': 'object'}, {'properties': {'a': {'type': 'string'}}, 'type': 'object'}
Failed validating 'oneOf' in schema['items']:
{'oneOf': [{'properties': {'a': {'type': 'string'}}, 'type': 'object'},
{'properties': {'b': {'type': 'string'}}, 'type': 'object'}]}
On instance[0]:
{'a': ''}
``` | cwacek/python-jsonschema-objects | diff --git a/test/test_regression_49.py b/test/test_regression_49.py
new file mode 100644
index 0000000..ed110c2
--- /dev/null
+++ b/test/test_regression_49.py
@@ -0,0 +1,51 @@
+import pytest
+
+from jsonschema import validate
+import python_jsonschema_objects as pjo
+import json
+
+
[email protected]
+def bad_schema_49():
+ return {
+ 'title': 'example',
+ 'type': 'array',
+ 'items': {
+ 'oneOf': [
+ {
+ 'type': 'object',
+ 'properties': {
+ 'a': {
+ 'type': 'string',
+ }
+ },
+ 'required': ['a'],
+ },
+ {
+ 'type': 'object',
+ 'properties': {
+ 'b': {
+ 'type': 'string',
+ }
+ },
+ 'required': ['b'],
+ },
+ ]
+ },
+ }
+
+
[email protected]
+def instance():
+ return [{"a": ''}, {"b": ""}]
+
+
+def test_is_valid_jsonschema(bad_schema_49, instance):
+ validate(instance, bad_schema_49)
+
+
+def test_regression_49(bad_schema_49, instance):
+ builder = pjo.ObjectBuilder(bad_schema_49)
+ ns = builder.build_classes()
+
+ ns.Example.from_json(json.dumps(instance))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 3
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
execnet==1.9.0
importlib-metadata==4.8.3
inflection==0.2.0
iniconfig==1.1.1
jsonschema==2.3.0
Markdown==2.4
packaging==21.3
pandocfilters==1.2
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
-e git+https://github.com/cwacek/python-jsonschema-objects.git@f6e2f9ef5d99f2cfb707c121828d2a0fa3d9252d#egg=python_jsonschema_objects
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-jsonschema-objects
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- inflection==0.2.0
- iniconfig==1.1.1
- jsonschema==2.3.0
- markdown==2.4
- packaging==21.3
- pandocfilters==1.2
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-jsonschema-objects
| [
"test/test_regression_49.py::test_regression_49"
]
| []
| [
"test/test_regression_49.py::test_is_valid_jsonschema"
]
| []
| MIT License | 800 | [
"python_jsonschema_objects/validators.py",
"python_jsonschema_objects/classbuilder.py",
"python_jsonschema_objects/__init__.py"
]
| [
"python_jsonschema_objects/validators.py",
"python_jsonschema_objects/classbuilder.py",
"python_jsonschema_objects/__init__.py"
]
|
|
alecthomas__voluptuous-231 | 99fb7bd43b2cbd8e53cb9e50765c93339b2f7843 | 2016-10-09 22:21:03 | 99fb7bd43b2cbd8e53cb9e50765c93339b2f7843 | diff --git a/voluptuous/schema_builder.py b/voluptuous/schema_builder.py
index 053d951..9e4b622 100644
--- a/voluptuous/schema_builder.py
+++ b/voluptuous/schema_builder.py
@@ -106,6 +106,10 @@ ALLOW_EXTRA = 1 # extra keys not in schema will be included in output
REMOVE_EXTRA = 2 # extra keys not in schema will be excluded from output
+def _isnamedtuple(obj):
+ return isinstance(obj, tuple) and hasattr(obj, '_fields')
+
+
class Undefined(object):
def __nonzero__(self):
return False
@@ -557,7 +561,11 @@ class Schema(object):
errors.append(invalid)
if errors:
raise er.MultipleInvalid(errors)
- return type(data)(out)
+
+ if _isnamedtuple(data):
+ return type(data)(*out)
+ else:
+ return type(data)(out)
return validate_sequence
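Why the namedtuple branch is needed: a namedtuple constructor takes one positional argument per field rather than a single iterable, so `type(data)(out)` works for plain sequences but not for namedtuples. A quick illustration:

```
from collections import namedtuple

NT = namedtuple('NT', ['a', 'b'])
out = [1, 2]

tuple(out)  # fine: plain sequence types accept an iterable
NT(*out)    # fine: this is what the new _isnamedtuple branch does
# NT(out)   # TypeError: namedtuple fields are separate positional arguments
```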
| NamedTuples do not validate as tuples or named tuples
```
from collections import namedtuple
NT = namedtuple('NT', ['a', 'b'])
nt = NT(1, 2)
t = (1, 2)
Schema((int, int))(nt) # => Invalid
Schema((int, int))(t) # => Valid
Schema(NT(int, int))(nt) # => Invalid
Schema(NT(int, int))(t) # => Valid
```
Given that `NT(1, 2) == (1, 2) => True` I'd expect all of the above cases to return as valid. | alecthomas/voluptuous | diff --git a/voluptuous/tests/tests.py b/voluptuous/tests/tests.py
index 8759b13..7ddd092 100644
--- a/voluptuous/tests/tests.py
+++ b/voluptuous/tests/tests.py
@@ -1,4 +1,5 @@
import copy
+import collections
from nose.tools import assert_equal, assert_raises, assert_true
from voluptuous import (
@@ -699,3 +700,14 @@ def test_number_validation_with_valid_precision_scale_yield_decimal_false():
schema = Schema({"number" : Number(precision=6, scale=2, yield_decimal=False)})
out_ = schema({"number": '1234.00'})
assert_equal(out_.get("number"), '1234.00')
+
+
+def test_named_tuples_validate_as_tuples():
+ NT = collections.namedtuple('NT', ['a', 'b'])
+ nt = NT(1, 2)
+ t = (1, 2)
+
+ Schema((int, int))(nt)
+ Schema((int, int))(t)
+ Schema(NT(int, int))(nt)
+ Schema(NT(int, int))(t)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"nose",
"coverage",
"coveralls",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/alecthomas/voluptuous.git@99fb7bd43b2cbd8e53cb9e50765c93339b2f7843#egg=voluptuous
| name: voluptuous
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/voluptuous
| [
"voluptuous/tests/tests.py::test_named_tuples_validate_as_tuples"
]
| []
| [
"voluptuous/tests/tests.py::test_exact_sequence",
"voluptuous/tests/tests.py::test_required",
"voluptuous/tests/tests.py::test_extra_with_required",
"voluptuous/tests/tests.py::test_iterate_candidates",
"voluptuous/tests/tests.py::test_in",
"voluptuous/tests/tests.py::test_not_in",
"voluptuous/tests/tests.py::test_remove",
"voluptuous/tests/tests.py::test_extra_empty_errors",
"voluptuous/tests/tests.py::test_literal",
"voluptuous/tests/tests.py::test_email_validation",
"voluptuous/tests/tests.py::test_email_validation_with_none",
"voluptuous/tests/tests.py::test_email_validation_with_empty_string",
"voluptuous/tests/tests.py::test_email_validation_without_host",
"voluptuous/tests/tests.py::test_fqdn_url_validation",
"voluptuous/tests/tests.py::test_fqdn_url_without_domain_name",
"voluptuous/tests/tests.py::test_fqdnurl_validation_with_none",
"voluptuous/tests/tests.py::test_fqdnurl_validation_with_empty_string",
"voluptuous/tests/tests.py::test_fqdnurl_validation_without_host",
"voluptuous/tests/tests.py::test_url_validation",
"voluptuous/tests/tests.py::test_url_validation_with_none",
"voluptuous/tests/tests.py::test_url_validation_with_empty_string",
"voluptuous/tests/tests.py::test_url_validation_without_host",
"voluptuous/tests/tests.py::test_copy_dict_undefined",
"voluptuous/tests/tests.py::test_sorting",
"voluptuous/tests/tests.py::test_schema_extend",
"voluptuous/tests/tests.py::test_schema_extend_overrides",
"voluptuous/tests/tests.py::test_schema_extend_key_swap",
"voluptuous/tests/tests.py::test_subschema_extension",
"voluptuous/tests/tests.py::test_repr",
"voluptuous/tests/tests.py::test_list_validation_messages",
"voluptuous/tests/tests.py::test_nested_multiple_validation_errors",
"voluptuous/tests/tests.py::test_humanize_error",
"voluptuous/tests/tests.py::test_fix_157",
"voluptuous/tests/tests.py::test_range_exlcudes_nan",
"voluptuous/tests/tests.py::test_equal",
"voluptuous/tests/tests.py::test_unordered",
"voluptuous/tests/tests.py::test_empty_list_as_exact",
"voluptuous/tests/tests.py::test_schema_decorator_match_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_match_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_match_return_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_return_with_args",
"voluptuous/tests/tests.py::test_schema_decorator_match_return_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_unmatch_return_with_kwargs",
"voluptuous/tests/tests.py::test_schema_decorator_return_only_match",
"voluptuous/tests/tests.py::test_schema_decorator_return_only_unmatch",
"voluptuous/tests/tests.py::test_unicode_key_is_converted_to_utf8_when_in_marker",
"voluptuous/tests/tests.py::test_number_validation_with_string",
"voluptuous/tests/tests.py::test_unicode_key_is_converted_to_utf8_when_plain_text",
"voluptuous/tests/tests.py::test_number_validation_with_invalid_precision_invalid_scale",
"voluptuous/tests/tests.py::test_number_validation_with_valid_precision_scale_yield_decimal_true",
"voluptuous/tests/tests.py::test_number_when_precision_scale_none_yield_decimal_true",
"voluptuous/tests/tests.py::test_number_when_precision_none_n_valid_scale_case1_yield_decimal_true",
"voluptuous/tests/tests.py::test_number_when_precision_none_n_valid_scale_case2_yield_decimal_true",
"voluptuous/tests/tests.py::test_to_utf8",
"voluptuous/tests/tests.py::test_number_when_precision_none_n_invalid_scale_yield_decimal_true",
"voluptuous/tests/tests.py::test_number_when_valid_precision_n_scale_none_yield_decimal_true",
"voluptuous/tests/tests.py::test_number_when_invalid_precision_n_scale_none_yield_decimal_true",
"voluptuous/tests/tests.py::test_number_validation_with_valid_precision_scale_yield_decimal_false"
]
| []
| BSD 3-Clause "New" or "Revised" License | 801 | [
"voluptuous/schema_builder.py"
]
| [
"voluptuous/schema_builder.py"
]
|
|
pypa__setuptools_scm-106 | 9f360e6f70b3818275580d3f4ca285a81a5b86ce | 2016-10-10 19:07:12 | 9f360e6f70b3818275580d3f4ca285a81a5b86ce | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index ef96782..4d2e62f 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,3 +1,8 @@
+v1.13.1
+=======
+
+* fix issue #86 - detect dirty git workdir without tags
+
v1.13.0
=======
diff --git a/setuptools_scm/git.py b/setuptools_scm/git.py
index 6864f96..10280e5 100644
--- a/setuptools_scm/git.py
+++ b/setuptools_scm/git.py
@@ -1,40 +1,69 @@
-from .utils import do, do_ex, trace
+from .utils import do_ex, trace
from .version import meta
from os.path import abspath, normcase, realpath
FILES_COMMAND = 'git ls-files'
-DEFAULT_DESCRIBE = 'git describe --dirty --tags --long --match *.*'
+DEFAULT_DESCRIBE = 'git describe --tags --long --match *.*'
+
+
+def _normalized(path):
+ return normcase(abspath(realpath(path)))
+
+
+class GitWorkdir(object):
+ def __init__(self, path):
+ self.path = path
+
+ def do_ex(self, cmd):
+ return do_ex(cmd, cwd=self.path)
+
+ @classmethod
+ def from_potential_worktree(cls, wd):
+ real_wd, _, ret = do_ex('git rev-parse --show-toplevel', wd)
+ if ret:
+ return
+ trace('real root', real_wd)
+ if _normalized(real_wd) != _normalized(wd):
+ return
+
+ return cls(real_wd)
+
+ def is_dirty(self):
+ out, _, _ = self.do_ex("git status --porcelain")
+ return bool(out)
+
+ def node(self):
+ rev_node, _, ret = self.do_ex('git rev-parse --verify --quiet HEAD')
+ if not ret:
+ return rev_node[:7]
+
+ def count_all_nodes(self):
+ revs, _, _ = self.do_ex('git rev-list HEAD')
+ return revs.count('\n') + 1
def parse(root, describe_command=DEFAULT_DESCRIBE):
- real_root, _, ret = do_ex('git rev-parse --show-toplevel', root)
- if ret:
- return
- trace('real root', real_root)
- if (normcase(abspath(realpath(real_root))) !=
- normcase(abspath(realpath(root)))):
- return
- rev_node, _, ret = do_ex('git rev-parse --verify --quiet HEAD', root)
- if ret:
- return meta('0.0')
- rev_node = rev_node[:7]
+ wd = GitWorkdir(root)
+
+ rev_node = wd.node()
+ dirty = wd.is_dirty()
+
+ if rev_node is None:
+ return meta('0.0', dirty=dirty)
+
out, err, ret = do_ex(describe_command, root)
- if '-' not in out and '.' not in out:
- revs = do('git rev-list HEAD', root)
- count = revs.count('\n')
- if ret:
- out = rev_node
- return meta('0.0', distance=count + 1, node=out)
if ret:
- return
- dirty = out.endswith('-dirty')
- if dirty:
- out = out.rsplit('-', 1)[0]
+ return meta(
+ '0.0',
+ distance=wd.count_all_nodes(),
+ node=rev_node,
+ dirty=dirty,
+ )
tag, number, node = out.rsplit('-', 2)
number = int(number)
if number:
return meta(tag, distance=number, node=node, dirty=dirty)
else:
- return meta(tag, dirty=dirty, node=node)
+ return meta(tag, node=node, dirty=dirty)
diff --git a/setuptools_scm/utils.py b/setuptools_scm/utils.py
index 5623ff0..7ea068d 100644
--- a/setuptools_scm/utils.py
+++ b/setuptools_scm/utils.py
@@ -43,8 +43,11 @@ def _always_strings(env_dict):
def do_ex(cmd, cwd='.'):
trace('cmd', repr(cmd))
+ if not isinstance(cmd, (list, tuple)):
+ cmd = shlex.split(cmd)
+
p = subprocess.Popen(
- shlex.split(cmd),
+ cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=str(cwd),
@@ -70,7 +73,6 @@ def do_ex(cmd, cwd='.'):
def do(cmd, cwd='.'):
out, err, ret = do_ex(cmd, cwd)
if ret:
- trace('ret', ret)
print(err)
return out
| Dirty working directory not being correctly detected when tags are missing
I'm working with a "big repo" with many libraries/services. One service, "tasks", references the "model" library in its requirements.txt:
```
../lib/model
```
If I run
```
$ pex -r requirements.txt -o test.pex
```
then the generated wheel in ~/.pex/build is called `model-0.1.dev239+n6e5cad8-py2-none-any.whl`, which doesn't have the timestamp that the documentation suggests, despite the fact that the repository is dirty:
```
Changes to be committed:
  (use "git reset HEAD <file>..." to unstage)

	modified:   lib/model/setup.py
	modified:   workers/tasks/requirements.txt
``` | pypa/setuptools_scm | diff --git a/testing/test_git.py b/testing/test_git.py
index 5401b66..0819fad 100644
--- a/testing/test_git.py
+++ b/testing/test_git.py
@@ -1,5 +1,6 @@
from setuptools_scm import integration
import pytest
+from datetime import date
@pytest.fixture
@@ -32,6 +33,17 @@ def test_version_from_git(wd):
assert wd.version.startswith('0.2')
[email protected](86)
+def test_git_dirty_notag(wd):
+ wd.commit_testfile()
+ wd.write('test.txt', 'test2')
+ wd("git add test.txt")
+ assert wd.version.startswith('0.1.dev1')
+ today = date.today()
+ # we are dirty, check for the tag
+ assert today.strftime('.d%Y%m%d') in wd.version
+
+
def test_find_files_stop_at_root_git(wd):
wd.commit_testfile()
wd.cwd.ensure('project/setup.cfg')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.13 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"python setup.py egg_info"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/pypa/setuptools_scm.git@9f360e6f70b3818275580d3f4ca285a81a5b86ce#egg=setuptools_scm
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: setuptools_scm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/setuptools_scm
| [
"testing/test_git.py::test_git_dirty_notag"
]
| []
| [
"testing/test_git.py::test_version_from_git",
"testing/test_git.py::test_find_files_stop_at_root_git",
"testing/test_git.py::test_alphanumeric_tags_match"
]
| []
| MIT License | 802 | [
"CHANGELOG.rst",
"setuptools_scm/utils.py",
"setuptools_scm/git.py"
]
| [
"CHANGELOG.rst",
"setuptools_scm/utils.py",
"setuptools_scm/git.py"
]
|
|
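Only the call sites of `GitWorkdir` are visible in the `parse()` hunk above — `node()` and `is_dirty()` themselves live outside this excerpt — so here is a minimal reconstruction of the idea for illustration, assuming `git status --porcelain` as the dirtiness probe (the upstream implementation may differ in detail):

```python
import subprocess

def do_ex(cmd, cwd='.'):
    # run a command, returning (stdout, stderr, returncode)
    p = subprocess.Popen(cmd, cwd=str(cwd),
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out.decode().strip(), err.decode().strip(), p.returncode

class GitWorkdir(object):
    def __init__(self, path):
        self.path = path

    def is_dirty(self):
        # any staged or unstaged change marks the tree dirty,
        # whether or not a tag exists for `git describe` to find
        out, _, _ = do_ex(['git', 'status', '--porcelain',
                           '--untracked-files=no'], self.path)
        return bool(out)

    def node(self):
        out, _, ret = do_ex(['git', 'rev-parse', '--verify',
                             '--quiet', 'HEAD'], self.path)
        return out[:7] if ret == 0 else None
```

With `dirty` computed up front like this, the untagged branch of `parse()` can still return `meta('0.0', distance=..., node=..., dirty=True)`, which is what lets the `.d%Y%m%d` marker asserted by `test_git_dirty_notag` appear even when `git describe` finds no tag.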
sendgrid__sendgrid-python-233 | ca96c8dcd66224e13b38ab8fd2d2b429dd07dd02 | 2016-10-11 13:11:27 | ca96c8dcd66224e13b38ab8fd2d2b429dd07dd02 | diff --git a/sendgrid/helpers/mail/mail.py b/sendgrid/helpers/mail/mail.py
index 48d25f1..661c036 100644
--- a/sendgrid/helpers/mail/mail.py
+++ b/sendgrid/helpers/mail/mail.py
@@ -117,7 +117,11 @@ class Mail(object):
def add_header(self, header):
if self.headers is None:
self.headers = []
- self.headers.append(header)
+ if isinstance(header, dict):
+ (k,v) = list(header.items())[0]
+ self.headers.append(Header(k,v))
+ else:
+ self.headers.append(header)
def add_category(self, category):
if self.categories is None:
| Using .add_header() raises a TypeError
### Issue Summary
`.add_header()` cannot be called with a plain dict; Python raises a TypeError when the message is later serialized by `mail.get()`.
### Steps to Reproduce
1. Create an instance of Mail `mail = Mail( Email(from_email), subject, Email(to_emails), Content('text/html', message) )`
2. Add a header to the instance
`mail.add_header( {'Reply-To' : '[email protected], [email protected]'} )`
3. Send the Request
`response = sg.client.mail.send.post(request_body=mail.get())`
### Stack Trace
[File "mail.py", line 63](https://github.com/sendgrid/sendgrid-python/blob/master/sendgrid/helpers/mail/mail.py#L63), in **get**
> headers.update(key**.get()**)
TypeError: get expected at least 1 arguments, got 0 | sendgrid/sendgrid-python | diff --git a/test/test_mail.py b/test/test_mail.py
index e0dc945..9b482dc 100644
--- a/test/test_mail.py
+++ b/test/test_mail.py
@@ -100,6 +100,8 @@ class UnitTests(unittest.TestCase):
mail.add_header(Header("X-Test1", "test1"))
mail.add_header(Header("X-Test3", "test2"))
+ mail.add_header({"X-Test4" : "test4"})
+
mail.add_category(Category("May"))
mail.add_category(Category("2016"))
@@ -131,4 +133,4 @@ class UnitTests(unittest.TestCase):
mail.set_reply_to(Email("[email protected]"))
- self.assertEqual(json.dumps(mail.get(), sort_keys=True), '{"asm": {"group_id": 99, "groups_to_display": [4, 5, 6, 7, 8]}, "attachments": [{"content": "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gQ3JhcyBwdW12", "content_id": "Balance Sheet", "disposition": "attachment", "filename": "balance_001.pdf", "type": "application/pdf"}, {"content": "BwdW", "content_id": "Banner", "disposition": "inline", "filename": "banner.png", "type": "image/png"}], "batch_id": "sendgrid_batch_id", "categories": ["May", "2016"], "content": [{"type": "text/plain", "value": "some text here"}, {"type": "text/html", "value": "<html><body>some text here</body></html>"}], "custom_args": {"campaign": "welcome", "weekday": "morning"}, "from": {"email": "[email protected]", "name": "Example User"}, "headers": {"X-Test1": "test1", "X-Test3": "test2"}, "ip_pool_name": "24", "mail_settings": {"bcc": {"email": "[email protected]", "enable": true}, "bypass_list_management": {"enable": true}, "footer": {"enable": true, "html": "<html><body>Footer Text</body></html>", "text": "Footer Text"}, "sandbox_mode": {"enable": true}, "spam_check": {"enable": true, "post_to_url": "https://spamcatcher.sendgrid.com", "threshold": 1}}, "personalizations": [{"bcc": [{"email": "[email protected]"}, {"email": "[email protected]"}], "cc": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}], "custom_args": {"type": "marketing", "user_id": "343"}, "headers": {"X-Mock": "true", "X-Test": "test"}, "send_at": 1443636843, "subject": "Hello World from the Personalized SendGrid Python Library", "substitutions": {"%city%": "Denver", "%name%": "Example User"}, "to": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}]}, {"bcc": [{"email": "[email protected]"}, {"email": "[email protected]"}], "cc": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}], "custom_args": {"type": "marketing", "user_id": "343"}, "headers": {"X-Mock": "true", "X-Test": "test"}, "send_at": 1443636843, "subject": "Hello World from the Personalized SendGrid Python Library", "substitutions": {"%city%": "Denver", "%name%": "Example User"}, "to": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}]}], "reply_to": {"email": "[email protected]"}, "sections": {"%section1%": "Substitution Text for Section 1", "%section2%": "Substitution Text for Section 2"}, "send_at": 1443636842, "subject": "Hello World from the SendGrid Python Library", "template_id": "13b8f94f-bcae-4ec6-b752-70d6cb59f932", "tracking_settings": {"click_tracking": {"enable": true, "enable_text": true}, "ganalytics": {"enable": true, "utm_campaign": "some campaign", "utm_content": "some content", "utm_medium": "some medium", "utm_source": "some source", "utm_term": "some term"}, "open_tracking": {"enable": true, "substitution_tag": "Optional tag to replace with the open image in the body of the message"}, "subscription_tracking": {"enable": true, "html": "<html><body>html to insert into the text/html portion of the message</body></html>", "substitution_tag": "Optional tag to replace with the open image in the body of the message", "text": "text to insert into the text/plain portion of the message"}}}')
+ self.assertEqual(json.dumps(mail.get(), sort_keys=True), '{"asm": {"group_id": 99, "groups_to_display": [4, 5, 6, 7, 8]}, "attachments": [{"content": "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gQ3JhcyBwdW12", "content_id": "Balance Sheet", "disposition": "attachment", "filename": "balance_001.pdf", "type": "application/pdf"}, {"content": "BwdW", "content_id": "Banner", "disposition": "inline", "filename": "banner.png", "type": "image/png"}], "batch_id": "sendgrid_batch_id", "categories": ["May", "2016"], "content": [{"type": "text/plain", "value": "some text here"}, {"type": "text/html", "value": "<html><body>some text here</body></html>"}], "custom_args": {"campaign": "welcome", "weekday": "morning"}, "from": {"email": "[email protected]", "name": "Example User"}, "headers": {"X-Test1": "test1", "X-Test3": "test2", "X-Test4": "test4"}, "ip_pool_name": "24", "mail_settings": {"bcc": {"email": "[email protected]", "enable": true}, "bypass_list_management": {"enable": true}, "footer": {"enable": true, "html": "<html><body>Footer Text</body></html>", "text": "Footer Text"}, "sandbox_mode": {"enable": true}, "spam_check": {"enable": true, "post_to_url": "https://spamcatcher.sendgrid.com", "threshold": 1}}, "personalizations": [{"bcc": [{"email": "[email protected]"}, {"email": "[email protected]"}], "cc": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}], "custom_args": {"type": "marketing", "user_id": "343"}, "headers": {"X-Mock": "true", "X-Test": "test"}, "send_at": 1443636843, "subject": "Hello World from the Personalized SendGrid Python Library", "substitutions": {"%city%": "Denver", "%name%": "Example User"}, "to": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}]}, {"bcc": [{"email": "[email protected]"}, {"email": "[email protected]"}], "cc": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}], "custom_args": {"type": "marketing", "user_id": "343"}, "headers": {"X-Mock": "true", "X-Test": "test"}, "send_at": 1443636843, "subject": "Hello World from the Personalized SendGrid Python Library", "substitutions": {"%city%": "Denver", "%name%": "Example User"}, "to": [{"email": "[email protected]", "name": "Example User"}, {"email": "[email protected]", "name": "Example User"}]}], "reply_to": {"email": "[email protected]"}, "sections": {"%section1%": "Substitution Text for Section 1", "%section2%": "Substitution Text for Section 2"}, "send_at": 1443636842, "subject": "Hello World from the SendGrid Python Library", "template_id": "13b8f94f-bcae-4ec6-b752-70d6cb59f932", "tracking_settings": {"click_tracking": {"enable": true, "enable_text": true}, "ganalytics": {"enable": true, "utm_campaign": "some campaign", "utm_content": "some content", "utm_medium": "some medium", "utm_source": "some source", "utm_term": "some term"}, "open_tracking": {"enable": true, "substitution_tag": "Optional tag to replace with the open image in the body of the message"}, "subscription_tracking": {"enable": true, "html": "<html><body>html to insert into the text/html portion of the message</body></html>", "substitution_tag": "Optional tag to replace with the open image in the body of the message", "text": "text to insert into the text/plain portion of the message"}}}')
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 3.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"unittest2",
"unittest2py3k",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
dataclasses==0.8
Flask==0.10.1
importlib-metadata==4.8.3
iniconfig==1.1.1
itsdangerous==2.0.1
Jinja2==3.0.3
linecache2==1.0.0
MarkupSafe==2.0.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-http-client==2.2.1
PyYAML==3.11
-e git+https://github.com/sendgrid/sendgrid-python.git@ca96c8dcd66224e13b38ab8fd2d2b429dd07dd02#egg=sendgrid
six==1.17.0
tomli==1.2.3
traceback2==1.4.0
typing_extensions==4.1.1
unittest2==1.1.0
unittest2py3k==0.5.1
Werkzeug==2.0.3
zipp==3.6.0
| name: sendgrid-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==22.2.0
- dataclasses==0.8
- flask==0.10.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- itsdangerous==2.0.1
- jinja2==3.0.3
- linecache2==1.0.0
- markupsafe==2.0.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-http-client==2.2.1
- pyyaml==3.11
- six==1.17.0
- tomli==1.2.3
- traceback2==1.4.0
- typing-extensions==4.1.1
- unittest2==1.1.0
- unittest2py3k==0.5.1
- werkzeug==2.0.3
- zipp==3.6.0
prefix: /opt/conda/envs/sendgrid-python
| [
"test/test_mail.py::UnitTests::test_kitchenSink"
]
| []
| [
"test/test_mail.py::UnitTests::test_helloEmail"
]
| []
| MIT License | 803 | [
"sendgrid/helpers/mail/mail.py"
]
| [
"sendgrid/helpers/mail/mail.py"
]
|
|
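The fix above converts bare dicts into `Header` objects at `add_header()` time, so that `Mail.get()` can keep calling `.get()` on every entry. A self-contained sketch of the same flow (this `Header` is a stand-in mirroring the helper's `get()` contract, not the real sendgrid class):

```python
class Header(object):
    def __init__(self, key, value):
        self.key, self.value = key, value

    def get(self):
        # Mail.get() merges these per-header dicts into one mapping
        return {self.key: self.value}

class Mail(object):
    def __init__(self):
        self.headers = None

    def add_header(self, header):
        if self.headers is None:
            self.headers = []
        if isinstance(header, dict):
            # unwrap a single-pair dict such as {'Reply-To': '...'}
            (k, v) = list(header.items())[0]
            self.headers.append(Header(k, v))
        else:
            self.headers.append(header)

mail = Mail()
mail.add_header(Header('X-Test1', 'test1'))
mail.add_header({'X-Test4': 'test4'})   # no longer blows up later in get()

merged = {}
for h in mail.headers:
    merged.update(h.get())              # the step that raised TypeError before
assert merged == {'X-Test1': 'test1', 'X-Test4': 'test4'}
```

Note that the unwrapping only looks at the first key, so a dict carrying several headers would silently drop all but one — a limitation of the patch as written, not of this sketch.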
napjon__krisk-51 | bc85b5f7aaff361e80bd06051650c49a00576e86 | 2016-10-11 16:05:08 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/51?src=pr) is 92.87% (diff: 100%)
> Merging [#51](https://codecov.io/gh/napjon/krisk/pull/51?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will increase coverage by **0.80%**
```diff
@@            0.2-develop     #51     diff @@
=============================================
  Files                9        8       -1
  Lines              353      351       -2
  Methods              0        0
  Messages             0        0
  Branches            50       50
=============================================
+ Hits               325      326       +1
+ Misses              18       17       -1
+ Partials            10        8       -2
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [bc85b5f...1bf0d73](https://codecov.io/gh/napjon/krisk/compare/bc85b5f7aaff361e80bd06051650c49a00576e86...1bf0d732cc133232ed04a977a3aedbb216da826f?src=pr) | diff --git a/.coveragerc b/.coveragerc
index 5c46c59..2c4b66b 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,5 +2,6 @@
branch: True
omit =
*/test_*.py
+ krisk/__init__.py
source =
krisk
diff --git a/krisk/__init__.py b/krisk/__init__.py
index a63c1ea..c4e343f 100644
--- a/krisk/__init__.py
+++ b/krisk/__init__.py
@@ -1,6 +1,11 @@
# Krisk is a module to bridge E-Charts to python data science ecosystem
-from krisk.connections import init_notebook
+
+from krisk.util import init_notebook
+import krisk.plot.api as plot
+# Override chart to prevent all variables to be accessed except two below
+import krisk.chart.api as chart
+from krisk.chart.api import rcParams, Chart
__version__ = '0.1.11'
diff --git a/krisk/chart/__init__.py b/krisk/chart/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/krisk/chart/api.py b/krisk/chart/api.py
new file mode 100644
index 0000000..95f3271
--- /dev/null
+++ b/krisk/chart/api.py
@@ -0,0 +1,1 @@
+from krisk.chart.core import rcParams, Chart
\ No newline at end of file
diff --git a/krisk/chart.py b/krisk/chart/core.py
similarity index 92%
rename from krisk/chart.py
rename to krisk/chart/core.py
index beb692d..5adea11 100644
--- a/krisk/chart.py
+++ b/krisk/chart/core.py
@@ -3,13 +3,17 @@
import uuid
import json
from copy import deepcopy
-from krisk.connections import get_paths
from IPython.display import Javascript
from krisk.util import get_content, join_current_dir
+__all__ = ['rcParams', 'Chart']
+
+
+JS_LIBS = ['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons']
JS_TEMPLATE_PATH = 'static/krisk.js'
EVENT_TEMPLATE_PATH = 'static/on_event.js'
HTML_TEMPLATE_PATH = 'static/template.html'
+DEFAULT_CFG_PATH = 'static/defaults.json'
APPEND_ELEMENT = """
$('#{id}').attr('id','{id}'+'_old');
@@ -30,6 +34,37 @@ OPTION_TEMPLATE = {
'series': []
}
+rcParams = dict(theme='',
+ size=dict(width=600,height=400),
+ color=dict(background='',palette=''),
+ # TODO: Add tooltip, legend, and toolbox in 0.3
+ # tooltip_style=dict(trigger='item',
+ # axis_pointer='line',
+ # trigger_on='mousemove',
+ # font_style='normal',
+ # font_family='sans-serif',
+ # font_size=14),
+ # legend=dict(align='auto',
+ # orient='horizontal',
+ # x_pos='auto',
+ # y_pos='auto'),
+ # toolbox=dict(save_format=None,
+ # restore=False,
+ # data_view=None,
+ # data_zoom=False,
+ # magic_type=None,
+ # brush=None,
+ # align='auto',
+ # orient='horizontal',
+ # x_pos='auto',
+ # y_pos='auto')
+ )
+
+
+# def set(rc):
+# DEFAULTS.update(dict)
+# return DEFAULTS
+
class Chart(object):
"""Chart Object"""
@@ -45,10 +80,15 @@ class Chart(object):
self._chartId = str(uuid.uuid4())
self.option = deepcopy(OPTION_TEMPLATE)
self._kwargs_chart_ = kwargs
- self._theme = ''
self._axes_swapped = True
self._events = {}
- self._size = {'width': 600, 'height': 400}
+
+ self._size = rcParams['size']
+ self._theme = rcParams['theme']
+ self.set_color(**rcParams['color'])
+ # self.set_legend(**rcParams['legend'])
+ # self.set_toolbox(**rcParams['toolbox'])
+ # self.set_tooltip_style(**rcParams['tooltip_style'])
# Color and Themes
@@ -63,7 +103,7 @@ class Chart(object):
{'dark','vintage','roma','shine','infographic','macarons'}, default None
"""
- themes = get_paths()[1:]
+ themes = JS_LIBS[1:] + ['']
if theme not in themes:
raise AssertionError("Invalid theme name: {theme}".format(
@@ -519,7 +559,7 @@ class Chart(object):
event=e, function=self._events[e]) for e in self._events]
OPTION_KWS = dict(
- requires=get_paths().__repr__(),
+ requires=JS_LIBS.__repr__(),
chartId=self._chartId,
theme=self._theme,
option=json.dumps(
diff --git a/krisk/connections.py b/krisk/connections.py
deleted file mode 100644
index b44a60e..0000000
--- a/krisk/connections.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#TODO FIX LOCAL PATH! NEED TO DO nbextension install
-#from collections import OrderedDict
-#import json
-#from krisk.util import join_current_dir
-
-# ECHARTS_URL = 'https://cdnjs.cloudflare.com/ajax/libs/echarts/3.2.0/'
-# ECHARTS_FILE = 'echarts.min'
-# d_paths = OrderedDict({})
-THEMES = ['dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons']
-
-# THEMES_URL='//echarts.baidu.com/asset/theme/'
-# PATH_LOCAL = join_current_dir('static')
-# PATH_LOCAL = 'pandas-echarts/krisk/static'
-
-
-def init_notebook():
- """
- Inject Javascript to notebook, default using local js.
- This function must be last executed in a cell to produce the Javascript in the output cell
- """
- from IPython.display import Javascript
- return Javascript("""
- require.config({
- baseUrl : "//cdn.rawgit.com/napjon/krisk/master/krisk/static",
- paths: {
- echarts: "//cdnjs.cloudflare.com/ajax/libs/echarts/3.2.1/echarts.min"
- }
- });
- """)
-
-
-def get_paths():
- return ['echarts'] + THEMES
diff --git a/krisk/plot/__init__.py b/krisk/plot/__init__.py
index 281688f..e69de29 100644
--- a/krisk/plot/__init__.py
+++ b/krisk/plot/__init__.py
@@ -1,153 +0,0 @@
-from krisk.plot.make_chart import make_chart
-
-
-def bar(df,
- x,
- y=None,
- c=None,
- how='count',
- stacked=False,
- annotate=None,
- full=False,
- trendline=False):
- """
- Parameters
- ----------
- df: pd.DataFrame
- data to be used for the chart
- x: string
- columns to be used as category axis
- y: string, default to None
- if None, use count of category value. otherwise aggregate based on y columns
- category: string, default to None
- another grouping columns inside x-axis
- how: string, default to None
- to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
- reduced operations.
- stacked: Boolean, default to False.
- Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default to None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the
- last category. if 'all' and stacked, annotate all category
- full: boolean, default to False.
- If true, set to full area stacked chart. Only work if stacked is True.
- trendline: boolean, default to False.
- If true, add line that connected the bars. Only work if not category, category but stacked,
- or not full.
-
- Returns
- -------
- Chart Object
- """
-
- # TODO: add optional argument trendline
-
- return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
- trendline=trendline,
- annotate='top' if annotate == True else annotate)
-
-
-def line(df,
- x,
- y=None,
- c=None,
- how=None,
- stacked=False,
- area=False,
- annotate=None,
- full=False,
- smooth=False):
- """
- Parameters
- ----------
- df: pd.DataFrame
- data to be used for the chart
- x: string
- columns to be used as category axis
- y: string, default to None
- if None, use count of category value. otherwise aggregate based on y columns
- c: string, default to None
- category column inside x-axis
- how: string, default to None
- to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
- reduced operations.
- stacked: Boolean, default to False.
- Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default to None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
- full: boolean, default to False.
- If true, set to full area stacked chart. Only work if stacked is True.
- smooth: boolean, default to False.
- If true, smooth the line.
-
- Returns
- -------
- Chart Object
- """
- return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,full=full,
- smooth=smooth,
- annotate='top' if annotate == True else annotate)
-
-
-def hist(df,
- x,
- c=None,
- bins=10,
- normed=False,
- stacked=False,
- annotate=None,
- density=False):
- """
- Parameters
- ----------
- df: pd.DataFrame
- data to be used for the chart
- x: string
- columns to be used as category axis
- c: string, default to None
- another grouping columns inside x-axis
- bins: int, default to 10
- Set number of bins in histogram
- normed: boolean, default to False
- Whether normalize the histogram
- stacked: Boolean, default to False.
- Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default to None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
- density: boolean, default to False.
- Whether to add density to the plot
-
- Returns
- -------
- Chart Object
- """
- return make_chart(df,type='hist',x=x,c=c,bins=bins,normed=normed,stacked=stacked,
- density=density,
- annotate='top' if annotate == True else annotate)
-
-
-def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
- """
- Parameters
- ----------
- df: pd.DataFrame
- data to be used for the chart
- x,y: string, columns in pd.DataFrame
- Used as coordinate in scatter chart
- s: string, columns in pd.DataFrame default to None
- Used as sizing value of the scatter points
- c: string, default to None
- column used as grouping color category
- saturation
- size_px: tuple, default to (10,70)
- boundary size, lower and upper limit in pixel for min-max scatter points
-
-
- Returns
- -------
- Chart Object
- """
- #TODO add saturation
- return make_chart(df,type='scatter',x=x,y=y,s=s,c=c,saturate=saturate,size_px=size_px)
diff --git a/krisk/plot/api.py b/krisk/plot/api.py
new file mode 100644
index 0000000..281688f
--- /dev/null
+++ b/krisk/plot/api.py
@@ -0,0 +1,153 @@
+from krisk.plot.make_chart import make_chart
+
+
+def bar(df,
+ x,
+ y=None,
+ c=None,
+ how='count',
+ stacked=False,
+ annotate=None,
+ full=False,
+ trendline=False):
+ """
+ Parameters
+ ----------
+ df: pd.DataFrame
+ data to be used for the chart
+ x: string
+ columns to be used as category axis
+ y: string, default to None
+ if None, use count of category value. otherwise aggregate based on y columns
+ category: string, default to None
+ another grouping columns inside x-axis
+ how: string, default to None
+ to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
+ reduced operations.
+ stacked: Boolean, default to False.
+ Whether to stacked category on top of the other categories.
+ annotate: string, {'all',True} default to None
+ if True, annotate value on top of the plot element. If stacked is also True, annotate the
+ last category. if 'all' and stacked, annotate all category
+ full: boolean, default to False.
+ If true, set to full area stacked chart. Only work if stacked is True.
+ trendline: boolean, default to False.
+ If true, add line that connected the bars. Only work if not category, category but stacked,
+ or not full.
+
+ Returns
+ -------
+ Chart Object
+ """
+
+ # TODO: add optional argument trendline
+
+ return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
+ trendline=trendline,
+ annotate='top' if annotate == True else annotate)
+
+
+def line(df,
+ x,
+ y=None,
+ c=None,
+ how=None,
+ stacked=False,
+ area=False,
+ annotate=None,
+ full=False,
+ smooth=False):
+ """
+ Parameters
+ ----------
+ df: pd.DataFrame
+ data to be used for the chart
+ x: string
+ columns to be used as category axis
+ y: string, default to None
+ if None, use count of category value. otherwise aggregate based on y columns
+ c: string, default to None
+ category column inside x-axis
+ how: string, default to None
+ to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
+ reduced operations.
+ stacked: Boolean, default to False.
+ Whether to stacked category on top of the other categories.
+ annotate: string, {'all',True} default to None
+ if True, annotate value on top of the plot element. If stacked is also True, annotate the last
+ category. if 'all' and stacked, annotate all category
+ full: boolean, default to False.
+ If true, set to full area stacked chart. Only work if stacked is True.
+ smooth: boolean, default to False.
+ If true, smooth the line.
+
+ Returns
+ -------
+ Chart Object
+ """
+ return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,full=full,
+ smooth=smooth,
+ annotate='top' if annotate == True else annotate)
+
+
+def hist(df,
+ x,
+ c=None,
+ bins=10,
+ normed=False,
+ stacked=False,
+ annotate=None,
+ density=False):
+ """
+ Parameters
+ ----------
+ df: pd.DataFrame
+ data to be used for the chart
+ x: string
+ columns to be used as category axis
+ c: string, default to None
+ another grouping columns inside x-axis
+ bins: int, default to 10
+ Set number of bins in histogram
+ normed: boolean, default to False
+ Whether normalize the histogram
+ stacked: Boolean, default to False.
+ Whether to stacked category on top of the other categories.
+ annotate: string, {'all',True} default to None
+ if True, annotate value on top of the plot element. If stacked is also True, annotate the last
+ category. if 'all' and stacked, annotate all category
+ density: boolean, default to False.
+ Whether to add density to the plot
+
+ Returns
+ -------
+ Chart Object
+ """
+ return make_chart(df,type='hist',x=x,c=c,bins=bins,normed=normed,stacked=stacked,
+ density=density,
+ annotate='top' if annotate == True else annotate)
+
+
+def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
+ """
+ Parameters
+ ----------
+ df: pd.DataFrame
+ data to be used for the chart
+ x,y: string, columns in pd.DataFrame
+ Used as coordinate in scatter chart
+ s: string, columns in pd.DataFrame default to None
+ Used as sizing value of the scatter points
+ c: string, default to None
+ column used as grouping color category
+ saturation
+ size_px: tuple, default to (10,70)
+ boundary size, lower and upper limit in pixel for min-max scatter points
+
+
+ Returns
+ -------
+ Chart Object
+ """
+ #TODO add saturation
+ return make_chart(df,type='scatter',x=x,y=y,s=s,c=c,saturate=saturate,size_px=size_px)
diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index ea90926..924e015 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -1,5 +1,5 @@
from copy import deepcopy
-from krisk.chart import Chart
+from krisk.chart.api import Chart
def round_list(arr):
diff --git a/krisk/static/defaults.json b/krisk/static/defaults.json
new file mode 100644
index 0000000..34a9fe4
--- /dev/null
+++ b/krisk/static/defaults.json
@@ -0,0 +1,1 @@
+{"size": {"width": 600, "height": 400}, "palette": "", "background": "", "theme": ""}
\ No newline at end of file
diff --git a/krisk/util.py b/krisk/util.py
index fb01c5e..db8bcb3 100644
--- a/krisk/util.py
+++ b/krisk/util.py
@@ -14,3 +14,20 @@ def get_content(filepath):
abs_path = join_current_dir(filepath)
with open(abs_path, 'r') as f:
return f.read()
+
+
+def init_notebook():
+ """
+ Inject Javascript to notebook, useful when you provide html notebook generated (e.g nbviewwer).
+ You don't have to use this when using notebook, as it already provided by nbextension.
+ This function must be last executed in a cell to produce Javascript in the output cell
+ """
+ from IPython.display import Javascript
+ return Javascript("""
+ require.config({
+ baseUrl : "//cdn.rawgit.com/napjon/krisk/master/krisk/static",
+ paths: {
+ echarts: "//cdnjs.cloudflare.com/ajax/libs/echarts/3.2.1/echarts.min"
+ }
+ });
+ """)
\ No newline at end of file
diff --git a/notebooks/declarative-visualization.ipynb b/notebooks/declarative-visualization.ipynb
index fcb4805..d59fa2f 100644
--- a/notebooks/declarative-visualization.ipynb
+++ b/notebooks/declarative-visualization.ipynb
@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 3,
"metadata": {
"collapsed": false
},
@@ -30,7 +30,7 @@
"<IPython.core.display.Javascript object>"
]
},
- "execution_count": 16,
+ "execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
@@ -42,18 +42,18 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
- "from krisk.chart import Chart"
+ "from krisk import Chart"
]
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 5,
"metadata": {
"collapsed": false
},
@@ -62,32 +62,32 @@
"data": {
"application/javascript": [
"\n",
- "$('#42633d9b-b939-477c-998f-6239dc3d1ad9').attr('id','42633d9b-b939-477c-998f-6239dc3d1ad9'+'_old');\n",
- "element.append('<div id=\"42633d9b-b939-477c-998f-6239dc3d1ad9\" style=\"width: 600px;height:400px;\"></div>');require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
+ "$('#9e9dc869-da07-484d-b76f-c90c03d73d94').attr('id','9e9dc869-da07-484d-b76f-c90c03d73d94'+'_old');\n",
+ "element.append('<div id=\"9e9dc869-da07-484d-b76f-c90c03d73d94\" style=\"width: 600px;height:400px;\"></div>');require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
" function parseFunction(str){\n",
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"42633d9b-b939-477c-998f-6239dc3d1ad9\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"9e9dc869-da07-484d-b76f-c90c03d73d94\"),\"\");\n",
" \n",
" var option = {\n",
" \"legend\": {\n",
" \"data\": []\n",
" },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": []\n",
+ " },\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -100,10 +100,10 @@
"\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x104af0588>"
+ "<krisk.chart.core.Chart at 0x104c136a0>"
]
},
- "execution_count": 18,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -163,7 +163,7 @@
},
{
"cell_type": "code",
- "execution_count": 14,
+ "execution_count": 6,
"metadata": {
"collapsed": false
},
@@ -172,35 +172,35 @@
"data": {
"application/javascript": [
"\n",
- "$('#64f7e294-5831-4f59-8498-414051a776e7').attr('id','64f7e294-5831-4f59-8498-414051a776e7'+'_old');\n",
- "element.append('<div id=\"64f7e294-5831-4f59-8498-414051a776e7\" style=\"width: 600px;height:400px;\"></div>');require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
+ "$('#ddb3080c-eece-494a-93a9-8d3c7c2edf23').attr('id','ddb3080c-eece-494a-93a9-8d3c7c2edf23'+'_old');\n",
+ "element.append('<div id=\"ddb3080c-eece-494a-93a9-8d3c7c2edf23\" style=\"width: 600px;height:400px;\"></div>');require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
" function parseFunction(str){\n",
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"64f7e294-5831-4f59-8498-414051a776e7\"),\"vintage\");\n",
+ " var myChart = echarts.init(document.getElementById(\"ddb3080c-eece-494a-93a9-8d3c7c2edf23\"),\"vintage\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": []\n",
- " },\n",
" \"tooltip\": {\n",
" \"axisPointer\": {\n",
" \"type\": \"\"\n",
" }\n",
" },\n",
- " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"data\": []\n",
+ " },\n",
" \"title\": {\n",
" \"left\": \"center\",\n",
- " \"text\": \"This is a blank visualization\",\n",
- " \"bottom\": \"auto\"\n",
+ " \"bottom\": \"auto\",\n",
+ " \"text\": \"This is a blank visualization\"\n",
" },\n",
" \"series\": [],\n",
" \"xAxis\": {\n",
" \"data\": []\n",
- " }\n",
+ " },\n",
+ " \"yAxis\": {}\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -212,10 +212,10 @@
"\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x104b3ee48>"
+ "<krisk.chart.core.Chart at 0x104c13908>"
]
},
- "execution_count": 14,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -236,7 +236,7 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 7,
"metadata": {
"collapsed": false
},
@@ -245,25 +245,20 @@
"data": {
"application/javascript": [
"\n",
- "$('#1b7e3e6c-c4f1-437b-8b02-199165980052').attr('id','1b7e3e6c-c4f1-437b-8b02-199165980052'+'_old');\n",
- "element.append('<div id=\"1b7e3e6c-c4f1-437b-8b02-199165980052\" style=\"width: 600px;height:400px;\"></div>');require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
+ "$('#9e9dc869-da07-484d-b76f-c90c03d73d94').attr('id','9e9dc869-da07-484d-b76f-c90c03d73d94'+'_old');\n",
+ "element.append('<div id=\"9e9dc869-da07-484d-b76f-c90c03d73d94\" style=\"width: 600px;height:400px;\"></div>');require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
" function parseFunction(str){\n",
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"1b7e3e6c-c4f1-437b-8b02-199165980052\"),\"vintage\");\n",
+ " var myChart = echarts.init(document.getElementById(\"9e9dc869-da07-484d-b76f-c90c03d73d94\"),\"vintage\");\n",
" \n",
" var option = {\n",
" \"legend\": {\n",
" \"data\": []\n",
" },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
@@ -277,12 +272,12 @@
" \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"left\": \"center\",\n",
- " \"text\": \"This is a blank visualization\",\n",
- " \"bottom\": \"auto\"\n",
+ " \"bottom\": \"auto\",\n",
+ " \"text\": \"This is a blank visualization\"\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" \"Americas\",\n",
@@ -291,6 +286,11 @@
" \"Oceania\",\n",
" \"Europe\"\n",
" ]\n",
+ " },\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -303,10 +303,10 @@
"\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x104b3cef0>"
+ "<krisk.chart.core.Chart at 0x104c136a0>"
]
},
- "execution_count": 15,
+ "execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
@@ -326,8 +326,9 @@
}
],
"metadata": {
+ "anaconda-cloud": {},
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python [default]",
"language": "python",
"name": "python3"
},
@@ -341,7 +342,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.5.2"
+ "version": "3.5.1"
}
},
"nbformat": 4,
| Set global config for all charts in current session
With this, users won't have to set the config for each chart by hand. Currently targeting:
* Themes, Color, and Palette
* Size | napjon/krisk | diff --git a/krisk/tests/conftest.py b/krisk/tests/conftest.py
index bbf3e2e..8e09042 100644
--- a/krisk/tests/conftest.py
+++ b/krisk/tests/conftest.py
@@ -24,7 +24,7 @@ def gapminder():
@pytest.fixture(scope="module")
def decl_chart():
"Declarative Chart"
- from krisk.chart import Chart
+ from krisk.chart.api import Chart
chart = Chart()
chart.option['series'] = [{'data': [10, 3, 7, 4, 5], 'name': 'continent', 'type': 'bar'}]
chart.option['xAxis'] = {'data': ['Americas', 'Asia', 'Africa', 'Oceania', 'Europe']}
diff --git a/krisk/tests/test_template.py b/krisk/tests/test_template.py
index 041f0db..e5ef7e5 100644
--- a/krisk/tests/test_template.py
+++ b/krisk/tests/test_template.py
@@ -4,7 +4,7 @@ ECHARTS_VERSION = '3.2.1'
def test_html():
- from krisk.chart import Chart
+ from krisk.chart.api import Chart
c = Chart()
c.to_html('../sample.html')
@@ -16,7 +16,7 @@ def test_html():
def test_init_nb():
- from krisk.connections import init_notebook
+ from krisk.util import init_notebook
js_data = init_notebook().data
js_init_template = """
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 6
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@bc85b5f7aaff361e80bd06051650c49a00576e86#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_template.py::test_html",
"krisk/tests/test_template.py::test_init_nb"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 804 | [
"krisk/__init__.py",
"krisk/chart/api.py",
"krisk/connections.py",
"krisk/plot/__init__.py",
"krisk/util.py",
"krisk/plot/make_chart.py",
"krisk/static/defaults.json",
"krisk/chart.py",
".coveragerc",
"notebooks/declarative-visualization.ipynb",
"krisk/chart/__init__.py",
"krisk/plot/api.py"
]
| [
"krisk/chart/core.py",
"krisk/__init__.py",
"krisk/chart/api.py",
"krisk/connections.py",
"krisk/plot/__init__.py",
"krisk/util.py",
"krisk/plot/make_chart.py",
"krisk/static/defaults.json",
".coveragerc",
"notebooks/declarative-visualization.ipynb",
"krisk/chart/__init__.py",
"krisk/plot/api.py"
]
|
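The module shuffle above (`krisk/chart.py` → `krisk/chart/core.py`, plus the new `api` modules) is what carries the feature itself: `Chart.__init__` now seeds `_theme`, `_size`, and the colors from the module-level `rcParams` dict instead of hard-coded literals. A usage sketch against that 0.2-develop layout (`_theme` is a private attribute, peeked at here only for illustration):

```python
from krisk import rcParams, Chart

# set once per session; charts created afterwards inherit these defaults
rcParams['theme'] = 'vintage'
rcParams['size'] = dict(width=800, height=500)

c = Chart()
assert c._theme == 'vintage'   # seeded from rcParams in Chart.__init__
```

One caveat visible in the diff: `self._size = rcParams['size']` binds the shared dict without copying it, so mutating one chart's size in place would bleed into the session default — deep-copying it in `__init__` would avoid that.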
kytos__python-openflow-201 | 013e9c24bd4c234f0bf39210686a58af4c586d7d | 2016-10-11 23:39:44 | 013e9c24bd4c234f0bf39210686a58af4c586d7d | diff --git a/pyof/foundation/basic_types.py b/pyof/foundation/basic_types.py
index 297baa2..a3b5461 100644
--- a/pyof/foundation/basic_types.py
+++ b/pyof/foundation/basic_types.py
@@ -130,10 +130,11 @@ class DPID(GenericType):
self._value = ':'.join(bytes)
def pack(self, value=None):
- buffer = b''
- for value in self._value.split(":"):
- buffer += struct.pack('!B', int(value, 16))
- return buffer
+ if isinstance(value, type(self)):
+ return value.pack()
+ if value is None:
+ value = self._value
+ return struct.pack('!8B', *[int(v, 16) for v in value.split(':')])
class Char(GenericType):
diff --git a/pyof/v0x01/controller2switch/features_reply.py b/pyof/v0x01/controller2switch/features_reply.py
index 8dff843..33f5541 100644
--- a/pyof/v0x01/controller2switch/features_reply.py
+++ b/pyof/v0x01/controller2switch/features_reply.py
@@ -62,7 +62,7 @@ class SwitchFeatures(GenericMessage):
Args:
xid (int): xid to be used on the message header.
- datapath_id (int): UBInt64 datapath unique ID.
+ datapath_id (str or :class:`.DPID`): datapath unique ID.
The lower 48-bits are for MAC address, while
the upper 16-bits are implementer-defined.
n_buffers (int): UBInt32 max packets buffered at once.
| Fix tests
I ran bisect assigning zeros to the default DPID value and it led to d207690facd4844557fb7d53aebbd5d2fb66a414 (added a new basic type: dpid). | kytos/python-openflow | diff --git a/tests/v0x01/test_controller2switch/test_features_reply.py b/tests/v0x01/test_controller2switch/test_features_reply.py
index 2728ded..3f56325 100644
--- a/tests/v0x01/test_controller2switch/test_features_reply.py
+++ b/tests/v0x01/test_controller2switch/test_features_reply.py
@@ -1,5 +1,5 @@
"""Echo request message tests."""
-from pyof.foundation.basic_types import HWAddress
+from pyof.foundation.basic_types import HWAddress, DPID
from pyof.v0x01.common.phy_port import PhyPort, PortConfig, PortState
from pyof.v0x01.controller2switch.features_reply import FeaturesReply
from tests.test_struct import TestStruct
@@ -19,8 +19,9 @@ class TestFeaturesReply(TestStruct):
def _get_kwargs():
- return {'xid': 2, 'datapath_id': 1, 'n_buffers': 256, 'n_tables': 254,
- 'capabilities': 0x000000c7, 'actions': 4095, 'ports': _get_ports()}
+ return {'xid': 2, 'datapath_id': DPID('00:00:00:00:00:00:00:01'),
+ 'n_buffers': 256, 'n_tables': 254, 'capabilities': 0x000000c7,
+ 'actions': 4095, 'ports': _get_ports()}
def _get_ports():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_git_commit_hash",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"pylama",
"radon"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | colorama==0.4.6
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
mando==0.7.1
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pydocstyle==6.3.0
pyflakes==3.3.2
pylama==8.4.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
-e git+https://github.com/kytos/python-openflow.git@013e9c24bd4c234f0bf39210686a58af4c586d7d#egg=python_openflow
radon==6.0.1
six==1.17.0
snowballstemmer==2.2.0
tomli==2.2.1
typing_extensions==4.13.0
| name: python-openflow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- colorama==0.4.6
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- mando==0.7.1
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pydocstyle==6.3.0
- pyflakes==3.3.2
- pylama==8.4.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- radon==6.0.1
- six==1.17.0
- snowballstemmer==2.2.0
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/python-openflow
| [
"tests/v0x01/test_controller2switch/test_features_reply.py::TestFeaturesReply::test_pack",
"tests/v0x01/test_controller2switch/test_features_reply.py::TestFeaturesReply::test_unpack"
]
| [
"tests/v0x01/test_controller2switch/test_features_reply.py::TestStruct::test_minimum_size",
"tests/v0x01/test_controller2switch/test_features_reply.py::TestStruct::test_pack",
"tests/v0x01/test_controller2switch/test_features_reply.py::TestStruct::test_unpack"
]
| [
"tests/v0x01/test_controller2switch/test_features_reply.py::TestFeaturesReply::test_minimum_size"
]
| []
| MIT License | 805 | [
"pyof/foundation/basic_types.py",
"pyof/v0x01/controller2switch/features_reply.py"
]
| [
"pyof/foundation/basic_types.py",
"pyof/v0x01/controller2switch/features_reply.py"
]
|
|
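The rewritten `DPID.pack()` above folds the old per-byte loop into a single `struct.pack('!8B', ...)` call and short-circuits when handed another `DPID` instance. A standalone check of the encoding, outside the pyof class hierarchy:

```python
import struct

def pack_dpid(value):
    # '00:00:00:00:00:00:00:01' -> b'\x00\x00\x00\x00\x00\x00\x00\x01'
    return struct.pack('!8B', *[int(v, 16) for v in value.split(':')])

packed = pack_dpid('00:00:00:00:00:00:00:01')
assert packed == b'\x00\x00\x00\x00\x00\x00\x00\x01'
assert len(packed) == 8

# round-trip back into the colon-separated display form
parts = struct.unpack('!8B', packed)
assert ':'.join('{:02x}'.format(b) for b in parts) == '00:00:00:00:00:00:00:01'
```

Packing the string form this way is exactly what lets the updated `_get_kwargs()` fixture pass `DPID('00:00:00:00:00:00:00:01')` and round-trip through `test_pack`/`test_unpack`.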
napjon__krisk-53 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | 2016-10-12 05:48:29 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/53?src=pr) is 92.03% (diff: 71.42%)
> Merging [#53](https://codecov.io/gh/napjon/krisk/pull/53?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will decrease coverage by **0.84%**
```diff
@@            0.2-develop     #53     diff @@
=============================================
  Files                8        8
  Lines              351      364      +13
  Methods              0        0
  Messages             0        0
  Branches            50       54       +4
=============================================
+ Hits               326      335       +9
- Misses              17       18       +1
- Partials             8       11       +3
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [19fb690...a83683d](https://codecov.io/gh/napjon/krisk/compare/19fb69026ff1339649fac5ad82548ccbdc7b8d19...a83683dc71a8efa67aeb5a3367d22d2d43823b4e?src=pr) | diff --git a/krisk/plot/api.py b/krisk/plot/api.py
index 281688f..dd4437d 100644
--- a/krisk/plot/api.py
+++ b/krisk/plot/api.py
@@ -9,7 +9,10 @@ def bar(df,
stacked=False,
annotate=None,
full=False,
- trendline=False):
+ trendline=False,
+ sort_on='index',
+ sort_c_on=None,
+ ascending=True):
"""
Parameters
----------
@@ -17,33 +20,42 @@ def bar(df,
data to be used for the chart
x: string
columns to be used as category axis
- y: string, default to None
+ y: string, default None
if None, use count of category value. otherwise aggregate based on y columns
- category: string, default to None
+ category: string, default None
another grouping columns inside x-axis
- how: string, default to None
+ how: string, default None
to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
reduced operations.
- stacked: Boolean, default to False.
+ stacked: Boolean, default False.
Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default to None
+ annotate: string, {'all',True} default None
if True, annotate value on top of the plot element. If stacked is also True, annotate the
last category. if 'all' and stacked, annotate all category
- full: boolean, default to False.
+ full: boolean, default False.
If true, set to full area stacked chart. Only work if stacked is True.
- trendline: boolean, default to False.
+ trendline: boolean, default False.
If true, add line that connected the bars. Only work if not category, category but stacked,
- or not full.
+ or not full.
+ sort_on: {'index', 'values', int, 'count', 'mean', 'std', 'min', '25%', '50%', '75%', 'max'},
+ default 'index'.
+ Add sort mode. Only work when c is None.
+ If index, sort index on lexicographical order. use as s.sort_index()
+ if values, sort based on values. Use as s.sort_values()
+ If string, deviation from value provided by pd.Series.describe()
+ if integer, treat as value and deviate from that value
+ sort_c_on: string, default None.
+ specify a category as basis sort value if c is specified. Must be specified when use
+ sort_on other than default value.
+ ascending: boolean, default True
+ sort ascending vs. descending
Returns
-------
Chart Object
"""
-
- # TODO: add optional argument trendline
-
return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
- trendline=trendline,
+ trendline=trendline, sort_on=sort_on, sort_c_on=sort_c_on, ascending=ascending,
annotate='top' if annotate == True else annotate)
@@ -56,7 +68,10 @@ def line(df,
area=False,
annotate=None,
full=False,
- smooth=False):
+ smooth=False,
+ sort_on='index',
+ sort_c_on=None,
+ ascending=True):
"""
Parameters
----------
@@ -64,29 +79,41 @@ def line(df,
data to be used for the chart
x: string
columns to be used as category axis
- y: string, default to None
+ y: string, default None
if None, use count of category value. otherwise aggregate based on y columns
- c: string, default to None
+ c: string, default None
category column inside x-axis
- how: string, default to None
+ how: string, default None
to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
reduced operations.
- stacked: Boolean, default to False.
+ stacked: Boolean, default False.
Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default to None
+ annotate: string, {'all',True} default None
if True, annotate value on top of the plot element. If stacked is also True, annotate the last
category. if 'all' and stacked, annotate all category
- full: boolean, default to False.
+ full: boolean, default False.
If true, set to full area stacked chart. Only work if stacked is True.
- smooth: boolean, default to False.
+ smooth: boolean, default False.
If true, smooth the line.
-
+ sort_on: {'index', 'values', int, 'count', 'mean', 'std', 'min', '25%', '50%', '75%', 'max'},
+ default 'index'.
+ Add sort mode. Only work when c is None.
+ If index, sort index on lexicographical order. use as s.sort_index()
+ if values, sort based on values. Use as s.sort_values()
+ If string, deviation from value provided by pd.Series.describe()
+ if integer, treat as value and deviate from that value
+ sort_c_on: string, default None.
+ specify a category as basis sort value if c is specified. Must be specified when use
+ sort_on other than default value.
+ ascending: boolean, default True
+ sort ascending vs. descending
+
Returns
-------
Chart Object
"""
return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,full=full,
- smooth=smooth,
+ smooth=smooth, sort_on=sort_on, sort_c_on=sort_c_on, ascending=ascending,
annotate='top' if annotate == True else annotate)
@@ -105,18 +132,18 @@ def hist(df,
data to be used for the chart
x: string
columns to be used as category axis
- c: string, default to None
+ c: string, default None
another grouping columns inside x-axis
- bins: int, default to 10
+ bins: int, default 10
Set number of bins in histogram
- normed: boolean, default to False
+ normed: boolean, default False
Whether normalize the histogram
- stacked: Boolean, default to False.
+ stacked: Boolean, default False.
Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default to None
+ annotate: string, {'all',True} default None
if True, annotate value on top of the plot element. If stacked is also True, annotate the last
category. if 'all' and stacked, annotate all category
- density: boolean, default to False.
+ density: boolean, default False.
Whether to add density to the plot
Returns
@@ -136,12 +163,12 @@ def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
data to be used for the chart
x,y: string, columns in pd.DataFrame
Used as coordinate in scatter chart
- s: string, columns in pd.DataFrame default to None
+ s: string, columns in pd.DataFrame default None
Used as sizing value of the scatter points
- c: string, default to None
+ c: string, default None
column used as grouping color category
saturation
- size_px: tuple, default to (10,70)
+ size_px: tuple, default (10,70)
boundary size, lower and upper limit in pixel for min-max scatter points
@@ -149,5 +176,4 @@ def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
-------
Chart Object
"""
- #TODO add saturation
return make_chart(df,type='scatter',x=x,y=y,s=s,c=c,saturate=saturate,size_px=size_px)
diff --git a/krisk/plot/bar_line.py b/krisk/plot/bar_line.py
index 73ffa54..931a83e 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/bar_line.py
@@ -95,11 +95,7 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
else:
raise AssertionError('Density must either stacked category, or not category')
- series.append(density)
-
-
-
-
+ series.append(density)
def get_bar_line_data(df, x, c, y, **kwargs):
@@ -119,11 +115,29 @@ def get_bar_line_data(df, x, c, y, **kwargs):
else:
data = df[x].value_counts()
+ #Specify sort_on and order method
+ sort_on = kwargs['sort_on']
+ descr_keys = pd.Series([0]).describe().keys().tolist()
+
+ if isinstance(sort_on, str):
+ assert sort_on in ['index','values'] + descr_keys
+
+ if sort_on == 'index':
+ data.sort_index(inplace=True, ascending=kwargs['ascending'])
+ else:
+ if sort_on != 'values':
+ val_deviation = data.describe().loc[sort_on] if isinstance(sort_on, str) else sort_on
+ data = data - val_deviation
+ if c:
+ assert kwargs['sort_c_on'] is not None
+ data.sort_values(kwargs['sort_c_on'], inplace=True, ascending=kwargs['ascending'])
+ else:
+ data.sort_values(inplace=True, ascending=kwargs['ascending'])
+ # Stacked when category
if c and kwargs['stacked'] and kwargs['full']:
data = data.div(data.sum(1),axis=0)
-
return data
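The deviation-based ordering that `get_bar_line_data` gains above can be exercised on its own. Below is a minimal pandas sketch of that logic; the sample series is made up, and only the no-category path is shown:

```python
import pandas as pd

# Made-up aggregate standing in for df.groupby(x)[y].mean().
data = pd.Series([30, 10, 20], index=['b', 'c', 'a'])

sort_on = 'mean'   # 'index', 'values', or any pd.Series.describe() key
ascending = True

if sort_on == 'index':
    data = data.sort_index(ascending=ascending)
else:
    if sort_on != 'values':
        # Shift values by a summary statistic (or a raw number) first.
        val_deviation = (data.describe().loc[sort_on]
                         if isinstance(sort_on, str) else sort_on)
        data = data - val_deviation
    data = data.sort_values(ascending=ascending)

print(data)  # c: -10.0, a: 0.0, b: 10.0 -- deviations from the mean
```

Through the public API this corresponds to a call such as `kk.line(df, 'year', y='pop', how='mean', c='continent', sort_on='mean', sort_c_on='Americas')`, as exercised by the test patch below.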
| Add Bar Mode for Sequential, Diverging, and Qualitative
Add 'mode' as a keyword parameter for the bar chart.
* If None, treat the bar mode as "Qualitative" and sort in lexicographical order.
* If Boolean, treat the bar as "Sequential"; the value controls whether the sort order is ascending.
* If a string, use either the median or the mean and show the deviation from that value.
* If an integer, treat it as a value and show the deviation from that value. | napjon/krisk | diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index ebbd7f5..26fc661 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -103,6 +103,25 @@ def test_full_bar_line(gapminder):
assert remove_name_label(bar).option == remove_name_label(line).option == true_option
+def test_sort_bar_line(gapminder):
+ p = kk.line(gapminder,'year', y='pop', how='mean',c='continent', sort_on='mean', sort_c_on='Americas')
+
+ assert p.option['xAxis']['data'] == [1952, 1957, 1962, 1967, 1972, 1977, 1982, 1987, 1992, 1997, 2002, 2007]
+ assert p.option['legend']['data'] == ['Africa', 'Americas', 'Asia', 'Europe', 'Oceania']
+ assert p.option['series'][0] == {'data': [-10595881.167,
+ -9604550.167,
+ -8874458.167,
+ -7114907.167,
+ -5114619.167,
+ -2722602.167,
+ 158346.833,
+ 3379549.833,
+ 6422966.833,
+ 9196608.833,
+ 11411735.833,
+ 13457809.833],
+ 'name': 'Africa',
+ 'type': 'line'}
def test_hist(gapminder):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@19fb69026ff1339649fac5ad82548ccbdc7b8d19#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_sort_bar_line"
]
| []
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_trendline",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_smooth_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_density",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| BSD 3-Clause "New" or "Revised" License | 806 | [
"krisk/plot/bar_line.py",
"krisk/plot/api.py"
]
| [
"krisk/plot/bar_line.py",
"krisk/plot/api.py"
]
|
zalando-stups__senza-393 | 5df821c7b09729f59debf89c0ff1e4068340cbc1 | 2016-10-12 12:15:38 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/elastic_load_balancer.py b/senza/components/elastic_load_balancer.py
index 1a0fdb4..68972bd 100644
--- a/senza/components/elastic_load_balancer.py
+++ b/senza/components/elastic_load_balancer.py
@@ -76,13 +76,11 @@ def get_ssl_cert(subdomain, main_zone, configuration, account_info: AccountArgum
return ssl_cert
-def get_listeners(subdomain, main_zone, configuration,
- account_info: AccountArguments):
- ssl_cert = get_ssl_cert(subdomain, main_zone, configuration, account_info)
+def get_listeners(configuration):
return [
{
"PolicyNames": [],
- "SSLCertificateId": ssl_cert,
+ "SSLCertificateId": configuration.get('SSLCertificateId'),
"Protocol": "HTTPS",
"InstancePort": configuration["HTTPPort"],
"LoadBalancerPort": 443
@@ -90,6 +88,16 @@ def get_listeners(subdomain, main_zone, configuration,
]
+def resolve_ssl_certificates(listeners, subdomain, main_zone, account_info):
+ new_listeners = []
+ for listener in listeners:
+ if listener.get('Protocol') in ('HTTPS', 'SSL'):
+ ssl_cert = get_ssl_cert(subdomain, main_zone, listener, account_info)
+ listener['SSLCertificateId'] = ssl_cert
+ new_listeners.append(listener)
+ return new_listeners
+
+
def component_elastic_load_balancer(definition,
configuration: dict,
args: TemplateArguments,
@@ -123,7 +131,8 @@ def component_elastic_load_balancer(definition,
subdomain = domain['Subdomain']
main_zone = domain['Zone'] # type: str
- listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration, account_info)
+ listeners = configuration.get('Listeners') or get_listeners(configuration)
+ listeners = resolve_ssl_certificates(listeners, subdomain, main_zone, account_info)
health_check_protocol = configuration.get('HealthCheckProtocol') or 'HTTP'
| Make property overwriting for senza components less destructive
When using a senza component to build a stack, it would be great if it was possible to overwrite properties in a less destructive way.
### Expected Behavior
We are using the default component `WeightedDnsElasticLoadBalancer`. Now we want to open port 80 in addition to port 443 in order to allow our application to redirect users from http to https. We want to keep all of the configuration and extend it with one additional entry.
### Actual Behavior
Currently this is not possible, because we can only use the option to overwrite the `Listeners` property like this:
```
- AppLoadBalancer:
Type: Senza::WeightedDnsElasticLoadBalancer
...
Listeners:
- LoadBalancerPort: 80
Protocol: HTTP
InstancePort: 80
InstanceProtocol: HTTP
- LoadBalancerPort: 443
Protocol: HTTPS
InstancePort: 80
InstanceProtocol: HTTP
SSLCertificateId: "????"
```
Doing this, we completely miss out on the ease of use that the component provides with regard to selecting the correct SSL certificate, because our definition completely overwrites the one senza creates.
--
As a solution it would be nice if we could either flag our `Listeners` entry as "these are additional, not a replacement" or alternatively have a way of tell senza to inject the SSLCertificateId. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index dfbf912..a7fe6f0 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -198,6 +198,31 @@ def test_component_load_balancer_http_only(monkeypatch):
assert 'Bar' == result["Resources"]["test_lb"]["Properties"]["Listeners"][0]["Foo"]
+def test_component_load_balancer_listeners_ssl(monkeypatch):
+ configuration = {
+ "Name": "test_lb",
+ "SecurityGroups": "",
+ "HTTPPort": "9999",
+ "Listeners": [{"Protocol": "SSL"}]
+ }
+ info = {'StackName': 'foobar', 'StackVersion': '0.1'}
+ definition = {"Resources": {}}
+
+ args = MagicMock()
+ args.region = "foo"
+
+ mock_string_result = MagicMock()
+ mock_string_result.return_value = "foo"
+ monkeypatch.setattr('senza.components.elastic_load_balancer.resolve_security_groups', mock_string_result)
+
+ get_ssl_cert = MagicMock()
+ get_ssl_cert.return_value = 'my-ssl-arn'
+ monkeypatch.setattr('senza.components.elastic_load_balancer.get_ssl_cert', get_ssl_cert)
+
+ result = component_elastic_load_balancer(definition, configuration, args, info, False, MagicMock())
+ assert 'my-ssl-arn' == result["Resources"]["test_lb"]["Properties"]["Listeners"][0]["SSLCertificateId"]
+
+
def test_component_load_balancer_namelength(monkeypatch):
configuration = {
"Name": "test_lb",
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
raven==6.10.0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@5df821c7b09729f59debf89c0ff1e4068340cbc1#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- raven==6.10.0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_load_balancer_listeners_ssl"
]
| [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_cert_arn",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2",
"tests/test_components.py::test_max_description_length",
"tests/test_components.py::test_component_load_balancer_default_internal_scheme",
"tests/test_components.py::test_component_load_balancer_v2_default_internal_scheme"
]
| []
| Apache License 2.0 | 807 | [
"senza/components/elastic_load_balancer.py"
]
| [
"senza/components/elastic_load_balancer.py"
]
|
|
laterpay__laterpay-client-python-85 | a4738be03b9cc0680d24466b7eefe5fdde6b7d2d | 2016-10-12 13:56:38 | a4738be03b9cc0680d24466b7eefe5fdde6b7d2d | coveralls:
[](https://coveralls.io/builds/8301375)
Coverage increased (+0.03%) to 96.838% when pulling **24133bcd29248dc6ffe3b280de287fa99d356153 on bugfix/84** into **a4738be03b9cc0680d24466b7eefe5fdde6b7d2d on develop**.
suligap: Looks good. Could you add a relevant entry to the CHANGELOG please?
MarkusH: This might actually not finally solve the issue (need to look into it again), but we do some more casting in `sort_params()`. This needs to be reworked.
coveralls:
[](https://coveralls.io/builds/8321514)
Coverage decreased (-0.03%) to 96.787% when pulling **60c77ad543749d54ee27b625bc7d5dccc1828198 on bugfix/84** into **a4738be03b9cc0680d24466b7eefe5fdde6b7d2d on develop**.
coveralls:
[](https://coveralls.io/builds/8338167)
Coverage decreased (-0.03%) to 96.787% when pulling **5adc95c90fa7b86fd8debd47d347339963ec7ffc on bugfix/84** into **a4738be03b9cc0680d24466b7eefe5fdde6b7d2d on develop**.
coveralls:
[](https://coveralls.io/builds/8623388)
Coverage decreased (-0.03%) to 96.787% when pulling **820e3994a2eee44e93658ef588cdcf1739c88690 on bugfix/84** into **a4738be03b9cc0680d24466b7eefe5fdde6b7d2d on develop**.
| diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8484c44..532fd0e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,11 @@
# Changelog
-## Unreleased
+## 4.6.0 (under development)
+
+* Fixed encoding issues when passing byte string parameters on Python 3
+ ([#84](https://github.com/laterpay/laterpay-client-python/issues/84))
+
## 4.5.0
diff --git a/laterpay/signing.py b/laterpay/signing.py
index 24b1bf2..347aa31 100644
--- a/laterpay/signing.py
+++ b/laterpay/signing.py
@@ -53,12 +53,8 @@ def sort_params(param_dict):
for name, value_list in six.iteritems(param_dict):
if isinstance(value_list, (list, tuple)):
for value in value_list:
- if not isinstance(value, six.string_types):
- value = str(value)
param_list.append((name, value))
else:
- if not isinstance(value_list, six.string_types):
- value_list = str(value_list)
param_list.append((name, value_list))
return sorted(param_list)
@@ -127,10 +123,13 @@ def create_base_message(params, url, method='POST'):
values_str = []
- # If any non six.string_types objects, ``str()`` them.
for value in values:
- if not isinstance(value, six.string_types):
+ if not isinstance(value, (six.string_types, six.binary_type)):
+ # If any non-string or non-bytes like objects, ``str()`` them.
value = str(value)
+ if six.PY3 and isinstance(value, six.binary_type):
+ # Issue #84, decode byte strings before using them on Python 3
+ value = value.decode()
values_str.append(value)
data[key] = [quote(compat.encode_if_unicode(value_str), safe='') for value_str in values_str]
| Decode byte strings before using them during signing
On Python 3, when signing a request containing a byte string (`b'foo'`), the value is cast to a string, resulting in `"b'foo'"` instead of being properly decoded to `'foo'`.
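A minimal, runnable sketch of the decode step the patch above adds to `create_base_message`:

```python
import six

def normalize(value):
    # Mirrors the fixed casting: str() only non-string/non-bytes values,
    # then decode byte strings on Python 3 instead of str()-ing them.
    if not isinstance(value, (six.string_types, six.binary_type)):
        value = str(value)
    if six.PY3 and isinstance(value, six.binary_type):
        value = value.decode()
    return value

assert normalize(b'foo') == 'foo'   # previously became "b'foo'" on Python 3
assert normalize(42) == '42'
assert normalize('bar') == 'bar'
```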
| laterpay/laterpay-client-python | diff --git a/tests/test_signing.py b/tests/test_signing.py
index ecbff1f..2f0fe02 100644
--- a/tests/test_signing.py
+++ b/tests/test_signing.py
@@ -25,7 +25,7 @@ class TestSigningHelper(unittest.TestCase):
)
def test_create_message_bytestrings(self):
- params = {'parĄm1': 'valuĘ'}
+ params = {b'par\xc4\x84m1': b'valu\xc4\x98'}
url = 'https://endpoint.com/ąpi'
msg = signing.create_base_message(params, url)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8==2.6.0",
"coverage==4.1",
"pydocstyle==1.0.0",
"furl==0.4.95",
"mock==2.0.0",
"responses==0.5.1"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2021.5.30
charset-normalizer==2.0.12
cookies==2.2.1
coverage==4.1
flake8==2.6.0
furl==0.4.95
idna==3.10
-e git+https://github.com/laterpay/laterpay-client-python.git@a4738be03b9cc0680d24466b7eefe5fdde6b7d2d#egg=laterpay_client
mccabe==0.5.3
mock==2.0.0
orderedmultidict==1.0.1
pbr==6.1.1
pycodestyle==2.0.0
pydocstyle==1.0.0
pyflakes==1.2.3
requests==2.27.1
responses==0.5.1
six==1.17.0
urllib3==1.26.20
| name: laterpay-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- charset-normalizer==2.0.12
- cookies==2.2.1
- coverage==4.1
- flake8==2.6.0
- furl==0.4.95
- idna==3.10
- mccabe==0.5.3
- mock==2.0.0
- orderedmultidict==1.0.1
- pbr==6.1.1
- pycodestyle==2.0.0
- pydocstyle==1.0.0
- pyflakes==1.2.3
- requests==2.27.1
- responses==0.5.1
- six==1.17.0
- urllib3==1.26.20
prefix: /opt/conda/envs/laterpay-client-python
| [
"tests/test_signing.py::TestSigningHelper::test_create_message_bytestrings"
]
| []
| [
"tests/test_signing.py::TestSigningHelper::test_create_message_sorting_and_combining_params",
"tests/test_signing.py::TestSigningHelper::test_create_message_unicode",
"tests/test_signing.py::TestSigningHelper::test_create_message_wrong_method",
"tests/test_signing.py::TestSigningHelper::test_normalise_param_structure",
"tests/test_signing.py::TestSigningHelper::test_sign",
"tests/test_signing.py::TestSigningHelper::test_sign_and_encode",
"tests/test_signing.py::TestSigningHelper::test_signing_with_item",
"tests/test_signing.py::TestSigningHelper::test_url_verification",
"tests/test_signing.py::TestSigningHelper::test_verify_invalid_unicode_signature",
"tests/test_signing.py::TestSigningHelper::test_verify_str_signature",
"tests/test_signing.py::TestSigningHelper::test_verify_unicode_signature"
]
| []
| MIT License | 808 | [
"CHANGELOG.md",
"laterpay/signing.py"
]
| [
"CHANGELOG.md",
"laterpay/signing.py"
]
|
softlayer__softlayer-python-776 | 7aca9ce012fb449117e3099d0840afce1704cdd2 | 2016-10-12 16:50:10 | 7aca9ce012fb449117e3099d0840afce1704cdd2 | diff --git a/.gitignore b/.gitignore
index 41170939..438f1d70 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,5 @@ docs/_build/*
build/*
dist/*
*.egg-info
+.cache
+
diff --git a/SoftLayer/fixtures/SoftLayer_Virtual_Guest.py b/SoftLayer/fixtures/SoftLayer_Virtual_Guest.py
index 10440668..966e8a40 100644
--- a/SoftLayer/fixtures/SoftLayer_Virtual_Guest.py
+++ b/SoftLayer/fixtures/SoftLayer_Virtual_Guest.py
@@ -33,10 +33,13 @@
"primaryNetworkComponent": {"speed": 10, "maxSpeed": 100},
'hourlyBillingFlag': False,
'createDate': '2013-08-01 15:23:45',
- 'blockDevices': [{"device": 0, 'mountType': 'Disk', "uuid": 1},
- {"device": 1, 'mountType': 'Disk'},
- {"device": 2, 'mountType': 'CD'},
- {"device": 3, 'mountType': 'Disk', "uuid": 3}],
+ 'blockDevices': [{'device': 0, 'mountType': 'Disk', "uuid": 1},
+ {'device': 1, 'mountType': 'Disk',
+ 'diskImage': {'type': {'keyName': 'SWAP'}}},
+ {'device': 2, 'mountType': 'CD'},
+ {'device': 3, 'mountType': 'Disk', 'uuid': 3},
+ {'device': 4, 'mountType': 'Disk', 'uuid': 4,
+ 'diskImage': {'metadataFlag': True}}],
'notes': 'notes',
'networkVlans': [{'networkSpace': 'PUBLIC',
'vlanNumber': 23,
diff --git a/SoftLayer/managers/vs.py b/SoftLayer/managers/vs.py
index 3a89eb8c..9d0741c0 100644
--- a/SoftLayer/managers/vs.py
+++ b/SoftLayer/managers/vs.py
@@ -721,19 +721,42 @@ def capture(self, instance_id, name, additional_disks=False, notes=None):
notes = "Some notes about this image"
result = mgr.capture(instance_id=12345, name=name, notes=notes)
"""
- vsi = self.get_instance(instance_id)
- disk_filter = lambda x: x['device'] == '0'
- # Skip disk 1 (swap partition) and CD mounts
- if additional_disks:
- disk_filter = lambda x: (str(x['device']) != '1' and
- x['mountType'] != 'CD')
+ vsi = self.client.call(
+ 'Virtual_Guest',
+ 'getObject',
+ id=instance_id,
+ mask="""id,
+ blockDevices[id,device,mountType,
+ diskImage[id,metadataFlag,type[keyName]]]""")
- disks = [block_device for block_device in vsi['blockDevices']
- if disk_filter(block_device)]
+ disks_to_capture = []
+ for block_device in vsi['blockDevices']:
+
+ # We never want metadata disks
+ if utils.lookup(block_device, 'diskImage', 'metadataFlag'):
+ continue
+
+ # We never want swap devices
+ type_name = utils.lookup(block_device,
+ 'diskImage',
+ 'type',
+ 'keyName')
+ if type_name == 'SWAP':
+ continue
+
+ # We never want CD images
+ if block_device['mountType'] == 'CD':
+ continue
+
+ # Only use the first block device if we don't want additional disks
+ if not additional_disks and str(block_device['device']) != '0':
+ continue
+
+ disks_to_capture.append(block_device)
return self.guest.createArchiveTransaction(
- name, disks, notes, id=instance_id)
+ name, disks_to_capture, notes, id=instance_id)
def upgrade(self, instance_id, cpus=None, memory=None,
nic_speed=None, public=True):
| slcli vs capture --all and metadata
### Expected Behavior
Capture all disks including metadata
### Actual Behavior
Invalid block device supplied. Please be sure to not include any metadata disk block devices.
### Environment Information
Operating System:
RHEL 6.7
softlayer-python version (`slcli --version`):
slcli (SoftLayer Command-line), version 4.1.1
Hello,
I am trying to automate the capture of several VSIs. I have two devices with multiple disks and a metadata block device.
The following slcli cmd is failing with the behavior above:
slcli vs capture --name="description" --note="additional notes" 17800945 --all True
Is it possible to identify only the disks I want to capture? For example, capture disks 0, 2, and 4 while excluding disk 7 (the metadata disk).
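The patched `capture()` above does this filtering automatically. A standalone sketch of the selection logic, with made-up block devices and a simplified stand-in for `utils.lookup`:

```python
def lookup(dct, *keys):
    # Simplified stand-in for SoftLayer.utils.lookup.
    for key in keys:
        dct = (dct or {}).get(key)
        if dct is None:
            return None
    return dct

block_devices = [
    {'device': 0, 'mountType': 'Disk'},
    {'device': 1, 'mountType': 'Disk',
     'diskImage': {'type': {'keyName': 'SWAP'}}},
    {'device': 2, 'mountType': 'CD'},
    {'device': 4, 'mountType': 'Disk'},
    {'device': 7, 'mountType': 'Disk',
     'diskImage': {'metadataFlag': True}},
]

additional_disks = True   # the --all flag
to_capture = []
for dev in block_devices:
    if lookup(dev, 'diskImage', 'metadataFlag'):
        continue                      # never capture the metadata disk
    if lookup(dev, 'diskImage', 'type', 'keyName') == 'SWAP':
        continue                      # never capture swap
    if dev['mountType'] == 'CD':
        continue                      # never capture CD mounts
    if not additional_disks and str(dev['device']) != '0':
        continue
    to_capture.append(dev)

assert [d['device'] for d in to_capture] == [0, 4]
```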
Thanks | softlayer/softlayer-python | diff --git a/tests/managers/vs_tests.py b/tests/managers/vs_tests.py
index a0cecb20..fb1a2c2a 100644
--- a/tests/managers/vs_tests.py
+++ b/tests/managers/vs_tests.py
@@ -615,7 +615,7 @@ def test_captures(self):
expected = fixtures.SoftLayer_Virtual_Guest.createArchiveTransaction
self.assertEqual(result, expected)
- args = ('a', [], None)
+ args = ('a', [{'device': 0, 'uuid': 1, 'mountType': 'Disk'}], None)
self.assert_called_with('SoftLayer_Virtual_Guest',
'createArchiveTransaction',
args=args,
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 3
} | 5.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"tools/test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
coverage==6.2
distlib==0.3.9
docutils==0.18.1
filelock==3.4.1
fixtures==4.0.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pbr==6.1.1
platformdirs==2.4.0
pluggy==1.0.0
prettytable==2.5.0
prompt-toolkit==3.0.36
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
-e git+https://github.com/softlayer/softlayer-python.git@7aca9ce012fb449117e3099d0840afce1704cdd2#egg=SoftLayer
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
testtools==2.6.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
wcwidth==0.2.13
zipp==3.6.0
| name: softlayer-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- click==8.0.4
- coverage==6.2
- distlib==0.3.9
- docutils==0.18.1
- filelock==3.4.1
- fixtures==4.0.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pbr==6.1.1
- platformdirs==2.4.0
- pluggy==1.0.0
- prettytable==2.5.0
- prompt-toolkit==3.0.36
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testtools==2.6.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- wcwidth==0.2.13
- zipp==3.6.0
prefix: /opt/conda/envs/softlayer-python
| [
"tests/managers/vs_tests.py::VSTests::test_captures"
]
| []
| [
"tests/managers/vs_tests.py::VSTests::test_cancel_instance",
"tests/managers/vs_tests.py::VSTests::test_capture_additional_disks",
"tests/managers/vs_tests.py::VSTests::test_change_port_speed_private",
"tests/managers/vs_tests.py::VSTests::test_change_port_speed_public",
"tests/managers/vs_tests.py::VSTests::test_create_instance",
"tests/managers/vs_tests.py::VSTests::test_create_instances",
"tests/managers/vs_tests.py::VSTests::test_create_verify",
"tests/managers/vs_tests.py::VSTests::test_edit_blank",
"tests/managers/vs_tests.py::VSTests::test_edit_full",
"tests/managers/vs_tests.py::VSTests::test_edit_metadata",
"tests/managers/vs_tests.py::VSTests::test_edit_tags",
"tests/managers/vs_tests.py::VSTests::test_edit_tags_blank",
"tests/managers/vs_tests.py::VSTests::test_generate_basic",
"tests/managers/vs_tests.py::VSTests::test_generate_datacenter",
"tests/managers/vs_tests.py::VSTests::test_generate_dedicated",
"tests/managers/vs_tests.py::VSTests::test_generate_image_id",
"tests/managers/vs_tests.py::VSTests::test_generate_missing",
"tests/managers/vs_tests.py::VSTests::test_generate_monthly",
"tests/managers/vs_tests.py::VSTests::test_generate_multi_disk",
"tests/managers/vs_tests.py::VSTests::test_generate_network",
"tests/managers/vs_tests.py::VSTests::test_generate_no_disks",
"tests/managers/vs_tests.py::VSTests::test_generate_os_and_image",
"tests/managers/vs_tests.py::VSTests::test_generate_post_uri",
"tests/managers/vs_tests.py::VSTests::test_generate_private_network_only",
"tests/managers/vs_tests.py::VSTests::test_generate_private_vlan",
"tests/managers/vs_tests.py::VSTests::test_generate_public_vlan",
"tests/managers/vs_tests.py::VSTests::test_generate_single_disk",
"tests/managers/vs_tests.py::VSTests::test_generate_sshkey",
"tests/managers/vs_tests.py::VSTests::test_generate_userdata",
"tests/managers/vs_tests.py::VSTests::test_get_create_options",
"tests/managers/vs_tests.py::VSTests::test_get_instance",
"tests/managers/vs_tests.py::VSTests::test_get_item_id_for_upgrade",
"tests/managers/vs_tests.py::VSTests::test_list_instances",
"tests/managers/vs_tests.py::VSTests::test_list_instances_hourly",
"tests/managers/vs_tests.py::VSTests::test_list_instances_monthly",
"tests/managers/vs_tests.py::VSTests::test_list_instances_neither",
"tests/managers/vs_tests.py::VSTests::test_list_instances_with_filters",
"tests/managers/vs_tests.py::VSTests::test_reload_instance",
"tests/managers/vs_tests.py::VSTests::test_reload_instance_posturi_sshkeys",
"tests/managers/vs_tests.py::VSTests::test_reload_instance_with_new_os",
"tests/managers/vs_tests.py::VSTests::test_rescue",
"tests/managers/vs_tests.py::VSTests::test_resolve_ids_hostname",
"tests/managers/vs_tests.py::VSTests::test_resolve_ids_ip",
"tests/managers/vs_tests.py::VSTests::test_resolve_ids_ip_invalid",
"tests/managers/vs_tests.py::VSTests::test_resolve_ids_ip_private",
"tests/managers/vs_tests.py::VSTests::test_upgrade",
"tests/managers/vs_tests.py::VSTests::test_upgrade_blank",
"tests/managers/vs_tests.py::VSTests::test_upgrade_full",
"tests/managers/vs_tests.py::VSTests::test_upgrade_skips_location_based_prices",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_active_and_provisiondate",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_active_not_provisioned",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_active_provision_pending",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_active_reload",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_iter_20_incomplete",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_iter_four_complete",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_iter_once_complete",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_iter_two_incomplete",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_ready_iter_once_incomplete",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_reload_no_pending",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_reload_pending",
"tests/managers/vs_tests.py::VSWaitReadyGoTests::test_wait_interface"
]
| []
| MIT License | 809 | [
".gitignore",
"SoftLayer/fixtures/SoftLayer_Virtual_Guest.py",
"SoftLayer/managers/vs.py"
]
| [
".gitignore",
"SoftLayer/fixtures/SoftLayer_Virtual_Guest.py",
"SoftLayer/managers/vs.py"
]
|
|
pypa__setuptools_scm-108 | 5ef809f2d0406de1c2c120ec20a415d058387956 | 2016-10-12 20:32:05 | 79610468966055b8a2ca0e76b9b2fa59a241740e | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 35b1322..3239187 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,7 @@ v1.14.0
=======
* publish bdist_egg for python 2.6, 2.7 and 3.3-3.5
+* fix issue #107 - dont use node if it is None
v1.13.1
=======
diff --git a/setuptools_scm/version.py b/setuptools_scm/version.py
index 40bd89d..b255eef 100644
--- a/setuptools_scm/version.py
+++ b/setuptools_scm/version.py
@@ -106,7 +106,7 @@ def guess_next_dev_version(version):
def get_local_node_and_date(version):
- if version.exact:
+ if version.exact or version.node is None:
return version.format_choice("", "+d{time:%Y%m%d}")
else:
return version.format_choice("+n{node}", "+n{node}.d{time:%Y%m%d}")
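A standalone sketch of the guard added above, with a minimal stand-in for the version object; only one template string is returned per branch here for brevity, whereas `format_choice` picks between clean and dirty variants:

```python
class FakeVersion:
    # Minimal stand-in for setuptools_scm's version object.
    exact = False
    node = None        # empty history: the scm reports no node

def local_part(version):
    if version.exact or version.node is None:
        return "+d{time:%Y%m%d}"          # date only, no '+nnone'
    return "+n{node}.d{time:%Y%m%d}"

assert local_part(FakeVersion()) == "+d{time:%Y%m%d}"
```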
| empty history results in distance None
`1.11.1` gets `0.0` there;
recent releases get `0.1.dev0+nnone.d20161011`. | pypa/setuptools_scm | diff --git a/testing/test_functions.py b/testing/test_functions.py
index eae9e9c..f5e5d87 100644
--- a/testing/test_functions.py
+++ b/testing/test_functions.py
@@ -30,15 +30,15 @@ VERSIONS = {
@pytest.mark.parametrize('version,scheme,expected', [
('exact', 'guess-next-dev node-and-date', '1.1'),
- ('zerodistance', 'guess-next-dev node-and-date', '1.2.dev0+nNone'),
- ('dirty', 'guess-next-dev node-and-date', '1.2.dev0+nNone.dtime'),
- ('distance', 'guess-next-dev node-and-date', '1.2.dev3+nNone'),
- ('distancedirty', 'guess-next-dev node-and-date', '1.2.dev3+nNone.dtime'),
+ ('zerodistance', 'guess-next-dev node-and-date', '1.2.dev0'),
+ ('dirty', 'guess-next-dev node-and-date', '1.2.dev0+dtime'),
+ ('distance', 'guess-next-dev node-and-date', '1.2.dev3'),
+ ('distancedirty', 'guess-next-dev node-and-date', '1.2.dev3+dtime'),
('exact', 'post-release node-and-date', '1.1'),
- ('zerodistance', 'post-release node-and-date', '1.1.post0+nNone'),
- ('dirty', 'post-release node-and-date', '1.1.post0+nNone.dtime'),
- ('distance', 'post-release node-and-date', '1.1.post3+nNone'),
- ('distancedirty', 'post-release node-and-date', '1.1.post3+nNone.dtime'),
+ ('zerodistance', 'post-release node-and-date', '1.1.post0'),
+ ('dirty', 'post-release node-and-date', '1.1.post0+dtime'),
+ ('distance', 'post-release node-and-date', '1.1.post3'),
+ ('distancedirty', 'post-release node-and-date', '1.1.post3+dtime'),
])
def test_format_version(version, monkeypatch, scheme, expected):
version = VERSIONS[version]
diff --git a/testing/test_git.py b/testing/test_git.py
index 0819fad..c3ae002 100644
--- a/testing/test_git.py
+++ b/testing/test_git.py
@@ -33,6 +33,12 @@ def test_version_from_git(wd):
assert wd.version.startswith('0.2')
[email protected](108)
+def test_git_worktree(wd):
+ wd.write('test.txt', 'test2')
+ assert wd.version.startswith('0.1.dev0+d')
+
+
@pytest.mark.issue(86)
def test_git_dirty_notag(wd):
wd.commit_testfile()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 1.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"python setup.py egg_info"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/pypa/setuptools_scm.git@5ef809f2d0406de1c2c120ec20a415d058387956#egg=setuptools_scm
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: setuptools_scm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/setuptools_scm
| [
"testing/test_functions.py::test_format_version[zerodistance-guess-next-dev",
"testing/test_functions.py::test_format_version[dirty-guess-next-dev",
"testing/test_functions.py::test_format_version[distance-guess-next-dev",
"testing/test_functions.py::test_format_version[distancedirty-guess-next-dev",
"testing/test_functions.py::test_format_version[zerodistance-post-release",
"testing/test_functions.py::test_format_version[dirty-post-release",
"testing/test_functions.py::test_format_version[distance-post-release",
"testing/test_functions.py::test_format_version[distancedirty-post-release",
"testing/test_git.py::test_git_worktree"
]
| []
| [
"testing/test_functions.py::test_next_tag[1.1-1.2.dev0]",
"testing/test_functions.py::test_next_tag[1.2.dev-1.2.dev0]",
"testing/test_functions.py::test_next_tag[1.1a2-1.1a3.dev0]",
"testing/test_functions.py::test_format_version[exact-guess-next-dev",
"testing/test_functions.py::test_format_version[exact-post-release",
"testing/test_functions.py::test_dump_version_doesnt_bail_on_value_error",
"testing/test_functions.py::test_dump_version_works_with_pretend",
"testing/test_git.py::test_version_from_git",
"testing/test_git.py::test_git_dirty_notag",
"testing/test_git.py::test_find_files_stop_at_root_git",
"testing/test_git.py::test_alphanumeric_tags_match"
]
| []
| MIT License | 810 | [
"CHANGELOG.rst",
"setuptools_scm/version.py"
]
| [
"CHANGELOG.rst",
"setuptools_scm/version.py"
]
|
|
docker__docker-py-1255 | 008730c670afb2f88c7db308901586fb24f1a60c | 2016-10-12 23:07:38 | 9050e1c6e05b5b6807357def0aafc59e3b3ae378 | diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index b565732d..e1c7ad0c 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -453,8 +453,8 @@ def parse_host(addr, is_win32=False, tls=False):
"Bind address needs a port: {0}".format(addr))
if proto == "http+unix" or proto == 'npipe':
- return "{0}://{1}".format(proto, host)
- return "{0}://{1}:{2}{3}".format(proto, host, port, path)
+ return "{0}://{1}".format(proto, host).rstrip('/')
+ return "{0}://{1}:{2}{3}".format(proto, host, port, path).rstrip('/')
def parse_devices(devices):
| Client should tolerate trailing slashes in base_url
docker/compose#3869 | docker/docker-py | diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 2a2759d0..059c82d3 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -522,6 +522,11 @@ class ParseHostTest(base.BaseTestCase):
expected_result = 'https://myhost.docker.net:3348'
assert parse_host(host_value, tls=True) == expected_result
+ def test_parse_host_trailing_slash(self):
+ host_value = 'tcp://myhost.docker.net:2376/'
+ expected_result = 'http://myhost.docker.net:2376'
+ assert parse_host(host_value) == expected_result
+
class ParseRepositoryTagTest(base.BaseTestCase):
sha = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@008730c670afb2f88c7db308901586fb24f1a60c#egg=docker_py
docker-pycreds==0.2.1
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- docker-pycreds==0.2.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_trailing_slash"
]
| []
| [
"tests/unit/utils_test.py::DecoratorsTest::test_update_headers",
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_blkio_constraints",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_dns_opt",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_kernel_memory",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_mem_reservation",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_score_adj",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_pids_limit",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_userns_mode",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_alternate_env",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_with_equals_character",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls_tcp_proto",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_leading_dot_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_with_path_traversal",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 811 | [
"docker/utils/utils.py"
]
| [
"docker/utils/utils.py"
]
|
|
pypa__setuptools_scm-110 | 79610468966055b8a2ca0e76b9b2fa59a241740e | 2016-10-13 06:41:27 | 79610468966055b8a2ca0e76b9b2fa59a241740e | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 3239187..385ac83 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,3 +1,12 @@
+v1.14.1
+=======
+
+* fix #109: when detecting a dirty git workdir
+ don't consider untracked file
+ (this was a regression due to #86 in v1.13.1)
+* consider the distance 0 when the git node is unknown
+ (happens when you haven't commited anything)
+
v1.14.0
=======
diff --git a/setuptools_scm/git.py b/setuptools_scm/git.py
index 10280e5..aa326f3 100644
--- a/setuptools_scm/git.py
+++ b/setuptools_scm/git.py
@@ -30,7 +30,7 @@ class GitWorkdir(object):
return cls(real_wd)
def is_dirty(self):
- out, _, _ = self.do_ex("git status --porcelain")
+ out, _, _ = self.do_ex("git status --porcelain --untracked-files=no")
return bool(out)
def node(self):
@@ -50,7 +50,7 @@ def parse(root, describe_command=DEFAULT_DESCRIBE):
dirty = wd.is_dirty()
if rev_node is None:
- return meta('0.0', dirty=dirty)
+ return meta('0.0', distance=0, dirty=dirty)
out, err, ret = do_ex(describe_command, root)
if ret:
| untracked files in git are creating dirty state
In Mercurial and in Git prior to the fix for #86 released with setuptools_scm 1.13.1, untracked files would not cause a `dirty` state, but since 1.13.1, they do.
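A minimal sketch of the dirty check before and after the fix above; the transcripts that follow show the user-visible effect:

```python
import subprocess

def is_dirty(count_untracked=False):
    # Sketch of GitWorkdir.is_dirty(); --untracked-files=no is the flag
    # the patch adds to restore the pre-1.13.1 behaviour.
    cmd = ['git', 'status', '--porcelain']
    if not count_untracked:
        cmd.append('--untracked-files=no')
    out = subprocess.check_output(cmd)
    return bool(out.strip())
```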
```
$ git status
On branch master
Your branch is up-to-date with 'origin/master'.
Untracked files:
(use "git add <file>..." to include in what will be committed)
.tox/
nothing added to commit but untracked files present (use "git add" to track)
$ python -m pip install setuptools_scm==1.13.0
Collecting setuptools_scm==1.13.0
Downloading setuptools_scm-1.13.0-py2.py3-none-any.whl
Installing collected packages: setuptools-scm
Successfully installed setuptools-scm-1.13.0
$ python setup.py --quiet sdist
$ ls dist
rwt-2.13.tar.gz
$ python -m pip uninstall -y setuptools_scm
Uninstalling setuptools-scm-1.13.0:
Successfully uninstalled setuptools-scm-1.13.0
$ python setup.py --quiet sdist
$ ls dist
rwt-2.13.tar.gz rwt-2.14.dev0+ngd08ab93.d20161012.tar.gz
```
My opinion, and the expectation of environments built on the past behavior, is that untracked files shouldn't create a dirty state. Only modified files, or perhaps staged files, should cause the project to be detected as dirty. The `git describe` output seems to be the same before and after removing the untracked files:
```
$ git describe --tags --long
2.13-0-gd08ab93
$ rm -R .tox
$ git describe --tags --long
2.13-0-gd08ab93
``` | pypa/setuptools_scm | diff --git a/testing/test_git.py b/testing/test_git.py
index c3ae002..7179a57 100644
--- a/testing/test_git.py
+++ b/testing/test_git.py
@@ -14,7 +14,7 @@ def wd(wd):
def test_version_from_git(wd):
- assert wd.version == '0.0'
+ assert wd.version == '0.1.dev0'
wd.commit_testfile()
assert wd.version.startswith('0.1.dev1+')
@@ -34,8 +34,12 @@ def test_version_from_git(wd):
@pytest.mark.issue(108)
+@pytest.mark.issue(109)
def test_git_worktree(wd):
wd.write('test.txt', 'test2')
+ # untracked files don't change the state
+ assert wd.version == '0.1.dev0'
+ wd('git add test.txt')
assert wd.version.startswith('0.1.dev0+d')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"python setup.py egg_info"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/pypa/setuptools_scm.git@79610468966055b8a2ca0e76b9b2fa59a241740e#egg=setuptools_scm
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: setuptools_scm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/setuptools_scm
| [
"testing/test_git.py::test_version_from_git",
"testing/test_git.py::test_git_worktree"
]
| []
| [
"testing/test_git.py::test_git_dirty_notag",
"testing/test_git.py::test_find_files_stop_at_root_git",
"testing/test_git.py::test_alphanumeric_tags_match"
]
| []
| MIT License | 812 | [
"CHANGELOG.rst",
"setuptools_scm/git.py"
]
| [
"CHANGELOG.rst",
"setuptools_scm/git.py"
]
|
|
dask__dask-1657 | f14ec6a4ec0d4fb9ff2499ac53d0df4116465c8a | 2016-10-13 13:31:23 | f14ec6a4ec0d4fb9ff2499ac53d0df4116465c8a | diff --git a/.travis.yml b/.travis.yml
index 7baeae74e..9a5e9d5c5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,11 +3,11 @@ sudo: false
env:
matrix:
- - PYTHON=2.7 NUMPY=1.10.4 PANDAS=0.19.0 COVERAGE='true' XTRATESTARGS=
- - PYTHON=2.7 NUMPY=1.11.0 PANDAS=0.18.1 COVERAGE='false' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
- - PYTHON=3.3 NUMPY=1.9.2 PANDAS=0.18.1 COVERAGE='false' XTRATESTARGS=
- - PYTHON=3.4 NUMPY=1.10.4 PANDAS=0.18.0 COVERAGE='false' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
- - PYTHON=3.5 NUMPY=1.11.0 PANDAS=0.19.0 COVERAGE='false' XTRATESTARGS=
+ - PYTHON=2.7 NUMPY=1.10.4 PANDAS=0.19.0 COVERAGE='true' PARALLEL='false' XTRATESTARGS=
+ - PYTHON=2.7 NUMPY=1.11.0 PANDAS=0.18.1 COVERAGE='false' PARALLEL='true' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
+ - PYTHON=3.3 NUMPY=1.9.2 PANDAS=0.18.1 COVERAGE='false' PARALLEL='true' XTRATESTARGS=
+ - PYTHON=3.4 NUMPY=1.10.4 PANDAS=0.18.0 COVERAGE='false' PARALLEL='true' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
+ - PYTHON=3.5 NUMPY=1.11.0 PANDAS=0.19.0 COVERAGE='false' PARALLEL='true' XTRATESTARGS=
addons:
apt:
@@ -35,11 +35,16 @@ install:
- pip install blosc --upgrade
- pip install graphviz moto flake8
- if [[ $PYTHON < '3' ]]; then pip install git+https://github.com/Blosc/castra; fi
+ # For parallel testing (`-n` argument in XTRATESTARGS)
+ - pip install pytest-xdist
# Install dask
- pip install --no-deps -e .[complete]
script:
+ # Need to make test order deterministic when parallelizing tests, hence PYTHONHASHSEED
+ # (see https://github.com/pytest-dev/pytest-xdist/issues/63)
+ - if [[ $PARALLEL == 'true' ]]; then export XTRATESTARGS="-n3 $XTRATESTARGS"; export PYTHONHASHSEED=42; fi
- if [[ $COVERAGE == 'true' ]]; then coverage run $(which py.test) dask --runslow --doctest-modules --verbose $XTRATESTARGS; else py.test dask --runslow --verbose $XTRATESTARGS; fi
- flake8 dask
diff --git a/dask/array/core.py b/dask/array/core.py
index e37806fb7..551be8537 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -17,7 +17,7 @@ import warnings
from toolz.curried import (pipe, partition, concat, pluck, join, first,
memoize, map, groupby, valmap, accumulate, merge,
- reduce, interleave, sliding_window)
+ reduce, interleave, sliding_window, assoc)
import numpy as np
from . import chunk
@@ -205,7 +205,6 @@ def broadcast_dimensions(argpairs, numblocks, sentinels=(1, (1,)),
Parameters
----------
-
argpairs: iterable
name, ijk index pairs
numblocks: dict
@@ -217,7 +216,6 @@ def broadcast_dimensions(argpairs, numblocks, sentinels=(1, (1,)),
Examples
--------
-
>>> argpairs = [('x', 'ij'), ('y', 'ji')]
>>> numblocks = {'x': (2, 3), 'y': (3, 2)}
>>> broadcast_dimensions(argpairs, numblocks)
@@ -246,8 +244,8 @@ def broadcast_dimensions(argpairs, numblocks, sentinels=(1, (1,)),
g2 = dict((k, v - set(sentinels) if len(v) > 1 else v) for k, v in g.items())
- if consolidate is not None:
- g2 = valmap(consolidate, g2)
+ if consolidate:
+ return valmap(consolidate, g2)
if g2 and not set(map(len, g2.values())) == set([1]):
raise ValueError("Shapes do not align %s" % g)
@@ -447,7 +445,7 @@ def map_blocks(func, *args, **kwargs):
dtype: np.dtype
Datatype of resulting array
chunks: tuple (optional)
- chunk shape of resulting blocks if the function does not preserve shape
+ Chunk shape of resulting blocks if the function does not preserve shape
drop_axis: number or iterable (optional)
Dimensions lost by the function
new_axis: number or iterable (optional)
@@ -510,7 +508,7 @@ def map_blocks(func, *args, **kwargs):
>>> y.numblocks
(10,)
- If these must match (up to broadcasting rules) then we can map arbitrary
+ If these match (up to broadcasting rules) then we can map arbitrary
functions across blocks
>>> def func(a, b):
@@ -551,63 +549,95 @@ def map_blocks(func, *args, **kwargs):
if isinstance(new_axis, Number):
new_axis = [new_axis]
+ if drop_axis and new_axis:
+ raise ValueError("Can't specify drop_axis and new_axis together")
+
arrs = [a for a in args if isinstance(a, Array)]
args = [(i, a) for i, a in enumerate(args) if not isinstance(a, Array)]
- arginds = [(a, tuple(range(a.ndim))[::-1]) for a in arrs]
-
- numblocks = dict([(a.name, a.numblocks) for a, _ in arginds])
- argindsstr = list(concat([(a.name, ind) for a, ind in arginds]))
+ argpairs = [(a.name, tuple(range(a.ndim))[::-1]) for a in arrs]
+ numblocks = {a.name: a.numblocks for a in arrs}
+ arginds = list(concat(argpairs))
out_ind = tuple(range(max(a.ndim for a in arrs)))[::-1]
+ try:
+ spec = getargspec(func)
+ block_id = ('block_id' in spec.args or
+ 'block_id' in getattr(spec, 'kwonly_args', ()))
+ except:
+ block_id = False
+
+ if block_id:
+ kwargs['block_id'] = '__dummy__'
+
if args:
- dsk = top(partial_by_order, name, out_ind, *argindsstr,
+ dsk = top(partial_by_order, name, out_ind, *arginds,
numblocks=numblocks, function=func, other=args,
**kwargs)
else:
- dsk = top(func, name, out_ind, *argindsstr, numblocks=numblocks,
+ dsk = top(func, name, out_ind, *arginds, numblocks=numblocks,
**kwargs)
- # If func has block_id as an argument then swap out func
- # for func with block_id partialed in
- try:
- spec = getargspec(func)
- except:
- spec = None
- if spec:
- args = spec.args
- try:
- args += spec.kwonlyargs
- except AttributeError:
- pass
- if 'block_id' in args:
- for k in dsk.keys():
- dsk[k] = (partial(func, block_id=k[1:]),) + dsk[k][1:]
+ # If func has block_id as an argument, add it to the kwargs for each call
+ if block_id:
+ for k in dsk.keys():
+ dsk[k] = dsk[k][:-1] + (assoc(dsk[k][-1], 'block_id', k[1:]),)
- numblocks = list(arrs[0].numblocks)
+ if len(arrs) == 1:
+ numblocks = list(arrs[0].numblocks)
+ else:
+ dims = broadcast_dimensions(argpairs, numblocks)
+ numblocks = [b for (_, b) in reversed(list(dims.items()))]
if drop_axis:
+ if any(numblocks[i] > 1 for i in drop_axis):
+ raise ValueError("Can't drop an axis with more than 1 block. "
+ "Please use `atop` instead.")
dsk = dict((tuple(k for i, k in enumerate(k)
if i - 1 not in drop_axis), v)
for k, v in dsk.items())
numblocks = [n for i, n in enumerate(numblocks) if i not in drop_axis]
-
- if new_axis:
+ elif new_axis:
dsk, old_dsk = dict(), dsk
for key in old_dsk:
new_key = list(key)
for i in new_axis:
new_key.insert(i + 1, 0)
dsk[tuple(new_key)] = old_dsk[key]
- for i in sorted(new_axis, reverse=False):
+ for i in sorted(new_axis):
numblocks.insert(i, 1)
- if chunks is not None and chunks and not isinstance(chunks[0], tuple):
- chunks = [nb * (bs,) for nb, bs in zip(numblocks, chunks)]
- if chunks is not None:
- chunks = tuple(chunks)
+ if chunks:
+ if len(chunks) != len(numblocks):
+ raise ValueError("Provided chunks have {0} dims, expected {1} "
+ "dims.".format(len(chunks), len(numblocks)))
+ chunks2 = []
+ for i, (c, nb) in enumerate(zip(chunks, numblocks)):
+ if isinstance(c, tuple):
+ if not len(c) == nb:
+ raise ValueError("Dimension {0} has {1} blocks, "
+ "chunks specified with "
+ "{2} blocks".format(i, nb, len(c)))
+ chunks2.append(c)
+ else:
+ chunks2.append(nb * (c,))
else:
- chunks = broadcast_chunks(*[a.chunks for a in arrs])
+ if len(arrs) == 1:
+ chunks2 = list(arrs[0].chunks)
+ else:
+ try:
+ chunks2 = list(broadcast_chunks(*[a.chunks for a in arrs]))
+ except:
+ raise ValueError("Arrays in `map_blocks` don't align, can't "
+ "infer output chunks. Please provide "
+ "`chunks` kwarg.")
+ if drop_axis:
+ chunks2 = [c for (i, c) in enumerate(chunks2) if i not in drop_axis]
+ elif new_axis:
+ for i in sorted(new_axis):
+ chunks2.insert(i, (1,))
+
+ chunks = tuple(chunks2)
return Array(merge(dsk, *[a.dask for a in arrs]), name, chunks, dtype)
@@ -1726,19 +1756,49 @@ def common_blockdim(blockdims):
--------
>>> common_blockdim([(3,), (2, 1)])
- set([(2, 1)])
+ (2, 1)
+ >>> common_blockdim([(1, 2), (2, 1)])
+ (1, 1, 1)
>>> common_blockdim([(2, 2), (3, 1)]) # doctest: +SKIP
Traceback (most recent call last):
...
ValueError: Chunks do not align
"""
non_trivial_dims = set([d for d in blockdims if len(d) > 1])
- if len(non_trivial_dims) > 1:
- raise ValueError('Chunks do not align %s' % non_trivial_dims)
- elif non_trivial_dims:
- return non_trivial_dims
- else:
- return blockdims
+ if len(non_trivial_dims) == 1:
+ return first(non_trivial_dims)
+ if len(non_trivial_dims) == 0:
+ return max(blockdims, key=first)
+
+ if len(set(map(sum, non_trivial_dims))) > 1:
+ raise ValueError("Chunks do not add up to same value", blockdims)
+
+ # We have multiple non-trivial chunks on this axis
+ # e.g. (5, 2) and (4, 3)
+
+ # We create a single chunk tuple with the same total length
+ # that evenly divides both, e.g. (4, 1, 2)
+
+ # To accomplish this we walk down all chunk tuples together, finding the
+ # smallest element, adding it to the output, and subtracting it from all
+ # other elements and remove the element itself. We stop once we have
+ # burned through all of the chunk tuples.
+ # For efficiency's sake we reverse the lists so that we can pop off the end
+ rchunks = [list(ntd)[::-1] for ntd in non_trivial_dims]
+ total = sum(first(non_trivial_dims))
+ i = 0
+
+ out = []
+ while i < total:
+ m = min(c[-1] for c in rchunks)
+ out.append(m)
+ for c in rchunks:
+ c[-1] -= m
+ if c[-1] == 0:
+ c.pop()
+ i += m
+
+ return tuple(out)
def unify_chunks(*args):
@@ -1766,6 +1826,12 @@ def unify_chunks(*args):
chunkss = broadcast_dimensions(nameinds, blockdim_dict,
consolidate=common_blockdim)
+ max_parts = max(arg.npartitions for arg in args[::2])
+ nparts = np.prod(list(map(len, chunkss.values())))
+
+ if nparts >= max_parts * 10:
+ warnings.warn("Increasing number of chunks by factor of %d" %
+ (nparts / max_parts))
arrays = [a.rechunk(tuple(chunkss[j] if a.shape[n] > 1 else 1
for n, j in enumerate(i)))
for a, i in arginds]
diff --git a/dask/async.py b/dask/async.py
index 930133f8f..a26611eff 100644
--- a/dask/async.py
+++ b/dask/async.py
@@ -117,6 +117,9 @@ from __future__ import absolute_import, division, print_function
import sys
import traceback
+from toolz import identity
+
+from .compatibility import Queue
from .core import (istask, flatten, reverse_dict, get_dependencies, ishashable,
has_tasks)
from .context import _globals
@@ -251,7 +254,7 @@ def _execute_task(arg, cache, dsk=None):
return arg
-def execute_task(key, task, data, queue, get_id, raise_on_exception=False):
+def execute_task(key, task_info, dumps, loads, get_id, raise_on_exception=False):
"""
Compute task and handle all administration
@@ -260,23 +263,24 @@ def execute_task(key, task, data, queue, get_id, raise_on_exception=False):
_execute_task - actually execute task
"""
try:
+ task, data = loads(task_info)
result = _execute_task(task, data)
id = get_id()
- result = key, result, None, id
- except Exception as e:
- if raise_on_exception:
- raise
- exc_type, exc_value, exc_traceback = sys.exc_info()
- tb = ''.join(traceback.format_tb(exc_traceback))
- result = key, e, tb, None
- try:
- queue.put(result)
+ result = dumps((result, None, id))
except Exception as e:
if raise_on_exception:
raise
exc_type, exc_value, exc_traceback = sys.exc_info()
tb = ''.join(traceback.format_tb(exc_traceback))
- queue.put((key, e, tb, None))
+ try:
+ result = dumps((e, tb, None))
+ except Exception as e:
+ if raise_on_exception:
+ raise
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ tb = ''.join(traceback.format_tb(exc_traceback))
+ result = dumps((e, tb, None))
+ return key, result
def release_data(key, state, delete=True):
@@ -372,8 +376,10 @@ The main function of the scheduler. Get is the main entry point.
def get_async(apply_async, num_workers, dsk, result, cache=None,
- queue=None, get_id=default_get_id, raise_on_exception=False,
- rerun_exceptions_locally=None, callbacks=None, **kwargs):
+ get_id=default_get_id, raise_on_exception=False,
+ rerun_exceptions_locally=None, callbacks=None,
+ dumps=identity, loads=identity,
+ **kwargs):
""" Asynchronous get function
This is a general version of various asynchronous schedulers for dask. It
@@ -400,6 +406,11 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
rerun_exceptions_locally : bool, optional
Whether to rerun failing tasks in local process to enable debugging
(False by default)
+ dumps: callable, optional
+ Function to serialize task data and results to communicate between
+ worker and parent. Defaults to identity.
+ loads: callable, optional
+ Inverse function of `dumps`. Defaults to identity.
callbacks : tuple or list of tuples, optional
Callbacks are passed in as tuples of length 5. Multiple sets of
callbacks may be passed in as a list of tuples. For more information,
@@ -410,7 +421,7 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
threaded.get
"""
- assert queue
+ queue = Queue()
if callbacks is None:
callbacks = _globals['callbacks']
@@ -458,8 +469,10 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
data = dict((dep, state['cache'][dep])
for dep in get_dependencies(dsk, key))
# Submit
- apply_async(execute_task, args=[key, dsk[key], data, queue,
- get_id, raise_on_exception])
+ apply_async(execute_task,
+ args=(key, dumps((dsk[key], data)),
+ dumps, loads, get_id, raise_on_exception),
+ callback=queue.put)
# Seed initial tasks into the thread pool
while state['ready'] and len(state['running']) < num_workers:
@@ -467,7 +480,14 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
# Main loop, wait on tasks to finish, insert new ones
while state['waiting'] or state['ready'] or state['running']:
- key, res, tb, worker_id = queue.get()
+ key, res_info = queue.get()
+ try:
+ res, tb, worker_id = loads(res_info)
+ except Exception:
+ for _, _, _, _, finish in callbacks:
+ if finish:
+ finish(dsk, state, True)
+ raise
if isinstance(res, Exception):
for _, _, _, _, finish in callbacks:
if finish:
@@ -483,8 +503,10 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
finish_task(dsk, key, state, results, keyorder.get)
for f in posttask_cbs:
f(key, res, dsk, state, worker_id)
+
while state['ready'] and len(state['running']) < num_workers:
fire_task()
+
except KeyboardInterrupt:
for cb in started_cbs:
if cb[-1]:
@@ -510,9 +532,11 @@ GIL
"""
-def apply_sync(func, args=(), kwds={}):
+def apply_sync(func, args=(), kwds={}, callback=None):
""" A naive synchronous version of apply_async """
- return func(*args, **kwds)
+ res = func(*args, **kwds)
+ if callback is not None:
+ callback(res)
def get_sync(dsk, keys, **kwargs):
@@ -520,10 +544,8 @@ def get_sync(dsk, keys, **kwargs):
Can be useful for debugging.
"""
- from .compatibility import Queue
kwargs.pop('num_workers', None) # if num_workers present, remove it
- queue = Queue()
- return get_async(apply_sync, 1, dsk, keys, queue=queue,
+ return get_async(apply_sync, 1, dsk, keys,
raise_on_exception=True, **kwargs)
diff --git a/dask/multiprocessing.py b/dask/multiprocessing.py
index ac7cea83b..ff9bf66d0 100644
--- a/dask/multiprocessing.py
+++ b/dask/multiprocessing.py
@@ -9,7 +9,6 @@ from .context import _globals
from .optimize import fuse, cull
import cloudpickle
-from toolz import curry
if sys.version_info.major < 3:
@@ -63,13 +62,6 @@ def get(dsk, keys, num_workers=None, func_loads=None, func_dumps=None,
else:
cleanup = False
- manager = multiprocessing.Manager()
- queue = manager.Queue()
-
- apply_async = pickle_apply_async(pool.apply_async,
- func_dumps=func_dumps,
- func_loads=func_loads)
-
# Optimize Dask
dsk2, dependencies = cull(dsk, keys)
if optimize_graph:
@@ -77,50 +69,20 @@ def get(dsk, keys, num_workers=None, func_loads=None, func_dumps=None,
else:
dsk3 = dsk2
+ # We specify marshalling functions in order to catch serialization
+ # errors and report them to the user.
+ loads = func_loads or _globals.get('func_loads') or _loads
+ dumps = func_dumps or _globals.get('func_dumps') or _dumps
+
+ # Note former versions used a multiprocessing Manager to share
+ # a Queue between parent and workers, but this is fragile on Windows
+ # (issue #1652).
try:
# Run
- result = get_async(apply_async, len(pool._pool), dsk3, keys,
- queue=queue, get_id=_process_get_id, **kwargs)
+ result = get_async(pool.apply_async, len(pool._pool), dsk3, keys,
+ get_id=_process_get_id,
+ dumps=dumps, loads=loads, **kwargs)
finally:
if cleanup:
pool.close()
return result
-
-
-def apply_func(sfunc, may_fail, wont_fail, loads=None):
- loads = loads or _globals.get('loads') or _loads
- func = loads(sfunc)
- key, queue, get_id, raise_on_exception = loads(wont_fail)
- try:
- task, data = loads(may_fail)
- except Exception as e:
- # Need a new reference for the exception, as `e` falls out of scope in
- # python 3
- exception = e
-
- def serialization_failure():
- raise exception
-
- task = (serialization_failure,)
- data = {}
-
- return func(key, task, data, queue, get_id,
- raise_on_exception=raise_on_exception)
-
-
-@curry
-def pickle_apply_async(apply_async, func, args=(),
- func_loads=None, func_dumps=None):
- # XXX: To deal with deserialization errors of tasks, this version of
- # apply_async doesn't actually match that of `pool.apply_async`. It's
- # customized to fit the signature of `dask.async.execute_task`, which is
- # the only function ever actually passed as `func`. This is a bit of a
- # hack, but it works pretty well. If the signature of `execute_task`
- # changes, then this will need to be changed as well.
- dumps = func_dumps or _globals.get('func_dumps') or _dumps
- key, task, data, queue, get_id, raise_on_exception = args
- sfunc = dumps(func)
- may_fail = dumps((task, data))
- wont_fail = dumps((key, queue, get_id, raise_on_exception))
- return apply_async(curry(apply_func, loads=func_loads),
- args=[sfunc, may_fail, wont_fail])
diff --git a/dask/threaded.py b/dask/threaded.py
index d1b96fc05..c2fffc1d8 100644
--- a/dask/threaded.py
+++ b/dask/threaded.py
@@ -11,7 +11,6 @@ import threading
from threading import current_thread, Lock
from .async import get_async
-from .compatibility import Queue
from .context import _globals
from .utils_test import inc, add # noqa: F401
@@ -65,9 +64,8 @@ def get(dsk, result, cache=None, num_workers=None, **kwargs):
pool = ThreadPool(num_workers)
pools[thread][num_workers] = pool
- queue = Queue()
results = get_async(pool.apply_async, len(pool._pool), dsk, result,
- cache=cache, queue=queue, get_id=_thread_get_id,
+ cache=cache, get_id=_thread_get_id,
**kwargs)
# Cleanup pools associated to dead threads
diff --git a/dask/utils.py b/dask/utils.py
index 9c14d3a7c..5f7f38257 100644
--- a/dask/utils.py
+++ b/dask/utils.py
@@ -118,6 +118,28 @@ def filetext(text, extension='', open=open, mode='w'):
yield filename
+@contextmanager
+def changed_cwd(new_cwd):
+ old_cwd = os.getcwd()
+ os.chdir(new_cwd)
+ try:
+ yield
+ finally:
+ os.chdir(old_cwd)
+
+
+@contextmanager
+def tmp_cwd(dir=None):
+ with tmpdir(dir) as dirname:
+ with changed_cwd(dirname):
+ yield dirname
+
+
+@contextmanager
+def noop_context():
+ yield
+
+
def repr_long_list(seq):
"""
@@ -150,27 +172,32 @@ class IndexCallable(object):
@contextmanager
-def filetexts(d, open=open, mode='t'):
+def filetexts(d, open=open, mode='t', use_tmpdir=True):
""" Dumps a number of textfiles to disk
d - dict
a mapping from filename to text like {'a.csv': '1,1\n2,2'}
+
+ Since this is meant for use in tests, this context manager will
+ automatically switch to a temporary current directory, to avoid
+ race conditions when running tests in parallel.
"""
- for filename, text in d.items():
- f = open(filename, 'w' + mode)
- try:
- f.write(text)
- finally:
+ with (tmp_cwd() if use_tmpdir else noop_context()):
+ for filename, text in d.items():
+ f = open(filename, 'w' + mode)
try:
- f.close()
- except AttributeError:
- pass
-
- yield list(d)
-
- for filename in d:
- if os.path.exists(filename):
- os.remove(filename)
+ f.write(text)
+ finally:
+ try:
+ f.close()
+ except AttributeError:
+ pass
+
+ yield list(d)
+
+ for filename in d:
+ if os.path.exists(filename):
+ os.remove(filename)
compressions = {'gz': 'gzip', 'bz2': 'bz2', 'xz': 'xz'}
| Test failures when running tests in parallel
This is on Linux. To reproduce: install the xdist plugin with pip (see https://pytest.org/dev/xdist.html#installation-of-xdist-plugin), then run something like `PYTHONHASHSEED=1 py.test -n=auto --tb=short dask`.
These are the failures I get here:
```
_______________________________________________________________ test_read_text[xz-None-ascii1] ________________________________________________________________
[gw0] linux -- Python 3.5.2 /home/antoine/miniconda3/envs/dask35/bin/python
dask/bag/tests/test_text.py:41: in test_read_text
L, = compute(b)
dask/base.py:171: in compute
results = get(dsk, keys, **kwargs)
dask/async.py:549: in get_sync
raise_on_exception=True, **kwargs)
dask/async.py:478: in get_async
fire_task()
dask/async.py:474: in fire_task
callback=queue.put)
dask/async.py:535: in apply_sync
res = func(*args, **kwds)
dask/async.py:266: in execute_task
result = _execute_task(task, data)
dask/async.py:247: in _execute_task
return func(*args2)
../miniconda3/envs/dask35/lib/python3.5/lzma.py:210: in read1
return self._buffer.read1(size)
../miniconda3/envs/dask35/lib/python3.5/_compression.py:68: in readinto
data = self.read(len(byte_view))
../miniconda3/envs/dask35/lib/python3.5/_compression.py:103: in read
data = self._decompressor.decompress(rawblock, size)
E _lzma.LZMAError: Input format not supported by decoder
_______________________________________________________________________ test_read_bytes _______________________________________________________________________
[gw0] linux -- Python 3.5.2 /home/antoine/miniconda3/envs/dask35/bin/python
dask/bytes/tests/test_local.py:40: in test_read_bytes
results = compute(*concat(values))
dask/base.py:171: in compute
results = get(dsk, keys, **kwargs)
dask/async.py:549: in get_sync
raise_on_exception=True, **kwargs)
dask/async.py:478: in get_async
fire_task()
dask/async.py:474: in fire_task
callback=queue.put)
dask/async.py:535: in apply_sync
res = func(*args, **kwds)
dask/async.py:266: in execute_task
result = _execute_task(task, data)
dask/async.py:247: in _execute_task
return func(*args2)
dask/bytes/local.py:74: in read_block_from_file
with open(path, 'rb') as f:
E FileNotFoundError: [Errno 2] No such file or directory: '/home/antoine/dask/.test.accounts.2.json'
```
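Both failures look like races on files under a shared current working directory (note the relative fixture path resolved against the repo root, `/home/antoine/dask/.test.accounts.2.json`). A minimal sketch of the mitigation the patch applies in `dask/utils.py`, rendered here with `tempfile.TemporaryDirectory` for brevity (the real helpers are `changed_cwd()` and `tmp_cwd()` built on dask's own `tmpdir`):
```
import os
from contextlib import contextmanager
from tempfile import TemporaryDirectory

@contextmanager
def tmp_cwd():
    # run the body in a fresh scratch directory so parallel xdist
    # workers never collide on relative fixture paths
    old_cwd = os.getcwd()
    with TemporaryDirectory() as dirname:
        os.chdir(dirname)
        try:
            yield dirname
        finally:
            os.chdir(old_cwd)
```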
| dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index 887abac6e..c852b3cd2 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -8,6 +8,7 @@ import time
from distutils.version import LooseVersion
from operator import add, sub, getitem
from threading import Lock
+import warnings
from toolz import merge, countby, concat
from toolz.curried import identity
@@ -29,7 +30,8 @@ from dask.array.core import (getem, getarray, getarray_nofancy, top, dotmany,
broadcast_to, reshape, fromfunction,
blockdims_from_blockshape, store, optimize,
from_func, normalize_chunks, broadcast_chunks,
- atop, from_delayed, concatenate_axes)
+ atop, from_delayed, concatenate_axes,
+ common_blockdim)
from dask.array.utils import assert_eq
# temporary until numpy functions migrated
@@ -780,8 +782,8 @@ def test_map_blocks2():
x = np.arange(10, dtype='i8')
d = from_array(x, chunks=(2,))
- def func(block, block_id=None):
- return np.ones_like(block) * sum(block_id)
+ def func(block, block_id=None, c=0):
+ return np.ones_like(block) * sum(block_id) + c
out = d.map_blocks(func, dtype='i8')
expected = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4], dtype='i8')
@@ -789,6 +791,12 @@ def test_map_blocks2():
assert_eq(out, expected)
assert same_keys(d.map_blocks(func, dtype='i8'), out)
+ out = d.map_blocks(func, dtype='i8', c=1)
+ expected = expected + 1
+
+ assert_eq(out, expected)
+ assert same_keys(d.map_blocks(func, dtype='i8', c=1), out)
+
def test_map_blocks_with_constants():
d = da.arange(10, chunks=3)
@@ -1852,15 +1860,38 @@ def test_map_blocks_with_changed_dimension():
e = d.map_blocks(lambda b: b.sum(axis=0), chunks=(4,), drop_axis=0,
dtype=d.dtype)
- assert e.ndim == 1
assert e.chunks == ((4, 4),)
assert_eq(e, x.sum(axis=0))
+ # Provided chunks have wrong shape
+ with pytest.raises(ValueError):
+ d.map_blocks(lambda b: b.sum(axis=0), chunks=(7, 4), drop_axis=0)
+
+ with pytest.raises(ValueError):
+ d.map_blocks(lambda b: b.sum(axis=0), chunks=((4, 4, 4),), drop_axis=0)
+
+ # Can't drop axis with more than 1 block
+ with pytest.raises(ValueError):
+ d.map_blocks(lambda b: b.sum(axis=1), drop_axis=1, dtype=d.dtype)
+
+ # Can't use both drop_axis and new_axis
+ with pytest.raises(ValueError):
+ d.map_blocks(lambda b: b, drop_axis=1, new_axis=1)
+
+ d = da.from_array(x, chunks=(4, 8))
+ e = d.map_blocks(lambda b: b.sum(axis=1), drop_axis=1, dtype=d.dtype)
+ assert e.chunks == ((4, 3),)
+ assert_eq(e, x.sum(axis=1))
+
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
e = d.map_blocks(lambda b: b[None, :, :, None],
chunks=(1, 4, 4, 1), new_axis=[0, 3], dtype=d.dtype)
- assert e.ndim == 4
+ assert e.chunks == ((1,), (4, 4), (4, 4), (1,))
+ assert_eq(e, x[None, :, :, None])
+
+ e = d.map_blocks(lambda b: b[None, :, :, None],
+ new_axis=[0, 3], dtype=d.dtype)
assert e.chunks == ((1,), (4, 4), (4, 4), (1,))
assert_eq(e, x[None, :, :, None])
@@ -2235,3 +2266,61 @@ def test_atop_concatenate():
z = atop(f, 'j', x, 'ijk', y, 'ki', y, 'ij', concatenate=True,
dtype=x._dtype)
assert_eq(z, np.ones(10))
+
+
+def test_common_blockdim():
+ assert common_blockdim([(5,), (5,)]) == (5,)
+ assert common_blockdim([(5,), (2, 3,)]) == (2, 3)
+ assert common_blockdim([(5, 5), (2, 3, 5)]) == (2, 3, 5)
+ assert common_blockdim([(5, 5), (2, 3, 5)]) == (2, 3, 5)
+ assert common_blockdim([(5, 2, 3), (2, 3, 5)]) == (2, 3, 2, 3)
+
+ assert common_blockdim([(1, 2), (2, 1)]) == (1, 1, 1)
+ assert common_blockdim([(1, 2, 2), (2, 1, 2), (2, 2, 1)]) == (1, 1, 1, 1, 1)
+
+
+def test_uneven_chunks_that_fit_neatly():
+ x = da.arange(10, chunks=((5, 5),))
+ y = da.ones(10, chunks=((5, 2, 3),))
+
+ assert_eq(x + y, np.arange(10) + np.ones(10))
+
+ z = x + y
+ assert z.chunks == ((5, 2, 3),)
+
+
+def test_elemwise_uneven_chunks():
+ x = da.arange(10, chunks=((4, 6),))
+ y = da.ones(10, chunks=((6, 4),))
+
+ assert_eq(x + y, np.arange(10) + np.ones(10))
+
+ z = x + y
+ assert z.chunks == ((4, 2, 4),)
+
+ x = da.random.random((10, 10), chunks=((4, 6), (5, 2, 3)))
+ y = da.random.random((4, 10, 10), chunks=((2, 2), (6, 4), (2, 3, 5)))
+
+ z = x + y
+ assert_eq(x + y, x.compute() + y.compute())
+ assert z.chunks == ((2, 2), (4, 2, 4), (2, 3, 2, 3))
+
+
+def test_uneven_chunks_atop():
+ x = da.random.random((10, 10), chunks=((2, 3, 2, 3), (5, 5)))
+ y = da.random.random((10, 10), chunks=((4, 4, 2), (4, 2, 4)))
+ z = atop(np.dot, 'ik', x, 'ij', y, 'jk', dtype=x._dtype, concatenate=True)
+ assert z.chunks == (x.chunks[0], y.chunks[1])
+
+ assert_eq(z, x.compute().dot(y))
+
+
+def test_warn_bad_rechunking():
+ x = da.ones((20, 20), chunks=(20, 1))
+ y = da.ones((20, 20), chunks=(1, 20))
+
+ with warnings.catch_warnings(record=True) as record:
+ x + y
+
+ assert record
+ assert '20' in record[0].message.args[0]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 6
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@f14ec6a4ec0d4fb9ff2499ac53d0df4116465c8a#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.13.3
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- botocore=1.23.24=pyhd3eb1b0_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- s3fs=2022.1.0=pyhd3eb1b0_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- distributed==1.13.3
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_common_blockdim",
"dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly",
"dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks",
"dask/array/tests/test_array_core.py::test_uneven_chunks_atop",
"dask/array/tests/test_array_core.py::test_warn_bad_rechunking"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_field_access_with_shape",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_reshape",
"dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_to_hdf5",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_clip",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_array_slicing_results_in_ndarray",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_digitize",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_h5py_newaxis",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_h5py_tokenize",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_atop_new_axes",
"dask/array/tests/test_array_core.py::test_atop_kwargs",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_array_core.py::test_array_picklable",
"dask/array/tests/test_array_core.py::test_swapaxes",
"dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks",
"dask/array/tests/test_array_core.py::test_concatenate_axes",
"dask/array/tests/test_array_core.py::test_atop_concatenate"
]
| []
| BSD 3-Clause "New" or "Revised" License | 813 | [
"dask/multiprocessing.py",
".travis.yml",
"dask/threaded.py",
"dask/array/core.py",
"dask/async.py",
"dask/utils.py"
]
| [
"dask/multiprocessing.py",
".travis.yml",
"dask/threaded.py",
"dask/array/core.py",
"dask/async.py",
"dask/utils.py"
]
|
|
zalando-stups__senza-397 | 7d3726dec5badf48bab03bcee60eee43281b512c | 2016-10-14 14:42:48 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/subcommands/root.py b/senza/subcommands/root.py
index a121009..e163658 100644
--- a/senza/subcommands/root.py
+++ b/senza/subcommands/root.py
@@ -99,9 +99,9 @@ def check_senza_version(current_version: str):
if latest_version is not None and current_version < latest_version:
if __file__.startswith('/home'):
# if it's installed in the user folder
- cmd = "pip install --upgrade stups-senza"
+ cmd = "pip3 install --upgrade stups-senza"
else:
- cmd = "sudo pip install --upgrade stups-senza"
+ cmd = "sudo pip3 install --upgrade stups-senza"
warning("Your senza version ({current}) is outdated. "
"Please install the new one using '{cmd}'".format(current=current_version,
cmd=cmd))
| Discrepancy between README and error messages
README.md states:
`sudo pip3 install --upgrade stups-senza`
But if you have an old version of senza installed, the reported message is:
`...Please install the new one using 'sudo pip install --upgrade stups-senza'`
Note that `pip3` is specified in the README and `pip` is specified in the error message.
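A sketch of the corrected hint, mirroring the one-line fix in `senza/subcommands/root.py` shown in the patch above (the function name and parameter are illustrative; the real logic lives inline in `check_senza_version()` and tests `__file__`):
```
def upgrade_hint(install_path):
    # always recommend pip3, matching the README
    cmd = "pip3 install --upgrade stups-senza"
    if not install_path.startswith('/home'):
        # system-wide install, so prefix with sudo
        cmd = "sudo " + cmd
    return "Please install the new one using '{}'".format(cmd)
```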
| zalando-stups/senza | diff --git a/tests/test_subcommands/test_root.py b/tests/test_subcommands/test_root.py
index 86f5a4f..c16be12 100644
--- a/tests/test_subcommands/test_root.py
+++ b/tests/test_subcommands/test_root.py
@@ -71,7 +71,7 @@ def test_check_senza_version(monkeypatch,
check_senza_version("0.40")
mock_warning.assert_called_once_with(
"Your senza version (0.40) is outdated. "
- "Please install the new one using 'pip install --upgrade stups-senza'"
+ "Please install the new one using 'pip3 install --upgrade stups-senza'"
)
with TemporaryDirectory() as temp_dir_4:
@@ -83,7 +83,7 @@ def test_check_senza_version(monkeypatch,
mock_warning.assert_called_once_with(
"Your senza version (0.40) is outdated. "
"Please install the new one using "
- "'sudo pip install --upgrade stups-senza'"
+ "'sudo pip3 install --upgrade stups-senza'"
)
@@ -115,7 +115,7 @@ def test_check_senza_version_outdated_cache(monkeypatch, # noqa: F811
mock_warning.assert_called_once_with(
"Your senza version (0.40) is outdated. "
"Please install the new one using "
- "'sudo pip install --upgrade stups-senza'"
+ "'sudo pip3 install --upgrade stups-senza'"
)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
raven==6.10.0
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@7d3726dec5badf48bab03bcee60eee43281b512c#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- raven==6.10.0
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_subcommands/test_root.py::test_check_senza_version",
"tests/test_subcommands/test_root.py::test_check_senza_version_outdated_cache"
]
| []
| [
"tests/test_subcommands/test_root.py::test_check_senza_version_notty",
"tests/test_subcommands/test_root.py::test_check_senza_version_timeout",
"tests/test_subcommands/test_root.py::test_check_senza_version_exception",
"tests/test_subcommands/test_root.py::test_version"
]
| []
| Apache License 2.0 | 814 | [
"senza/subcommands/root.py"
]
| [
"senza/subcommands/root.py"
]
|
|
laterpay__laterpay-client-python-87 | 67ebeb9dac24e5fe59d65ca4986c97aa38cfe6b4 | 2016-10-14 17:44:04 | 67ebeb9dac24e5fe59d65ca4986c97aa38cfe6b4 | coveralls:
[Coverage Status](https://coveralls.io/builds/8342487)
Coverage increased (+0.5%) to 97.285% when pulling **4c62c987296b09ad5a637f32fa245ccbe67e03e3 on feature/remove-deprecation** into **a4738be03b9cc0680d24466b7eefe5fdde6b7d2d on develop**.
coveralls:
[Coverage Status](https://coveralls.io/builds/8623418)
Coverage increased (+0.5%) to 97.285% when pulling **a57e5e326075d000775e74d51cfad2d885020032 on feature/remove-deprecation** into **a4738be03b9cc0680d24466b7eefe5fdde6b7d2d on develop**.
coveralls:
[Coverage Status](https://coveralls.io/builds/8743835)
Coverage increased (+0.5%) to 97.285% when pulling **992f34dd28f22e9da3f3fe77636e9ea94e7b76ad on feature/remove-deprecation** into **e2863d34ea369cd4a11ae624014724cda6bae04a on develop**.
coveralls:
[Coverage Status](https://coveralls.io/builds/8747170)
Coverage increased (+0.5%) to 97.285% when pulling **4d4bbb201bdc9cf9d85397b004f0398d66b70dea on feature/remove-deprecation** into **e2863d34ea369cd4a11ae624014724cda6bae04a on develop**.
coveralls:
https://coveralls.io/builds/8747403
Coverage increased (+0.5%) to 97.285% when pulling **d0cc11830027acca25b9bff8ca4867ad7aa23d2d on feature/remove-deprecation** into **67ebeb9dac24e5fe59d65ca4986c97aa38cfe6b4 on develop**.
coveralls:
https://coveralls.io/builds/8809919
Coverage increased (+0.5%) to 97.285% when pulling **3801fb17a5cc69742cd935b7d2db4f3a3ba51ecd on feature/remove-deprecation** into **67ebeb9dac24e5fe59d65ca4986c97aa38cfe6b4 on develop**.
| diff --git a/CHANGELOG.md b/CHANGELOG.md
index ee7351b..55bab53 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,30 @@
# Changelog
+## 5.0.0 (under development)
+
+* Removed the following long deprecated methods from the
+ `laterpay.LaterPayClient`:
+
+ * `get_access()`, use `get_access_data()` instead
+  * `get_iframeapi_balance_url()`, use `get_controls_balance_url()` instead
+  * `get_iframeapi_links_url()`, use `get_controls_links_url()` instead
+ * `get_identify_url()` is not needed following our modern access control
+ checks
+
+* Removed the following deprecated arguments from `laterpay.LaterPayClient`
+ methods:
+
+ * `use_dialog_api` from `get_login_dialog_url()`
+ * `use_dialog_api` from `get_signup_dialog_url()`
+ * `use_dialog_api` from `get_logout_dialog_url()`
+
+* Removed the following public methods from `laterpay.signing`:
+
+ * `sign_and_encode()` in favor of `laterpay.utils.signed_query()`
+ * `sign_get_url()` in favor of `laterpay.utils.signed_url()`
+
+* Removed the deprecated `cp` argument from `laterpay.ItemDefinition`
+
## 4.6.0
diff --git a/laterpay/__init__.py b/laterpay/__init__.py
index 458c82b..c3bbbf8 100644
--- a/laterpay/__init__.py
+++ b/laterpay/__init__.py
@@ -9,20 +9,17 @@ http://docs.laterpay.net/
from __future__ import absolute_import, print_function
-import json
import logging
import pkg_resources
import random
import re
import string
import time
-import warnings
import requests
import six
from six.moves.urllib.parse import quote_plus
-from six.moves.urllib.request import Request, urlopen
from . import signing, utils
@@ -64,7 +61,7 @@ class ItemDefinition(object):
For Single item purchases: http://docs.laterpay.net/platform/dialogs/buy/
"""
- def __init__(self, item_id, pricing, url, title, cp=None, expiry=None):
+ def __init__(self, item_id, pricing, url, title, expiry=None):
for price in pricing.split(','):
if not re.match('[A-Z]{3}\d+', price):
@@ -74,9 +71,6 @@ class ItemDefinition(object):
raise InvalidItemDefinition("Invalid expiry value %s, it should be '+3600' or UTC-based "
"epoch timestamp in seconds of type int" % expiry)
- if cp is not None: # pragma: no cover
- warnings.warn("ItemDefinition's cp parameter is deprecated and will be ignored.", DeprecationWarning)
-
self.data = {
'article_id': item_id,
'pricing': pricing,
@@ -126,43 +120,6 @@ class LaterPayClient(object):
}
return utils.signed_url(self.shared_secret, data, url, method='GET')
- def get_identify_url(self, identify_callback=None): # pragma: no cover
- """
- Deprecated.
- """
- warnings.warn(
- "LaterPayClient.get_identify_url() is deprecated "
- "and will be removed in a future release.",
- DeprecationWarning,
- )
-
- base_url = self._identify_url
- data = {'cp': self.cp_key}
-
- if identify_callback is not None:
- data['callback_url'] = identify_callback
-
- params = self._sign_and_encode(data, url=base_url, method="GET")
- url = '%s?%s' % (base_url, params)
-
- return url
-
- def get_iframeapi_links_url(self,
- next_url,
- css_url=None,
- forcelang=None,
- show_greeting=False,
- show_long_greeting=False,
- show_login=False,
- show_signup=False,
- show_long_signup=False,
- use_jsevents=False): # pragma: no cover
- """Deprecated, see get_controls_links_url."""
- warnings.warn("get_iframe_links_url is deprecated. Please use get_controls_links_url. "
- "It will be removed on a future release.", DeprecationWarning)
- return self.get_controls_links_url(next_url, css_url, forcelang, show_greeting, show_long_greeting,
- show_login, show_signup, show_long_signup, use_jsevents)
-
def get_controls_links_url(self,
next_url,
css_url=None,
@@ -203,12 +160,6 @@ class LaterPayClient(object):
return utils.signed_url(self.shared_secret, data, url, method='GET')
- def get_iframeapi_balance_url(self, forcelang=None): # pragma: no cover
- """Deprecated, see get_controls_balance_url."""
- warnings.warn("get_iframe_balance_url is deprecated. Please use get_controls_balance_url. "
- "It will be removed on a future release.", DeprecationWarning)
- return self.get_controls_balance_url(forcelang)
-
def get_controls_balance_url(self, forcelang=None):
"""
Get the URL for an iframe showing the user's invoice balance.
@@ -224,63 +175,40 @@ class LaterPayClient(object):
return utils.signed_url(self.shared_secret, data, base_url, method='GET')
- def _get_dialog_api_url(self, url):
- return '%s/dialog-api?url=%s' % (self.web_root, quote_plus(url))
-
- def get_login_dialog_url(self, next_url, use_jsevents=False, use_dialog_api=True):
+ def get_login_dialog_url(self, next_url, use_jsevents=False):
"""Get the URL for a login page."""
- url = '%s/account/dialog/login?next=%s%s%s' % (self.web_root, quote_plus(next_url),
- "&jsevents=1" if use_jsevents else "",
- "&cp=%s" % self.cp_key)
- if use_dialog_api:
- warnings.warn("The Dialog API Wrapper is deprecated and no longer recommended. "
- "Please set use_dialog_api to False when calling get_login_dialog_url. "
- "Future releases will not use the Dialog API Wrapper by default. "
- "See http://docs.laterpay.net/platform/dialogs/third_party_cookies/",
- DeprecationWarning)
- return self._get_dialog_api_url(url)
+ url = '%s/account/dialog/login?next=%s%s%s' % (
+ self.web_root,
+ quote_plus(next_url),
+ "&jsevents=1" if use_jsevents else "",
+ "&cp=%s" % self.cp_key,
+ )
return url
- def get_signup_dialog_url(self, next_url, use_jsevents=False, use_dialog_api=True):
+ def get_signup_dialog_url(self, next_url, use_jsevents=False):
"""Get the URL for a signup page."""
- url = '%s/account/dialog/signup?next=%s%s%s' % (self.web_root, quote_plus(next_url),
- "&jsevents=1" if use_jsevents else "",
- "&cp=%s" % self.cp_key)
- if use_dialog_api:
- warnings.warn("The Dialog API Wrapper is deprecated and no longer recommended. "
- "Please set use_dialog_api to False when calling get_signup_dialog_url. "
- "Future releases will not use the Dialog API Wrapper by default. "
- "See http://docs.laterpay.net/platform/dialogs/third_party_cookies/",
- DeprecationWarning)
- return self._get_dialog_api_url(url)
+ url = '%s/account/dialog/signup?next=%s%s%s' % (
+ self.web_root,
+ quote_plus(next_url),
+ "&jsevents=1" if use_jsevents else "",
+ "&cp=%s" % self.cp_key,
+ )
return url
- def get_logout_dialog_url(self, next_url, use_jsevents=False, use_dialog_api=True):
+ def get_logout_dialog_url(self, next_url, use_jsevents=False):
"""Get the URL for a logout page."""
- url = '%s/account/dialog/logout?next=%s%s%s' % (self.web_root, quote_plus(next_url),
- "&jsevents=1" if use_jsevents else "",
- "&cp=%s" % self.cp_key)
- if use_dialog_api:
- warnings.warn("The Dialog API Wrapper is deprecated and no longer recommended. "
- "Please set use_dialog_api to False when calling get_logout_dialog_url. "
- "Future releases will not use the Dialog API Wrapper by default. "
- "See http://docs.laterpay.net/platform/dialogs/third_party_cookies/",
- DeprecationWarning)
- return self._get_dialog_api_url(url)
+ url = '%s/account/dialog/logout?next=%s%s%s' % (
+ self.web_root,
+ quote_plus(next_url),
+ "&jsevents=1" if use_jsevents else "",
+ "&cp=%s" % self.cp_key,
+ )
return url
@property
def _access_url(self):
return '%s/access' % self.api_root
- @property
- def _add_url(self):
- return '%s/add' % self.api_root
-
- @property
- def _identify_url(self):
- return '%s/identify' % self.api_root
-
@property
def _gettoken_url(self):
return '%s/gettoken' % self.api_root
@@ -291,12 +219,10 @@ class LaterPayClient(object):
product_key=None,
dialog=True,
use_jsevents=False,
- skip_add_to_invoice=False,
transaction_reference=None,
consumable=False,
return_url=None,
failure_url=None,
- use_dialog_api=True,
**kwargs):
# filter out params with None value.
@@ -322,10 +248,6 @@ class LaterPayClient(object):
data['tref'] = transaction_reference
- if skip_add_to_invoice:
- warnings.warn('The param skip_add_to_invoice is deprecated and it '
- 'will be removed in a future release.', DeprecationWarning)
-
if dialog:
prefix = '%s/%s' % (self.web_root, 'dialog')
else:
@@ -338,16 +260,7 @@ class LaterPayClient(object):
data.update(kwargs)
- url = utils.signed_url(self.shared_secret, data, base_url, method='GET')
-
- if use_dialog_api:
- warnings.warn("The Dialog API Wrapper is deprecated and no longer recommended. "
- "Please set use_dialog_api to False when calling get_buy_url or get_add_url. "
- "Future releases will not use the Dialog API Wrapper by default. "
- "See http://docs.laterpay.net/platform/dialogs/third_party_cookies/",
- DeprecationWarning)
- return self._get_dialog_api_url(url)
- return url
+ return utils.signed_url(self.shared_secret, data, base_url, method='GET')
def get_buy_url(self,
item_definition,
@@ -359,7 +272,6 @@ class LaterPayClient(object):
consumable=False,
return_url=None,
failure_url=None,
- use_dialog_api=True,
**kwargs):
"""
Get the URL at which a user can start the checkout process to buy a single item.
@@ -377,7 +289,6 @@ class LaterPayClient(object):
consumable=consumable,
return_url=return_url,
failure_url=failure_url,
- use_dialog_api=use_dialog_api,
**kwargs)
def get_add_url(self,
@@ -390,7 +301,6 @@ class LaterPayClient(object):
consumable=False,
return_url=None,
failure_url=None,
- use_dialog_api=True,
**kwargs):
"""
Get the URL at which a user can add an item to their invoice to pay later.
@@ -408,50 +318,8 @@ class LaterPayClient(object):
consumable=consumable,
return_url=return_url,
failure_url=failure_url,
- use_dialog_api=use_dialog_api,
**kwargs)
- def _sign_and_encode(self, params, url, method="GET"):
- return utils.signed_query(self.shared_secret, params, url=url, method=method)
-
- def _make_request(self, url, params, method='GET'): # pragma: no cover
- """
- Deprecated.
-
- Used by deprecated ``get_access()`` only.
- """
- params = self._sign_and_encode(params=params, url=url, method=method)
-
- headers = self.get_request_headers()
-
- if method == 'POST':
- req = Request(url, data=params, headers=headers)
- else:
- url = "%s?%s" % (url, params)
- req = Request(url, headers=headers)
-
- _logger.debug("Making request to %s", url)
-
- try:
- response = urlopen(req, timeout=self.timeout_seconds).read()
- except:
- # TODO: Add proper or no exception handling.
- # Pretending there was a response even if there was none
- # (can't connect / timeout) seems like a wrong idea.
- _logger.exception("Unexpected error with request")
- resp = {'status': 'unexpected error'}
- else:
- _logger.debug("Received response %s", response)
- resp = json.loads(response.decode())
-
- if 'new_token' in resp:
- self.lptoken = resp['new_token']
-
- if resp.get('status', None) == 'invalid_token':
- self.lptoken = None
-
- return resp
-
def has_token(self):
"""
Do we have an identifier token.
@@ -460,42 +328,6 @@ class LaterPayClient(object):
"""
return self.lptoken is not None
- def get_access(self, article_ids, product_key=None): # pragma: no cover
- """
- Deprecated. Consider using ``.get_access_data()`` instead.
-
- Get access data for a set of article ids.
-
- http://docs.laterpay.net/platform/access/access/
- """
- warnings.warn(
- "LaterPayClient.get_access() is deprecated "
- "and will be removed in a future release. "
- "Consider using ``.get_access_data()`` instead.",
- DeprecationWarning,
- )
-
- if not isinstance(article_ids, (list, tuple)):
- article_ids = [article_ids]
-
- params = {
- 'lptoken': self.lptoken,
- 'cp': self.cp_key,
- 'article_id': article_ids
- }
-
- if product_key is not None:
- params['product'] = product_key
-
- data = self._make_request(self._access_url, params)
-
- allowed_statuses = ['ok', 'invalid_token', 'connection_error']
-
- if data['status'] not in allowed_statuses:
- raise Exception(data['status'])
-
- return data
-
def get_request_headers(self):
"""
Return a ``dict`` of request headers to be sent to the API.
diff --git a/laterpay/signing.py b/laterpay/signing.py
index 347aa31..46875b8 100644
--- a/laterpay/signing.py
+++ b/laterpay/signing.py
@@ -3,11 +3,9 @@ from __future__ import absolute_import, print_function
import hashlib
import hmac
-import time
-import warnings
import six
-from six.moves.urllib.parse import parse_qsl, quote, urlencode, urlparse
+from six.moves.urllib.parse import quote, urlparse
from . import compat
@@ -190,99 +188,3 @@ def verify(signature, secret, params, url, method):
mac = sign(secret, params, url, method)
return time_independent_HMAC_compare(signature, mac)
-
-
-def sign_and_encode(secret, params, url, method="GET"): # pragma: no cover
- """
- Deprecated. Consider using ``laterpay.utils.signed_query()`` instead.
-
- Sign and encode a URL ``url`` with a ``secret`` key called via an HTTP ``method``.
-
- It adds the signature to the URL
- as the URL parameter "hmac" and also adds the required timestamp parameter "ts" if it's not already
- in the ``params`` dictionary. ``unicode()`` instances in params are handled correctly.
-
- :param secret: The shared secret as a hex-encoded string
- :param params: A dictionary of URL parameters. Each key can resolve to a
- single value string or a multi-string list.
- :param url: The URL being called
- :param method: An uppercase string representation of the HTTP method being
- used for the call (e.g. "GET", "POST")
- :return: A signed and correctly encoded URL
- """
- warnings.warn(
- "sign_and_encode is deprecated. It will be removed in a future release. "
- "Consider using ``laterpay.utils.signed_query()`` instead.",
- DeprecationWarning,
- )
-
- if 'ts' not in params:
- params['ts'] = str(int(time.time()))
-
- if 'hmac' in params:
- params.pop('hmac')
-
- sorted_data = []
- for k, v in sort_params(params):
- k = compat.encode_if_unicode(k)
- value = compat.encode_if_unicode(v)
- sorted_data.append((k, value))
-
- encoded = urlencode(sorted_data)
- hmac = sign(secret, params, url=url, method=method)
-
- return "%s&hmac=%s" % (encoded, hmac)
-
-
-def sign_get_url(secret, url, signature_paramname="hmac"): # pragma: no cover
- """
- Deprecated.
-
- Sign a URL to be GET-ed.
-
- This function takes a URL, parses it, sorts the URL parameters in
- alphabetical order, concatenates them with the character "&" inbetween and
- subsequently creates an HMAC using the secret key in ``hmac_key``.
-
- It then appends the signature in hex encoding in its own URL parameter,
- specified by ``signature_paramname`` and returns the resulting URL.
-
- This function is used for redirecting back to the merchant's page after a
- call to /identify or /gettoken
-
- :param secret: the secret key used to sign the URL
- :type secret: str
- :param url: the URL to sign
- :type url: str
- :param signature_paramname: the parameter name to append to ``url`` that
- will contain the signature (default: "hmac")
- :type signature_paramname: str
- :returns: ``str`` -- the URL, including the signature as an URL parameter
- """
- warnings.warn(
- "sign_get_url is deprecated. It will be removed in a future release. "
- "It wasn't intended for public use. It's recommended to use the core "
- "signing API which is sign() and verify().",
- DeprecationWarning,
- )
-
- parsed = urlparse(url)
-
- if parsed.query != "":
- # use parse_qsl, because .parse_qs seems to create problems
- # with urlencode()
- qs = parse_qsl(parsed.query, keep_blank_values=True)
-
- # create string to sign
-
- # .sort() will sort in alphabetical order
- qs.append(("ts", str(int(time.time()))))
- qs.sort()
-
- hmac = sign(str(secret), qs, url, method="GET")
-
- qs.append((signature_paramname, hmac))
- return parsed.scheme + "://" + parsed.netloc + parsed.path + \
- parsed.params + "?" + urlencode(qs) + parsed.fragment
-
- return None
| Actually remove deprecated code
We have been raising a couple of deprecation warnings for a long time now. We should actually clean up the code base and get rid of those functions.
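For downstream callers, a minimal migration sketch (the replacement names come from the changelog above, and the keyword arguments mirror the internal calls visible in the removed wrappers; anything beyond that is an assumption):

```python
from laterpay import utils

secret = "some-secret"            # placeholder credentials
params = {"article_id": "1"}      # placeholder payload
url = "https://example.com/api"   # placeholder endpoint

# Previously: laterpay.signing.sign_and_encode(secret, params, url, method="GET")
query = utils.signed_query(secret, params, url=url, method="GET")

# Previously: laterpay.signing.sign_get_url(secret, url)
signed = utils.signed_url(secret, params, url, method="GET")

# Previously: LaterPayClient.get_access(...); use LaterPayClient.get_access_data(...) instead.
```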
| laterpay/laterpay-client-python | diff --git a/tests/test_client.py b/tests/test_client.py
index f3069e7..bc01797 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -47,18 +47,8 @@ class TestLaterPayClient(unittest.TestCase):
'some-secret')
self.item = ItemDefinition(1, 'EUR20', 'http://example.com/', 'title')
- def get_qs_dict(self, url):
- o = urlparse(url)
- d = parse_qs(o.query)
- o = urlparse(d['url'][0])
- d = parse_qs(o.query)
- return d
-
- def get_dialog_api_furl(self, url):
- return furl(furl(url).query.params['url'])
-
def assertQueryString(self, url, key, value=None):
- d = self.get_qs_dict(url)
+ d = parse_qs(urlparse(url).query)
if not value:
return (key in d)
return d.get(key, None) == value
@@ -84,7 +74,7 @@ class TestLaterPayClient(unittest.TestCase):
dialog=True,
use_jsevents=True)
- d = self.get_qs_dict(url)
+ d = parse_qs(urlparse(url).query)
self.assertFalse('tref' in d)
with self.assertRaises(APIException):
@@ -123,16 +113,6 @@ class TestLaterPayClient(unittest.TestCase):
qd = parse_qs(urlparse(url).query)
self.assertEqual(qd['something_more'], ['x', 'y'])
- @mock.patch('laterpay.warnings.warn')
- def test_log_warning_for_skip_add_to_invoice_deprecation(self, warning_mock):
- item = ItemDefinition(1, 'EUR20', 'http://help.me/', 'title')
- self.lp.get_add_url(item, skip_add_to_invoice=True,
- use_dialog_api=False)
- warning_mock.assert_called_once_with("The param skip_add_to_invoice is "
- "deprecated and it will be removed "
- "in a future release.",
- DeprecationWarning)
-
def test_failure_url_param(self):
item = ItemDefinition(1, 'EUR20', 'http://help.me/', 'title')
url = self.lp.get_add_url(item, failure_url="http://example.com")
@@ -147,10 +127,10 @@ class TestLaterPayClient(unittest.TestCase):
`product_key` "product" query param.
"""
url = self.lp.get_add_url(self.item, product_key="hopes")
- data = self.get_qs_dict(url)
+ data = parse_qs(urlparse(url).query)
self.assertEqual(data['product'], ['hopes'])
self.assertEqual(
- str(self.get_dialog_api_furl(url).path),
+ str(furl(url).path),
'/dialog/add',
)
@@ -160,10 +140,10 @@ class TestLaterPayClient(unittest.TestCase):
`product_key` "product" query param.
"""
url = self.lp.get_buy_url(self.item, product_key="hopes")
- data = self.get_qs_dict(url)
+ data = parse_qs(urlparse(url).query)
self.assertEqual(data['product'], ['hopes'])
self.assertEqual(
- str(self.get_dialog_api_furl(url).path),
+ str(furl(url).path),
'/dialog/buy',
)
@@ -173,10 +153,10 @@ class TestLaterPayClient(unittest.TestCase):
"product" query param when no `product_key` method param is used.
"""
url = self.lp.get_add_url(self.item)
- data = self.get_qs_dict(url)
+ data = parse_qs(urlparse(url).query)
self.assertNotIn('product', data)
self.assertEqual(
- str(self.get_dialog_api_furl(url).path),
+ str(furl(url).path),
'/dialog/add',
)
@@ -186,79 +166,32 @@ class TestLaterPayClient(unittest.TestCase):
"product" query param when no `product_key` method param is used.
"""
url = self.lp.get_buy_url(self.item)
- data = self.get_qs_dict(url)
+ data = parse_qs(urlparse(url).query)
self.assertNotIn('product', data)
self.assertEqual(
- str(self.get_dialog_api_furl(url).path),
+ str(furl(url).path),
'/dialog/buy',
)
- def test_get_buy_url_with_use_dialog_api_false(self):
- """
- Assert that `.get_buy_url()` returns a direct buy url, with no
- dialog-api iframe, when `use_dialog_api=False`
- """
- url = self.lp.get_buy_url(self.item, use_dialog_api=False)
+ def test_get_buy_url(self):
+ url = self.lp.get_buy_url(self.item)
self.assertEqual(str(furl(url).path), '/dialog/buy')
- def test_get_add_url_with_use_dialog_api_false(self):
- """
- Assert that `.get_add_url()` returns a direct add url, with no
- dialog-api iframe, when `use_dialog_api=False`
- """
- url = self.lp.get_add_url(self.item, use_dialog_api=False)
+ def test_get_add_url(self):
+ url = self.lp.get_add_url(self.item)
self.assertEqual(str(furl(url).path), '/dialog/add')
def test_get_login_dialog_url_with_use_dialog_api_false(self):
- """
- Assert that `.get_login_dialog_url()` returns a url with no
- dialog-api iframe, when `use_dialog_api=False`
- """
- url = self.lp.get_login_dialog_url('http://example.org',
- use_dialog_api=False)
- self.assertEqual(str(furl(url).path), '/account/dialog/login')
-
- def test_get_login_dialog_url_without_use_dialog_api(self):
- """
- Assert that `.get_login_dialog_url()` returns a url with no
- dialog-api iframe, when `use_dialog_api` is not set (default)
- """
url = self.lp.get_login_dialog_url('http://example.org')
- self.assertEqual(str(furl(url).path), '/dialog-api')
+ self.assertEqual(str(furl(url).path), '/account/dialog/login')
def test_get_logout_dialog_url_with_use_dialog_api_false(self):
- """
- Assert that `.get_logout_dialog_url()` returns a url with no
- dialog-api iframe, when `use_dialog_api=False`
- """
- url = self.lp.get_logout_dialog_url('http://example.org',
- use_dialog_api=False)
- self.assertEqual(str(furl(url).path), '/account/dialog/logout')
-
- def test_get_logout_dialog_url_without_use_dialog_api(self):
- """
- Assert that `.get_logout_dialog_url()` returns a url with no
- dialog-api iframe, when `use_dialog_api` is not set (default)
- """
url = self.lp.get_logout_dialog_url('http://example.org')
- self.assertEqual(str(furl(url).path), '/dialog-api')
-
- def test_get_signup_dialog_url_with_use_dialog_api_false(self):
- """
- Assert that `.get_signup_dialog_url()` returns a url with no
- dialog-api iframe, when `use_dialog_api=False`
- """
- url = self.lp.get_signup_dialog_url('http://example.org',
- use_dialog_api=False)
- self.assertEqual(str(furl(url).path), '/account/dialog/signup')
+ self.assertEqual(str(furl(url).path), '/account/dialog/logout')
- def test_get_signup_dialog_url_without_use_dialog_api(self):
- """
- Assert that `.get_signup_dialog_url()` returns a url with no
- dialog-api iframe, when `use_dialog_api` is not set (default)
- """
+ def test_get_signup_dialog_url(self):
url = self.lp.get_signup_dialog_url('http://example.org')
- self.assertEqual(str(furl(url).path), '/dialog-api')
+ self.assertEqual(str(furl(url).path), '/account/dialog/signup')
@mock.patch('laterpay.signing.sign')
@mock.patch('time.time')
diff --git a/tests/test_signing.py b/tests/test_signing.py
index 2f0fe02..dff73e0 100644
--- a/tests/test_signing.py
+++ b/tests/test_signing.py
@@ -5,8 +5,6 @@ from __future__ import absolute_import, print_function
import hashlib
import unittest
-from six.moves.urllib.parse import parse_qs
-
from laterpay import signing
@@ -80,24 +78,6 @@ class TestSigningHelper(unittest.TestCase):
'346f3d53ad762f3ed3fb7f2427dec2bbfaf0338bb7f91f0460aff15c',
)
- def test_sign_and_encode(self):
- params = {
- u'parĄm1': u'valuĘ',
- 'param2': ['value2', 'value3'],
- 'ts': '1330088810',
- }
- url = u'https://endpoint.com/api'
-
- secret = u'secret' # unicode is what we usually get from api/db..
-
- signed_and_encoded = signing.sign_and_encode(secret, params, url)
-
- self.assertEqual(
- signed_and_encoded,
- 'param2=value2¶m2=value3&par%C4%84m1=valu%C4%98&'
- 'ts=1330088810&hmac=01c928dcdbbf4ba467969ec9607bfdec0563524d93e06df7d8d3c80d'
- )
-
def test_verify_str_signature(self):
params = {
u'parĄm1': u'valuĘ',
@@ -172,71 +152,6 @@ class TestSigningHelper(unittest.TestCase):
false_params['ts'] = '1234567890'
self.assertFalse(signing.verify(false_params['hmac'], secret, false_params, url, method))
- def test_signing_with_item(self):
- secret = '401e9a684fcc49578c1f23176a730abc'
- base_url = 'http://local.laterpaytest.net:8005/dialog/mmss/buy'
- method = 'GET'
-
- # creating this data with ItemDefinition and copy.copy(item.data) doesn't work
- # since it has a purchase_date based on now(), so the signature isn't the same..
- data = {
- 'article_id': 154,
- 'cp': ['laternews'],
- 'jsevents': [1],
- 'pricing': ['EUR200'],
- 'purchase_date': [1398861228815],
- 'title': [u"VIDEO: Rwanda's genocide, 20 years on"],
- 'tref': ['4ebbf443-a12e-4ce9-89e4-999ba93ba1dc'],
- 'ts': ['1398861228'],
- 'url': ['http://local.laterpaytest.net:8003/mmss/154'],
- }
-
- params = signing.sign_and_encode(secret, data, base_url, method)
- expected_string = (
- 'article_id=154&'
- 'cp=laternews&'
- 'jsevents=1&'
- 'pricing=EUR200&'
- 'purchase_date=1398861228815&'
- 'title=VIDEO%3A+Rwanda%27s+genocide%2C+20+years+on&'
- 'tref=4ebbf443-a12e-4ce9-89e4-999ba93ba1dc&'
- 'ts=1398861228&'
- 'url=http%3A%2F%2Flocal.laterpaytest.net%3A8003%2Fmmss%2F154&'
- 'hmac=d51564b41c2a8719fcdcfc6bad46109d3b6c6f78afea4020d5801a3c'
- )
-
- self.assertEqual(expected_string, params)
-
- # expected signature based on params above
- signature = 'd51564b41c2a8719fcdcfc6bad46109d3b6c6f78afea4020d5801a3c'
-
- self.assertTrue(signing.verify(signature, secret, data, base_url, method))
-
- # changing the price in the url
- false_string = (
- 'article_id=154&'
- 'cp=laternews&'
- 'jsevents=1&'
- 'pricing=EUR150&'
- 'purchase_date=1398861228815&'
- 'title=VIDEO%3A+Rwanda%27s+genocide%2C+20+years+on&'
- 'tref=4ebbf443-a12e-4ce9-89e4-999ba93ba1dc&'
- 'ts=1398861228&'
- 'url=http%3A%2F%2Flocal.laterpaytest.net%3A8003%2Fmmss%2F154&'
- 'hmac=4d41f1adcb7c6bf6cf9c5eb15b179fdbec667d53f2749e2845c87315'
- )
- false_params = parse_qs(false_string)
-
- self.assertFalse(signing.verify(signature, secret, false_params, base_url, method))
-
- # changing the base_url
- false_base_url = 'http://local.laterpaytest.net:8005/dialog/mmss/add'
- self.assertFalse(signing.verify(signature, secret, data, false_base_url, method))
-
- # changing http method
- false_method = 'POST'
- self.assertFalse(signing.verify(signature, secret, data, base_url, false_method))
-
def test_normalise_param_structure(self):
params = {
'key1': 'value1',
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 3
} | 4.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements-test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
cookies==2.2.1
coverage==6.2
execnet==1.9.0
flake8==2.6.0
furl==0.4.95
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/laterpay/laterpay-client-python.git@67ebeb9dac24e5fe59d65ca4986c97aa38cfe6b4#egg=laterpay_client
mccabe==0.5.3
mock==2.0.0
orderedmultidict==1.0.1
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pycodestyle==2.0.0
pydocstyle==1.0.0
pyflakes==1.2.3
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
requests==2.27.1
responses==0.5.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: laterpay-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- cookies==2.2.1
- coverage==6.2
- execnet==1.9.0
- flake8==2.6.0
- furl==0.4.95
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mccabe==0.5.3
- mock==2.0.0
- orderedmultidict==1.0.1
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.0.0
- pydocstyle==1.0.0
- pyflakes==1.2.3
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- requests==2.27.1
- responses==0.5.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/laterpay-client-python
| [
"tests/test_client.py::TestLaterPayClient::test_get_add_url",
"tests/test_client.py::TestLaterPayClient::test_get_add_url_no_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_add_url_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_buy_url",
"tests/test_client.py::TestLaterPayClient::test_get_buy_url_no_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_buy_url_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_login_dialog_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_logout_dialog_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_signup_dialog_url"
]
| []
| [
"tests/test_client.py::TestItemDefinition::test_item_definition",
"tests/test_client.py::TestLaterPayClient::test_failure_url_param",
"tests/test_client.py::TestLaterPayClient::test_get_access_data_success",
"tests/test_client.py::TestLaterPayClient::test_get_access_params",
"tests/test_client.py::TestLaterPayClient::test_get_controls_balance_url_all_defaults",
"tests/test_client.py::TestLaterPayClient::test_get_controls_balance_url_all_set",
"tests/test_client.py::TestLaterPayClient::test_get_controls_links_url_all_defaults",
"tests/test_client.py::TestLaterPayClient::test_get_controls_links_url_all_set_long",
"tests/test_client.py::TestLaterPayClient::test_get_controls_links_url_all_set_short",
"tests/test_client.py::TestLaterPayClient::test_get_gettoken_redirect",
"tests/test_client.py::TestLaterPayClient::test_get_web_url_extra_kwargs",
"tests/test_client.py::TestLaterPayClient::test_get_web_url_has_no_none_params",
"tests/test_client.py::TestLaterPayClient::test_has_token",
"tests/test_client.py::TestLaterPayClient::test_transaction_reference",
"tests/test_signing.py::TestSigningHelper::test_create_message_bytestrings",
"tests/test_signing.py::TestSigningHelper::test_create_message_sorting_and_combining_params",
"tests/test_signing.py::TestSigningHelper::test_create_message_unicode",
"tests/test_signing.py::TestSigningHelper::test_create_message_wrong_method",
"tests/test_signing.py::TestSigningHelper::test_normalise_param_structure",
"tests/test_signing.py::TestSigningHelper::test_sign",
"tests/test_signing.py::TestSigningHelper::test_url_verification",
"tests/test_signing.py::TestSigningHelper::test_verify_invalid_unicode_signature",
"tests/test_signing.py::TestSigningHelper::test_verify_str_signature",
"tests/test_signing.py::TestSigningHelper::test_verify_unicode_signature"
]
| []
| MIT License | 815 | [
"laterpay/__init__.py",
"CHANGELOG.md",
"laterpay/signing.py"
]
| [
"laterpay/__init__.py",
"CHANGELOG.md",
"laterpay/signing.py"
]
|
minio__minio-py-414 | d15c965c107f49465c710f9ff22c72120014ff2a | 2016-10-16 09:10:40 | f0493951cfa62af65ecf80cdd2ddd0c7a063b482 | diff --git a/docs/API.md b/docs/API.md
index 0a1f8cf..b5d578d 100644
--- a/docs/API.md
+++ b/docs/API.md
@@ -110,6 +110,7 @@ __Parameters__
| | | ap-northeast-1|
| | | ap-southeast-2|
| | | sa-east-1|
+| | | cn-north-1|
__Example__
diff --git a/minio/helpers.py b/minio/helpers.py
index 071d5d6..c0dd62f 100644
--- a/minio/helpers.py
+++ b/minio/helpers.py
@@ -207,6 +207,7 @@ AWS_S3_ENDPOINT_MAP = {
'ap-southeast-1': 's3-ap-southeast-1.amazonaws.com',
'ap-northeast-1': 's3-ap-northeast-1.amazonaws.com',
'ap-northeast-2': 's3-ap-northeast-2.amazonaws.com',
+ 'cn-north-1': 's3.cn-north-1.amazonaws.com.cn'
}
def get_s3_endpoint(region):
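A quick usage sketch of the extended lookup (the unknown-region fallback to `s3.amazonaws.com` is inferred from the unit tests further down, not from the helper body, which is elided here):

```python
from minio.helpers import get_s3_endpoint

assert get_s3_endpoint('cn-north-1') == 's3.cn-north-1.amazonaws.com.cn'  # new entry
assert get_s3_endpoint('eu-west-1') == 's3-eu-west-1.amazonaws.com'
assert get_s3_endpoint('foo') == 's3.amazonaws.com'  # fallback behavior per the tests
```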
| Implement AWS China region support
Our Python client library should also support the AWS China region endpoint `s3.cn-north-1.amazonaws.com.cn`.
It has been implemented in minio-go, implementation reference: https://github.com/minio/minio-go/commit/190ac7b4b7e7ba8a669553674b748bdcace6c59e | minio/minio-py | diff --git a/tests/unit/get_s3_endpoint_test.py b/tests/unit/get_s3_endpoint_test.py
new file mode 100644
index 0000000..775a889
--- /dev/null
+++ b/tests/unit/get_s3_endpoint_test.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Minio Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 Minio, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from unittest import TestCase
+from nose.tools import eq_
+from minio.helpers import get_s3_endpoint, is_valid_endpoint
+
+class GetS3Endpoint(TestCase):
+ def test_get_s3_endpoint(self):
+ eq_('s3.amazonaws.com', get_s3_endpoint('us-east-1'))
+ eq_('s3.amazonaws.com', get_s3_endpoint('foo'))
+ eq_('s3-eu-west-1.amazonaws.com', get_s3_endpoint('eu-west-1'))
+ eq_('s3.cn-north-1.amazonaws.com.cn', get_s3_endpoint('cn-north-1'))
+
+ def test_is_valid_endpoint(self):
+ eq_(True, is_valid_endpoint('s3.amazonaws.com'))
+ eq_(True, is_valid_endpoint('s3.cn-north-1.amazonaws.com.cn'))
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"mock",
"fake-factory",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2015.4.28
fake-factory==9999.9.9
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/minio/minio-py.git@d15c965c107f49465c710f9ff22c72120014ff2a#egg=minio
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: minio-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2015.04.28
- fake-factory==9999.9.9
- mock==5.2.0
- nose==1.3.7
- pytz==2025.2
- urllib3==1.26.20
prefix: /opt/conda/envs/minio-py
| [
"tests/unit/get_s3_endpoint_test.py::GetS3Endpoint::test_get_s3_endpoint"
]
| []
| [
"tests/unit/get_s3_endpoint_test.py::GetS3Endpoint::test_is_valid_endpoint"
]
| []
| Apache License 2.0 | 816 | [
"minio/helpers.py",
"docs/API.md"
]
| [
"minio/helpers.py",
"docs/API.md"
]
|
|
mesonbuild__meson-919 | ba578db0314aa85729ca13df734b2bd359d2aa35 | 2016-10-16 14:35:21 | b2a39dd06ee139152d5522c961e6ba04acdec791 | TingPing: ```python
File "/home/tingping/.local/bin/mesonintrospect", line 4, in <module>
__import__('pkg_resources').run_script('meson==0.36.0.dev1', 'mesonintrospect')
File "/usr/lib/python3.5/site-packages/pkg_resources/__init__.py", line 746, in run_script
self.require(requires)[0].run_script(script_name, ns)
File "/usr/lib/python3.5/site-packages/pkg_resources/__init__.py", line 1501, in run_script
exec(code, namespace, namespace)
File "/home/tingping/.local/lib/python3.5/site-packages/meson-0.36.0.dev1-py3.5.egg/EGG-INFO/scripts/mesonintrospect", line 20, in <module>
sys.exit(mintro.run(sys.argv[1:]))
File "/home/tingping/.local/lib/python3.5/site-packages/meson-0.36.0.dev1-py3.5.egg/mesonbuild/mintro.py", line 204, in run
list_targets(coredata, builddata, installdata)
File "/home/tingping/.local/lib/python3.5/site-packages/meson-0.36.0.dev1-py3.5.egg/mesonbuild/mintro.py", line 84, in list_targets
t['install_filename'] = determine_installed_path(target, installdata)
File "/home/tingping/.local/lib/python3.5/site-packages/meson-0.36.0.dev1-py3.5.egg/mesonbuild/mintro.py", line 51, in determine_installed_path
raise RuntimeError('Something weird happened. File a bug.')
RuntimeError: Something weird happened. File a bug.
```
jpakkane: Which project did this happen on?
TingPing: gnome-builder, it has quite a few targets and worked with current master. | diff --git a/authors.txt b/authors.txt
index 5931481af..f591d13db 100644
--- a/authors.txt
+++ b/authors.txt
@@ -53,3 +53,4 @@ Gautier Pelloux-Prayer
Alexandre Foley
Jouni Kosonen
Aurelien Jarno
+Mark Schulte
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index fd719241e..e91b44b8d 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -200,11 +200,15 @@ class Backend():
with open(exe_data, 'wb') as f:
if isinstance(exe, dependencies.ExternalProgram):
exe_fullpath = exe.fullpath
+ exe_needs_wrapper = False
elif isinstance(exe, (build.BuildTarget, build.CustomTarget)):
exe_fullpath = [self.get_target_filename_abs(exe)]
+ exe_needs_wrapper = exe.is_cross
else:
exe_fullpath = [exe]
- is_cross = self.environment.is_cross_build() and \
+ exe_needs_wrapper = False
+ is_cross = exe_needs_wrapper and \
+ self.environment.is_cross_build() and \
self.environment.cross_info.need_cross_compiler() and \
self.environment.cross_info.need_exe_wrapper()
if is_cross:
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 1d687d825..c3867e0ab 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -1267,6 +1267,9 @@ class CustomTarget:
def get_outputs(self):
return self.output
+ def get_filename(self):
+ return self.output[0]
+
def get_sources(self):
return self.sources
diff --git a/mesonbuild/compilers.py b/mesonbuild/compilers.py
index 2d7a08079..d7cd1f78e 100644
--- a/mesonbuild/compilers.py
+++ b/mesonbuild/compilers.py
@@ -1950,7 +1950,8 @@ class GnuCCompiler(GnuCompiler, CCompiler):
def get_options(self):
opts = {'c_std' : coredata.UserComboOption('c_std', 'C language standard to use',
- ['none', 'c89', 'c99', 'c11', 'gnu89', 'gnu99', 'gnu11'],
+ ['none', 'c89', 'c99', 'c11',
+ 'gnu89', 'gnu99', 'gnu11'],
'none')}
if self.gcc_type == GCC_MINGW:
opts.update({
@@ -2084,7 +2085,8 @@ class ClangCCompiler(ClangCompiler, CCompiler):
def get_options(self):
return {'c_std' : coredata.UserComboOption('c_std', 'C language standard to use',
- ['none', 'c89', 'c99', 'c11'],
+ ['none', 'c89', 'c99', 'c11',
+ 'gnu89', 'gnu99', 'gnu11',],
'none')}
def get_option_compile_args(self, options):
@@ -2111,8 +2113,9 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler):
def get_options(self):
return {'cpp_std' : coredata.UserComboOption('cpp_std', 'C++ language standard to use',
- ['none', 'c++03', 'c++11', 'c++14', 'c++1z'],
- 'none')}
+ ['none', 'c++03', 'c++11', 'c++14', 'c++1z',
+ 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++1z'],
+ 'none')}
def get_option_compile_args(self, options):
args = []
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index a18912eb1..492bf3f0e 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -20,7 +20,7 @@ Currently only works for the Ninja backend. Others use generated
project files and don't need this info."""
import json, pickle
-from . import coredata, build, mesonlib
+from . import coredata, build
import argparse
import sys, os
@@ -41,7 +41,20 @@ parser.add_argument('--dependencies', action='store_true', dest='dependencies',
help='list external dependencies.')
parser.add_argument('args', nargs='+')
-def list_targets(coredata, builddata):
+def determine_installed_path(target, installdata):
+ install_target = None
+ for i in installdata.targets:
+ if os.path.split(i[0])[1] == target.get_filename(): # FIXME, might clash due to subprojects.
+ install_target = i
+ break
+ if install_target is None:
+ raise RuntimeError('Something weird happened. File a bug.')
+ fname = i[0]
+ outdir = i[1]
+ outname = os.path.join(installdata.prefix, outdir, os.path.split(fname)[-1])
+ return outname
+
+def list_targets(coredata, builddata, installdata):
tlist = []
for (idname, target) in builddata.get_targets().items():
t = {}
@@ -68,6 +81,7 @@ def list_targets(coredata, builddata):
t['type'] = typename
if target.should_install():
t['installed'] = True
+ t['install_filename'] = determine_installed_path(target, installdata)
else:
t['installed'] = False
tlist.append(t)
@@ -173,6 +187,7 @@ def run(args):
bdir = ''
corefile = os.path.join(bdir, 'meson-private/coredata.dat')
buildfile = os.path.join(bdir, 'meson-private/build.dat')
+ installfile = os.path.join(bdir, 'meson-private/install.dat')
testfile = os.path.join(bdir, 'meson-private/meson_test_setup.dat')
benchmarkfile = os.path.join(bdir, 'meson-private/meson_benchmark_setup.dat')
with open(corefile, 'rb') as f:
@@ -180,11 +195,13 @@ def run(args):
with open(buildfile, 'rb') as f:
builddata = pickle.load(f)
with open(testfile, 'rb') as f:
- testdata = pickle.load(f)
+ testdata = pickle.load(f)
with open(benchmarkfile, 'rb') as f:
- benchmarkdata = pickle.load(f)
+ benchmarkdata = pickle.load(f)
+ with open(installfile, 'rb') as f:
+ installdata = pickle.load(f)
if options.list_targets:
- list_targets(coredata, builddata)
+ list_targets(coredata, builddata, installdata)
elif options.target_files is not None:
list_target_files(options.target_files, coredata, builddata)
elif options.buildsystem_files:
| mesonintrospect: Expose install_dir for targets
As mentioned on the [gnome-builder issue](https://bugzilla.gnome.org/show_bug.cgi?id=743280), this would be useful information. | mesonbuild/meson | diff --git a/run_unittests.py b/run_unittests.py
index 9391eaef7..0e3b7d5be 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -43,6 +43,7 @@ class LinuxlikeTests(unittest.TestCase):
self.builddir = tempfile.mkdtemp()
self.meson_command = [sys.executable, os.path.join(src_root, 'meson.py')]
self.mconf_command = [sys.executable, os.path.join(src_root, 'mesonconf.py')]
+ self.mintro_command = [sys.executable, os.path.join(src_root, 'mesonintrospect.py')]
self.ninja_command = [detect_ninja(), '-C', self.builddir]
self.common_test_dir = os.path.join(src_root, 'test cases/common')
self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
@@ -67,6 +68,10 @@ class LinuxlikeTests(unittest.TestCase):
with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile:
return json.load(ifile)
+ def introspect(self, arg):
+ out = subprocess.check_output(self.mintro_command + [arg, self.builddir])
+ return json.loads(out.decode('utf-8'))
+
def test_basic_soname(self):
testdir = os.path.join(self.common_test_dir, '4 shared')
self.init(testdir)
@@ -147,5 +152,14 @@ class LinuxlikeTests(unittest.TestCase):
self.assertTrue(compdb[3]['file'].endswith("libfile4.c"))
# FIXME: We don't have access to the linker command
+ def test_install_introspection(self):
+ testdir = os.path.join(self.common_test_dir, '8 install')
+ self.init(testdir)
+ intro = self.introspect('--targets')
+ if intro[0]['type'] == 'executable':
+ intro = intro[::-1]
+ self.assertEqual(intro[0]['install_filename'], '/usr/local/libtest/libstat.a')
+ self.assertEqual(intro[1]['install_filename'], '/usr/local/bin/prog')
+
if __name__ == '__main__':
unittest.main()
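For reference, the shape the test above expects from `mesonintrospect --targets` — a rough sketch in which only the asserted keys are guaranteed (`name` is an assumed key added for illustration):

```python
# One entry of the JSON list printed by `mesonintrospect --targets <builddir>`:
{
    'name': 'prog',          # assumed; not asserted by the test
    'type': 'executable',
    'installed': True,
    'install_filename': '/usr/local/bin/prog',
}
```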
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 5
} | 0.35 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y ninja-build python3"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/mesonbuild/meson.git@ba578db0314aa85729ca13df734b2bd359d2aa35#egg=meson
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: meson
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/meson
| [
"run_unittests.py::LinuxlikeTests::test_install_introspection"
]
| [
"run_unittests.py::LinuxlikeTests::test_pkgconfig_gen",
"run_unittests.py::LinuxlikeTests::test_vala_c_warnings"
]
| [
"run_unittests.py::LinuxlikeTests::test_basic_soname",
"run_unittests.py::LinuxlikeTests::test_custom_soname",
"run_unittests.py::LinuxlikeTests::test_pic",
"run_unittests.py::LinuxlikeTests::test_static_compile_order"
]
| []
| Apache License 2.0 | 817 | [
"mesonbuild/mintro.py",
"authors.txt",
"mesonbuild/backend/backends.py",
"mesonbuild/build.py",
"mesonbuild/compilers.py"
]
| [
"mesonbuild/mintro.py",
"authors.txt",
"mesonbuild/backend/backends.py",
"mesonbuild/build.py",
"mesonbuild/compilers.py"
]
|
Backblaze__B2_Command_Line_Tool-259 | aad90d09c01c9a8fe4e91fca3d6a1a0a85ded02a | 2016-10-16 16:45:57 | 4f2a17eb0342ba6efed8b97442dd20c4e80c1845 | diff --git a/README.md b/README.md
index a2de296..768e21b 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,7 @@ The command-line tool that gives easy access to all of the capabilities of B2 Cl
This program provides command-line access to the B2 service.
-Version 0.6.9
+Version 0.6.7
# Installation
@@ -33,7 +33,7 @@ this:
b2 clear_account
b2 create_bucket <bucketName> [allPublic | allPrivate]
b2 delete_bucket <bucketName>
- b2 delete_file_version [<fileName>] <fileId>
+ b2 delete_file_version <fileName> <fileId>
b2 download_file_by_id [--noProgress] <fileId> <localFileName>
b2 download_file_by_name [--noProgress] <bucketName> <fileName> <localFileName>
b2 get_file_info <fileId>
@@ -48,7 +48,7 @@ this:
b2 make_url <fileId>
b2 sync [--delete] [--keepDays N] [--skipNewer] [--replaceNewer] \
[--compareVersions <option>] [--threads N] [--noProgress] \
- [--excludeRegex <regex> [--includeRegex <regex>]] <source> <destination>
+ [--excludeRegex <regex>] <source> <destination>
b2 update_bucket <bucketName> [allPublic | allPrivate]
b2 upload_file [--sha1 <sha1sum>] [--contentType <contentType>] \
[--info <key>=<value>]* [--minPartSize N] \
diff --git a/b2/api.py b/b2/api.py
index 7bf33da..a985187 100644
--- a/b2/api.py
+++ b/b2/api.py
@@ -150,12 +150,13 @@ class B2Api(object):
self.cache.save_bucket(bucket)
return bucket
- def download_file_by_id(self, file_id, download_dest, progress_listener=None):
+ def download_file_by_id(self, file_id, download_dest, progress_listener=None, range_=None):
progress_listener = progress_listener or DoNothingProgressListener()
self.session.download_file_by_id(
file_id,
DownloadDestProgressWrapper(download_dest, progress_listener),
- url_factory=self.account_info.get_download_url
+ url_factory=self.account_info.get_download_url,
+ range_=range_,
)
progress_listener.close()
diff --git a/b2/bucket.py b/b2/bucket.py
index 722bd90..0d29fe4 100644
--- a/b2/bucket.py
+++ b/b2/bucket.py
@@ -112,16 +112,17 @@ class Bucket(object):
def cancel_large_file(self, file_id):
return self.api.cancel_large_file(file_id)
- def download_file_by_id(self, file_id, download_dest, progress_listener=None):
- self.api.download_file_by_id(file_id, download_dest, progress_listener)
+ def download_file_by_id(self, file_id, download_dest, progress_listener=None, range_=None):
+ self.api.download_file_by_id(file_id, download_dest, progress_listener, range_=range_)
- def download_file_by_name(self, file_name, download_dest, progress_listener=None):
+ def download_file_by_name(self, file_name, download_dest, progress_listener=None, range_=None):
progress_listener = progress_listener or DoNothingProgressListener()
self.api.session.download_file_by_name(
self.name,
file_name,
DownloadDestProgressWrapper(download_dest, progress_listener),
- url_factory=self.api.account_info.get_download_url
+ url_factory=self.api.account_info.get_download_url,
+ range_=range_,
)
progress_listener.close()
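A hedged usage sketch of the new ranged download (method signatures as in the hunks above; the `(start, end)` tuple is forwarded down to the HTTP `Range` header built in `raw_api.py` below, and the `bytes_io` attribute on `DownloadDestBytes` comes from `download_dest.py`):

```python
from b2.download_dest import DownloadDestBytes

dest = DownloadDestBytes()
# `bucket` and `file_id` are placeholders obtained elsewhere via the B2 API.
bucket.download_file_by_id(file_id, dest, range_=(0, 99))
data = dest.bytes_io.getvalue()  # assumes BytesCapture behaves like io.BytesIO
```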
diff --git a/b2/download_dest.py b/b2/download_dest.py
index d19a65e..26181b9 100644
--- a/b2/download_dest.py
+++ b/b2/download_dest.py
@@ -9,14 +9,15 @@
######################################################################
import os
-from abc import (ABCMeta, abstractmethod)
+from abc import abstractmethod
import six
-from .progress import (StreamWithProgress)
+from .utils import B2TraceMetaAbstract, limit_trace_arguments
+from .progress import StreamWithProgress
[email protected]_metaclass(ABCMeta)
[email protected]_metaclass(B2TraceMetaAbstract)
class AbstractDownloadDestination(object):
"""
Interface to a destination for a downloaded file.
@@ -26,9 +27,10 @@ class AbstractDownloadDestination(object):
"""
@abstractmethod
+ @limit_trace_arguments(skip=['content_sha1',])
def open(
self, file_id, file_name, content_length, content_type, content_sha1, file_info,
- mod_time_millis
+ mod_time_millis, range_=None
):
"""
Returns a binary file-like object to use for writing the contents of
@@ -40,6 +42,8 @@ class AbstractDownloadDestination(object):
:param content_sha1: the content sha1 from the headers (or "none" for large files)
:param file_info: the user file info from the headers
:param mod_time_millis: the desired file modification date in ms since 1970-01-01
+ :param range_: starting and ending offsets of the received file contents. Usually None,
+ which means that the whole file is downloaded.
:return: None
"""
@@ -82,7 +86,7 @@ class DownloadDestLocalFile(AbstractDownloadDestination):
def open(
self, file_id, file_name, content_length, content_type, content_sha1, file_info,
- mod_time_millis
+ mod_time_millis, range_=None
):
self.file_id = file_id
self.file_name = file_name
@@ -90,6 +94,7 @@ class DownloadDestLocalFile(AbstractDownloadDestination):
self.content_type = content_type
self.content_sha1 = content_sha1
self.file_info = file_info
+ self.range_ = range_
return OpenLocalFileForWriting(self.local_file_path, mod_time_millis)
@@ -116,7 +121,7 @@ class DownloadDestBytes(AbstractDownloadDestination):
def open(
self, file_id, file_name, content_length, content_type, content_sha1, file_info,
- mod_time_millis
+ mod_time_millis, range_=None
):
self.file_id = file_id
self.file_name = file_name
@@ -126,6 +131,7 @@ class DownloadDestBytes(AbstractDownloadDestination):
self.file_info = file_info
self.mod_time_millis = mod_time_millis
self.bytes_io = BytesCapture()
+ self.range_ = range_
return self.bytes_io
@@ -136,11 +142,14 @@ class DownloadDestProgressWrapper(AbstractDownloadDestination):
def open(
self, file_id, file_name, content_length, content_type, content_sha1, file_info,
- mod_time_millis
+ mod_time_millis, range_=None
):
- self.progress_listener.set_total_bytes(content_length)
+ total_bytes = content_length
+ if range_ is not None:
+ total_bytes = range_[1] - range_[0]
+ self.progress_listener.set_total_bytes(total_bytes)
stream = self.download_dest.open(
file_id, file_name, content_length, content_type, content_sha1, file_info,
- mod_time_millis
+ mod_time_millis, range_
)
return StreamWithProgress(stream.__enter__(), self.progress_listener)
diff --git a/b2/exception.py b/b2/exception.py
index 487b3ac..3eadb93 100644
--- a/b2/exception.py
+++ b/b2/exception.py
@@ -246,6 +246,10 @@ class TruncatedOutput(TransientErrorMixin, B2Error):
self.file_size,)
+class UnexpectedCloudBehaviour(B2SimpleError):
+ pass
+
+
class UnknownError(B2SimpleError):
pass
diff --git a/b2/raw_api.py b/b2/raw_api.py
index 9451eb1..bbdaabd 100644
--- a/b2/raw_api.py
+++ b/b2/raw_api.py
@@ -23,7 +23,7 @@ import six
from .b2http import (B2Http)
from .download_dest import DownloadDestBytes
-from .exception import (ChecksumMismatch, TruncatedOutput)
+from .exception import ChecksumMismatch, TruncatedOutput, UnexpectedCloudBehaviour
from .utils import b2_url_encode, hex_sha1_of_stream
@@ -156,17 +156,17 @@ class B2RawApi(AbstractRawApi):
fileName=file_name
)
- def download_file_by_id(self, download_url, account_auth_token_or_none, file_id, download_dest):
+ def download_file_by_id(self, download_url, account_auth_token_or_none, file_id, download_dest, range_=None):
url = download_url + '/b2api/v1/b2_download_file_by_id?fileId=' + file_id
- return self._download_file_from_url(url, account_auth_token_or_none, download_dest)
+ return self._download_file_from_url(url, account_auth_token_or_none, download_dest, range_=range_)
def download_file_by_name(
- self, download_url, account_auth_token_or_none, bucket_name, file_name, download_dest
+ self, download_url, account_auth_token_or_none, bucket_name, file_name, download_dest, range_=None
):
url = download_url + '/file/' + bucket_name + '/' + b2_url_encode(file_name)
- return self._download_file_from_url(url, account_auth_token_or_none, download_dest)
+ return self._download_file_from_url(url, account_auth_token_or_none, download_dest, range_=range_)
- def _download_file_from_url(self, url, account_auth_token_or_none, download_dest):
+ def _download_file_from_url(self, url, account_auth_token_or_none, download_dest, range_=None):
"""
Downloads a file from given url and stores it in the given download_destination.
@@ -179,6 +179,13 @@ class B2RawApi(AbstractRawApi):
:return:
"""
request_headers = {}
+ if range_ is not None:
+ assert len(range_) == 2, range_
+ assert (range_[0] + 0) <= (range_[1] + 0), range_ # not strings
+ assert range_[0] >= 0, range_
+ assert range_[1] >= 1, range_
+ request_headers['Range'] = "bytes=%d-%d" % range_
+
if account_auth_token_or_none is not None:
request_headers['Authorization'] = account_auth_token_or_none
@@ -191,6 +198,9 @@ class B2RawApi(AbstractRawApi):
content_type = info['content-type']
content_length = int(info['content-length'])
content_sha1 = info['x-bz-content-sha1']
+ if range_ is not None:
+ if 'Content-Range' not in info:
+ raise UnexpectedCloudBehaviour('Content-Range header was expected')
file_info = dict((k[10:], info[k]) for k in info if k.startswith('x-bz-info-'))
if 'src_last_modified_millis' in file_info:
@@ -204,20 +214,25 @@ class B2RawApi(AbstractRawApi):
with download_dest.open(
file_id, file_name, content_length, content_type, content_sha1, file_info,
- mod_time_millis
+ mod_time_millis, range_=range_
) as file:
for data in response.iter_content(chunk_size=block_size):
file.write(data)
digest.update(data)
bytes_read += len(data)
- if bytes_read != int(info['content-length']):
- raise TruncatedOutput(bytes_read, content_length)
-
- if content_sha1 != 'none' and digest.hexdigest() != content_sha1:
- raise ChecksumMismatch(
- checksum_type='sha1', expected=content_length, actual=digest.hexdigest()
- )
+ if range_ is None:
+ if bytes_read != int(info['content-length']):
+ raise TruncatedOutput(bytes_read, content_length)
+
+ if content_sha1 != 'none' and digest.hexdigest() != content_sha1:
+ raise ChecksumMismatch(
+ checksum_type='sha1', expected=content_length, actual=digest.hexdigest()
+ )
+ else:
+ desired_length = range_[1]-range_[0]
+ if bytes_read != desired_length:
+ raise TruncatedOutput(bytes_read, desired_length)
return dict(
fileId=file_id,
diff --git a/b2/raw_simulator.py b/b2/raw_simulator.py
index aa65cf7..47584b7 100644
--- a/b2/raw_simulator.py
+++ b/b2/raw_simulator.py
@@ -44,7 +44,7 @@ class FileSimulator(object):
def __init__(
self, account_id, bucket_id, file_id, action, name, content_type, content_sha1, file_info,
- data_bytes, upload_timestamp
+ data_bytes, upload_timestamp, range_=None
):
self.account_id = account_id
self.bucket_id = bucket_id
@@ -58,6 +58,9 @@ class FileSimulator(object):
self.file_info = file_info
self.data_bytes = data_bytes
self.upload_timestamp = upload_timestamp
+ self.range_ = range_
+ if range_ is not None:
+ self.data_bytes = data_bytes[range_[0]:range_[1]]
if action == 'start':
self.parts = []
@@ -204,11 +207,11 @@ class BucketSimulator(object):
del self.file_id_to_file[file_id]
return dict(fileId=file_id, fileName=file_name, uploadTimestamp=file_sim.upload_timestamp)
- def download_file_by_id(self, file_id, download_dest):
+ def download_file_by_id(self, file_id, download_dest, range_=None):
file_sim = self.file_id_to_file[file_id]
- self._download_file_sim(download_dest, file_sim)
+ self._download_file_sim(download_dest, file_sim, range_=range_)
- def download_file_by_name(self, file_name, download_dest):
+ def download_file_by_name(self, file_name, download_dest, range_=None):
files = self.list_file_names(file_name, 1)['files']
if len(files) == 0:
raise FileNotPresent(file_name)
@@ -216,14 +219,17 @@ class BucketSimulator(object):
if file_dict['fileName'] != file_name or file_dict['action'] != 'upload':
raise FileNotPresent(file_name)
file_sim = self.file_name_and_id_to_file[(file_name, file_dict['fileId'])]
- self._download_file_sim(download_dest, file_sim)
+ self._download_file_sim(download_dest, file_sim, range_=range_)
- def _download_file_sim(self, download_dest, file_sim):
+ def _download_file_sim(self, download_dest, file_sim, range_=None):
with download_dest.open(
file_sim.file_id, file_sim.name, file_sim.content_length, file_sim.content_type,
- file_sim.content_sha1, file_sim.file_info, file_sim.mod_time_millis()
+ file_sim.content_sha1, file_sim.file_info, file_sim.mod_time_millis(), range_
) as f:
- f.write(file_sim.data_bytes)
+ if range_ is None:
+ f.write(file_sim.data_bytes)
+ else:
+ f.write(file_sim.data_bytes[range_[0]:range_[1]])
def finish_large_file(self, file_id, part_sha1_array):
file_sim = self.file_id_to_file[file_id]
@@ -443,14 +449,14 @@ class RawSimulator(AbstractRawApi):
del self.bucket_id_to_bucket[bucket_id]
return bucket.bucket_dict()
- def download_file_by_id(self, download_url, account_auth_token_or_none, file_id, download_dest):
+ def download_file_by_id(self, download_url, account_auth_token_or_none, file_id, download_dest, range_=None):
# TODO: check auth token if bucket is not public
bucket_id = self.file_id_to_bucket_id[file_id]
bucket = self._get_bucket_by_id(bucket_id)
- bucket.download_file_by_id(file_id, download_dest)
+ bucket.download_file_by_id(file_id, download_dest, range_=range_)
def download_file_by_name(
- self, download_url, account_auth_token_or_none, bucket_name, file_name, download_dest
+ self, download_url, account_auth_token_or_none, bucket_name, file_name, download_dest, range_=None
):
assert download_url == self.DOWNLOAD_URL
# TODO: check auth token if bucket is not public
diff --git a/b2/version.py b/b2/version.py
index 70a2c20..2b7dfef 100644
--- a/b2/version.py
+++ b/b2/version.py
@@ -13,7 +13,7 @@ import sys
# To avoid confusion between official Backblaze releases of this tool and
# the versions on Github, we use the convention that the third number is
# odd for Github, and even for Backblaze releases.
-VERSION = '0.6.9'
+VERSION = '0.6.7'
PYTHON_VERSION = '.'.join(map(str, sys.version_info[:3])) # something like: 2.7.11
diff --git a/setup.py b/setup.py
index f48be52..b4be72c 100644
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,7 @@ setup(
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
- version='0.6.9',
+ version='0.6.7',
description='Command Line Tool for Backblaze B2',
long_description=long_description,
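The patch above threads an optional `range_` pair from the public `download_file_by_id` / `download_file_by_name` calls down to an HTTP `Range` header in `_download_file_from_url`. Below is a minimal, self-contained sketch of that header construction with a slightly simplified version of the patch's sanity checks; the commented caller line mirrors the new partial-download test further down, and nothing here is part of the library API itself.

```python
def build_range_header(range_):
    # Simplified form of the checks added in raw_api.py: a (start, end)
    # pair of non-negative integers, start no greater than end.
    assert len(range_) == 2, range_
    assert 0 <= range_[0] <= range_[1], range_
    return {'Range': 'bytes=%d-%d' % tuple(range_)}

print(build_range_header((3, 9)))  # -> {'Range': 'bytes=3-9'}

# Caller side, as exercised by TestPartialDownload in the test patch below:
# bucket.download_file_by_id(file_id, download, progress_listener, range_=(3, 9))
```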
| Feature request: Optional range parameter for download request
Hi!
Developer of B2Fuse here. An idea was put forth to use the B2 CLI Python API as a backend for B2Fuse, handling its integration against B2. This is an excellent idea, but it requires an additional feature from the B2 CLI: in order to use the B2 CLI Python API in B2Fuse, the download call needs to be able to request part of a file. This should be implemented as an optional feature, since in some cases it will still be necessary to request entire files.
Would this be possible to add?
Best
Sondre | Backblaze/B2_Command_Line_Tool | diff --git a/test/test_bucket.py b/test/test_bucket.py
index f77c35d..dd35586 100644
--- a/test/test_bucket.py
+++ b/test/test_bucket.py
@@ -8,7 +8,7 @@
#
######################################################################
-from __future__ import absolute_import, division, print_function
+from __future__ import absolute_import, division
from nose import SkipTest
import os
@@ -462,6 +462,7 @@ class TestDownload(TestCaseWithBucket):
progress_listener = StubProgressListener()
self.bucket.download_file_by_id(file_info.id_, download, progress_listener)
self.assertEqual("11: 11 closed", progress_listener.get_history())
+ assert download.bytes_io.getvalue() == six.b('hello world')
def test_download_by_id_no_progress(self):
file_info = self.bucket.upload_bytes(six.b('hello world'), 'file1')
@@ -479,3 +480,13 @@ class TestDownload(TestCaseWithBucket):
self.bucket.upload_bytes(six.b('hello world'), 'file1')
download = DownloadDestBytes()
self.bucket.download_file_by_name('file1', download)
+
+
+class TestPartialDownload(TestCaseWithBucket):
+ def test_download_by_id_progress(self):
+ file_info = self.bucket.upload_bytes(six.b('hello world'), 'file1')
+ download = DownloadDestBytes()
+ progress_listener = StubProgressListener()
+ self.bucket.download_file_by_id(file_info.id_, download, progress_listener, range_=(3, 9))
+ self.assertEqual("6: 6 closed", progress_listener.get_history())
+ assert download.bytes_io.getvalue() == six.b('lo wor'), download.bytes_io.getvalue()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 9
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt",
"requirements-test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/Backblaze/B2_Command_Line_Tool.git@aad90d09c01c9a8fe4e91fca3d6a1a0a85ded02a#egg=b2
certifi==2021.5.30
charset-normalizer==2.0.12
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
logfury==1.0.1
mock==5.2.0
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyflakes==3.0.1
pyparsing==3.1.4
pytest==7.0.1
requests==2.27.1
six==1.17.0
tomli==1.2.3
tqdm==4.64.1
typing_extensions==4.1.1
urllib3==1.26.20
yapf==0.32.0
zipp==3.6.0
| name: B2_Command_Line_Tool
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- logfury==1.0.1
- mock==5.2.0
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyflakes==3.0.1
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.27.1
- six==1.17.0
- tomli==1.2.3
- tqdm==4.64.1
- typing-extensions==4.1.1
- urllib3==1.26.20
- yapf==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/B2_Command_Line_Tool
| [
"test/test_bucket.py::TestPartialDownload::test_download_by_id_progress"
]
| []
| [
"test/test_bucket.py::TestReauthorization::testCreateBucket",
"test/test_bucket.py::TestListParts::testEmpty",
"test/test_bucket.py::TestListParts::testThree",
"test/test_bucket.py::TestUploadPart::test_error_in_state",
"test/test_bucket.py::TestListUnfinished::test_empty",
"test/test_bucket.py::TestListUnfinished::test_one",
"test/test_bucket.py::TestListUnfinished::test_three",
"test/test_bucket.py::TestLs::test_delete_file_version",
"test/test_bucket.py::TestLs::test_empty",
"test/test_bucket.py::TestLs::test_hidden_file",
"test/test_bucket.py::TestLs::test_one_file_at_root",
"test/test_bucket.py::TestLs::test_started_large_file",
"test/test_bucket.py::TestLs::test_three_files_at_root",
"test/test_bucket.py::TestLs::test_three_files_in_dir",
"test/test_bucket.py::TestLs::test_three_files_multiple_versions",
"test/test_bucket.py::TestUpload::test_upload_bytes",
"test/test_bucket.py::TestUpload::test_upload_bytes_progress",
"test/test_bucket.py::TestUpload::test_upload_dead_symlink",
"test/test_bucket.py::TestUpload::test_upload_fifo",
"test/test_bucket.py::TestUpload::test_upload_file_one_fatal_error",
"test/test_bucket.py::TestUpload::test_upload_file_too_many_retryable_errors",
"test/test_bucket.py::TestUpload::test_upload_large",
"test/test_bucket.py::TestUpload::test_upload_large_resume",
"test/test_bucket.py::TestUpload::test_upload_large_resume_all_parts_there",
"test/test_bucket.py::TestUpload::test_upload_large_resume_file_info",
"test/test_bucket.py::TestUpload::test_upload_large_resume_file_info_does_not_match",
"test/test_bucket.py::TestUpload::test_upload_large_resume_no_parts",
"test/test_bucket.py::TestUpload::test_upload_large_resume_part_does_not_match",
"test/test_bucket.py::TestUpload::test_upload_large_resume_wrong_part_size",
"test/test_bucket.py::TestUpload::test_upload_local_file",
"test/test_bucket.py::TestUpload::test_upload_one_retryable_error",
"test/test_bucket.py::TestDownload::test_download_by_id_no_progress",
"test/test_bucket.py::TestDownload::test_download_by_id_progress",
"test/test_bucket.py::TestDownload::test_download_by_name_no_progress",
"test/test_bucket.py::TestDownload::test_download_by_name_progress"
]
| []
| MIT License | 818 | [
"b2/download_dest.py",
"b2/version.py",
"setup.py",
"b2/api.py",
"b2/bucket.py",
"README.md",
"b2/raw_simulator.py",
"b2/exception.py",
"b2/raw_api.py"
]
| [
"b2/download_dest.py",
"b2/version.py",
"setup.py",
"b2/api.py",
"b2/bucket.py",
"README.md",
"b2/raw_simulator.py",
"b2/exception.py",
"b2/raw_api.py"
]
|
|
napjon__krisk-58 | 8497da2333a8265b2e19c87dcec3bae20b8d4059 | 2016-10-17 08:32:31 | a676433768a62b61f5861c68c127e40970914764 | diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index 924e015..68e3c41 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -43,6 +43,9 @@ def make_chart(df, **kwargs):
if kwargs.get('y', None):
chart.set_ylabel(kwargs['y'])
+ if kwargs['type'] == 'line':
+ chart.set_tooltip_style(trigger='axis',axis_pointer='shadow')
+
if kwargs['type'] in ['bar', 'line', 'hist']:
set_bar_line_chart(chart, df, **kwargs)
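For reference, the two lines added above make hovering anywhere along the x axis trigger the tooltip, with a shadow-style axis pointer. The following sketch shows the slice of the chart's option dictionary this is expected to produce, reconstructed from the assertions in the test patch below rather than from krisk's internals:

```python
# Expected entries in chart.option['tooltip'] after kk.line(...),
# per the test assertions below.
expected_tooltip = {
    'trigger': 'axis',
    'axisPointer': {'type': 'shadow'},
}
```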
| Line plots should default to a tooltip trigger of 'axis' and an axis pointer of type 'shadow' | napjon/krisk | diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 50435da..13b78e3 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -92,6 +92,8 @@ def test_line(gapminder):
annotate='all')
opt = read_option_tests('line.json')
assert_barline_data(p, opt)
+ assert p.option['tooltip']['axisPointer']['type'] == 'shadow'
+ assert p.option['tooltip']['trigger'] == 'axis'
def test_smooth_line(gapminder):
@@ -134,6 +136,7 @@ def test_sort_bar_line(gapminder):
'name': 'Africa',
'type': 'line'}
+
def test_hist(gapminder):
p1 = kk.hist(gapminder,'lifeExp',bins=10)
opt1 = read_option_tests('hist_x.json')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"pytest"
],
"pre_install": [
"pip install cython"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
Cython==3.0.12
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@8497da2333a8265b2e19c87dcec3bae20b8d4059#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- cython==3.0.12
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_line"
]
| []
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_trendline",
"krisk/tests/test_plot.py::test_smooth_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_sort_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_density",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| BSD 3-Clause "New" or "Revised" License | 819 | [
"krisk/plot/make_chart.py"
]
| [
"krisk/plot/make_chart.py"
]
|
|
unnonouno__jqp-19 | 89b6ea235226cbc51f3f47f8051bcd8305ff447f | 2016-10-17 14:28:00 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | coveralls:
[Coverage Status](https://coveralls.io/builds/8370776)
Coverage increased (+14.3%) to 84.848% when pulling **d332fb358942fb35d8d2c960728665cc126f6192 on error-handling** into **89b6ea235226cbc51f3f47f8051bcd8305ff447f on master**.
| diff --git a/jqp/__init__.py b/jqp/__init__.py
index 9638c44..cdcc02e 100644
--- a/jqp/__init__.py
+++ b/jqp/__init__.py
@@ -6,19 +6,44 @@ import sys
__version__ = '0.0.0.1'
+def _exit(error, return_code, message):
+ sys.stderr.write(message)
+ sys.stderr.write('\nOriginal error: ')
+ sys.stderr.write(str(error))
+ sys.stderr.write('\n')
+ sys.exit(return_code)
+
+
def run(in_io, out_io, cmd):
- for line in in_io:
+ for i, line in enumerate(in_io):
if line.strip() == '':
continue
- js = json.loads(line)
- out = eval(cmd, {'j': js})
- json.dump(out, out_io)
+
+ line_no = i + 1
+ try:
+ js = json.loads(line)
+ except Exception as e:
+ _exit(e, 4, 'Parse error: line %d' % line_no)
+
+ try:
+ out = eval(cmd, {'j': js})
+ except Exception as e:
+ _exit(e, 3, 'Cannot execute command: line %d' % line_no)
+
+ try:
+ json.dump(out, out_io)
+ except Exception as e:
+ _exit(e, 3, 'Cannot dump result: line %d' % line_no)
+
out_io.write('\n')
def main():
parser = argparse.ArgumentParser()
parser.add_argument('cmd')
+ parser.add_argument(
+ '--version', action='version', version='jqp %s' % __version__,
+ help='show version and exit')
args = parser.parse_args()
run(sys.stdin, sys.stdout, args.cmd)
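The `_exit` helper above replaces raw tracebacks with a short message plus the stringified original error, then exits with a distinct status per failure class: 4 for JSON parse errors and 3 for command execution or dump failures. A small sketch of checking those codes from Python, assuming a `jqp` executable on the PATH; the input is illustrative:

```python
import subprocess

# Feed invalid JSON on stdin; the patched tool exits with code 4 instead
# of printing a traceback (see test_parse_error in the test patch below).
proc = subprocess.run(['jqp', 'j'], input='invalid\n',
                      capture_output=True, text=True)
print(proc.returncode)      # expected: 4
print(proc.stderr.strip())  # one-line message, no stack trace
```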
| Error handling
Don't show a stack trace. | unnonouno/jqp | diff --git a/tests/run_test.py b/tests/run_test.py
index 185ce17..31ceb87 100644
--- a/tests/run_test.py
+++ b/tests/run_test.py
@@ -15,3 +15,24 @@ class RunTest(unittest.TestCase):
outputs = StringIO()
jqp.run(inputs, outputs, 'j["name"]')
self.assertEqual(outputs.getvalue(), '"Taro"\n')
+
+ def test_parse_error(self):
+ inputs = StringIO('invalid\n')
+ outputs = StringIO()
+ with self.assertRaises(SystemExit) as e:
+ jqp.run(inputs, outputs, 'j')
+ self.assertEqual(e.exception.code, 4)
+
+ def test_execution_error(self):
+ inputs = StringIO('1\n')
+ outputs = StringIO()
+ with self.assertRaises(SystemExit) as e:
+ jqp.run(inputs, outputs, 'invalid')
+ self.assertEqual(e.exception.code, 3)
+
+ def test_dump_error(self):
+ inputs = StringIO('1\n')
+ outputs = StringIO()
+ with self.assertRaises(SystemExit) as e:
+ jqp.run(inputs, outputs, 'lambda: 0')
+ self.assertEqual(e.exception.code, 3)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/unnonouno/jqp.git@89b6ea235226cbc51f3f47f8051bcd8305ff447f#egg=jqp
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: jqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/jqp
| [
"tests/run_test.py::RunTest::test_dump_error",
"tests/run_test.py::RunTest::test_execution_error",
"tests/run_test.py::RunTest::test_parse_error"
]
| []
| [
"tests/run_test.py::RunTest::test1"
]
| []
| MIT License | 820 | [
"jqp/__init__.py"
]
| [
"jqp/__init__.py"
]
|
scrapy__scrapy-2331 | 7e20725eb78cb34db60944cd1f155522fcf3b9f5 | 2016-10-18 14:13:49 | d7b26edf6b419e379a7a0a425093f02cac2fcf33 | diff --git a/docs/topics/email.rst b/docs/topics/email.rst
index 18d2f8084..aac93a91a 100644
--- a/docs/topics/email.rst
+++ b/docs/topics/email.rst
@@ -35,12 +35,6 @@ And here is how to use it to send an e-mail (without attachments)::
mailer.send(to=["someone@example.com"], subject="Some subject", body="Some body", cc=["another@example.com"])
-.. note::
- As shown in the example above, ``to`` and ``cc`` need to be lists
- of email addresses, not single addresses, and even for one recipient,
- i.e. ``to="someone@example.com"`` will not work.
-
-
MailSender class reference
==========================
@@ -87,13 +81,13 @@ uses `Twisted non-blocking IO`_, like the rest of the framework.
Send email to the given recipients.
:param to: the e-mail recipients
- :type to: list
+ :type to: str or list of str
:param subject: the subject of the e-mail
:type subject: str
:param cc: the e-mails to CC
- :type cc: list
+ :type cc: str or list of str
:param body: the e-mail body
:type body: str
diff --git a/scrapy/mail.py b/scrapy/mail.py
index c6339f25b..0bb395521 100644
--- a/scrapy/mail.py
+++ b/scrapy/mail.py
@@ -21,6 +21,8 @@ else:
from twisted.internet import defer, reactor, ssl
+from .utils.misc import arg_to_iter
+
logger = logging.getLogger(__name__)
@@ -48,6 +50,10 @@ class MailSender(object):
msg = MIMEMultipart()
else:
msg = MIMENonMultipart(*mimetype.split('/', 1))
+
+ to = list(arg_to_iter(to))
+ cc = list(arg_to_iter(cc))
+
msg['From'] = self.mailfrom
msg['To'] = COMMASPACE.join(to)
msg['Date'] = formatdate(localtime=True)
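With `arg_to_iter` normalizing both arguments, the list form and a bare string now behave the same for a single recipient. A short usage sketch matching `test_send_single_values_to_and_cc` in the test patch below; the addresses are placeholders:

```python
from scrapy.mail import MailSender

mailer = MailSender(debug=True)
# The list form keeps working as before...
mailer.send(to=['someone@example.com'], subject='subject', body='body')
# ...and with this patch a plain string is accepted for to= and cc= too.
mailer.send(to='someone@example.com', cc='another@example.com',
            subject='subject', body='body')
```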
| Accept single values for "to" and "cc" arguments when sending emails
Follow up to https://github.com/scrapy/scrapy/pull/2271#issuecomment-248341246
> What about checking for strings and raising an error?
> Passing a string is an easy mistake to make.
> Or maybe we can support passing a single string. | scrapy/scrapy | diff --git a/tests/test_mail.py b/tests/test_mail.py
index bd7e49621..b139e98d8 100644
--- a/tests/test_mail.py
+++ b/tests/test_mail.py
@@ -10,7 +10,8 @@ class MailSenderTest(unittest.TestCase):
def test_send(self):
mailsender = MailSender(debug=True)
- mailsender.send(to=['[email protected]'], subject='subject', body='body', _callback=self._catch_mail_sent)
+ mailsender.send(to=['[email protected]'], subject='subject', body='body',
+ _callback=self._catch_mail_sent)
assert self.catched_msg
@@ -24,9 +25,16 @@ class MailSenderTest(unittest.TestCase):
self.assertEqual(msg.get_payload(), 'body')
self.assertEqual(msg.get('Content-Type'), 'text/plain')
+ def test_send_single_values_to_and_cc(self):
+ mailsender = MailSender(debug=True)
+ mailsender.send(to='[email protected]', subject='subject', body='body',
+ cc='[email protected]', _callback=self._catch_mail_sent)
+
def test_send_html(self):
mailsender = MailSender(debug=True)
- mailsender.send(to=['[email protected]'], subject='subject', body='<p>body</p>', mimetype='text/html', _callback=self._catch_mail_sent)
+ mailsender.send(to=['[email protected]'], subject='subject',
+ body='<p>body</p>', mimetype='text/html',
+ _callback=self._catch_mail_sent)
msg = self.catched_msg['msg']
self.assertEqual(msg.get_payload(), '<p>body</p>')
@@ -90,7 +98,8 @@ class MailSenderTest(unittest.TestCase):
mailsender = MailSender(debug=True)
mailsender.send(to=['[email protected]'], subject=subject, body=body,
- attachs=attachs, charset='utf-8', _callback=self._catch_mail_sent)
+ attachs=attachs, charset='utf-8',
+ _callback=self._catch_mail_sent)
assert self.catched_msg
self.assertEqual(self.catched_msg['subject'], subject)
@@ -99,7 +108,8 @@ class MailSenderTest(unittest.TestCase):
msg = self.catched_msg['msg']
self.assertEqual(msg['subject'], subject)
self.assertEqual(msg.get_charset(), Charset('utf-8'))
- self.assertEqual(msg.get('Content-Type'), 'multipart/mixed; charset="utf-8"')
+ self.assertEqual(msg.get('Content-Type'),
+ 'multipart/mixed; charset="utf-8"')
payload = msg.get_payload()
assert isinstance(payload, list)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
coverage==7.8.0
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
execnet==2.1.1
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@7e20725eb78cb34db60944cd1f155522fcf3b9f5#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- coverage==7.8.0
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_mail.py::MailSenderTest::test_send_single_values_to_and_cc"
]
| []
| [
"tests/test_mail.py::MailSenderTest::test_send",
"tests/test_mail.py::MailSenderTest::test_send_attach",
"tests/test_mail.py::MailSenderTest::test_send_attach_utf8",
"tests/test_mail.py::MailSenderTest::test_send_html",
"tests/test_mail.py::MailSenderTest::test_send_utf8"
]
| []
| BSD 3-Clause "New" or "Revised" License | 821 | [
"scrapy/mail.py",
"docs/topics/email.rst"
]
| [
"scrapy/mail.py",
"docs/topics/email.rst"
]
|
|
unnonouno__jqp-26 | 0977f3585147190adec127722e940783209b6ab6 | 2016-10-19 12:55:42 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | coveralls:
[Coverage Status](https://coveralls.io/builds/8409367)
Coverage increased (+1.4%) to 83.721% when pulling **df0ac543dc33b5ad23fb4dd94e5f512793a1f5f0 on import** into **0977f3585147190adec127722e940783209b6ab6 on master**.
coveralls:
[Coverage Status](https://coveralls.io/builds/8409381)
Coverage increased (+1.4%) to 83.721% when pulling **df0ac543dc33b5ad23fb4dd94e5f512793a1f5f0 on import** into **0977f3585147190adec127722e940783209b6ab6 on master**.
| diff --git a/jqp/__init__.py b/jqp/__init__.py
index cdcc02e..90fd8de 100644
--- a/jqp/__init__.py
+++ b/jqp/__init__.py
@@ -14,7 +14,15 @@ def _exit(error, return_code, message):
sys.exit(return_code)
-def run(in_io, out_io, cmd):
+def run(in_io, out_io, cmd, imports=[]):
+ environment = {}
+ for mod_name in imports:
+ try:
+ mod = __import__(mod_name)
+ except Exception as e:
+ _exit(e, 5, 'Cannot import module: %s' % mod_name)
+ environment[mod_name] = mod
+
for i, line in enumerate(in_io):
if line.strip() == '':
continue
@@ -26,7 +34,8 @@ def run(in_io, out_io, cmd):
_exit(e, 4, 'Parse error: line %d' % line_no)
try:
- out = eval(cmd, {'j': js})
+ environment['j'] = js
+ out = eval(cmd, environment)
except Exception as e:
_exit(e, 3, 'Cannot execute command: line %d' % line_no)
@@ -44,6 +53,10 @@ def main():
parser.add_argument(
'--version', action='version', version='jqp %s' % __version__,
help='show version and exit')
+ parser.add_argument(
+ '--import', action='append',
+ help='modules to import')
+
args = parser.parse_args()
- run(sys.stdin, sys.stdout, args.cmd)
+ run(sys.stdin, sys.stdout, args.cmd, imports=getattr(args, 'import'))
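Because `import` is a Python keyword, the patch reads the accumulated values with `getattr(args, 'import')` rather than a plain attribute access. A shell sketch of the new flag, matching `test_import` in the test patch below:

```console
$ echo '{"name": "Taro", "age": 10}' | jqp --import re 're.sub("a", "A", j["name"])'
"TAro"
```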
| Add an import option
I need an option to import modules that I use in commands. | unnonouno/jqp | diff --git a/tests/run_test.py b/tests/run_test.py
index 31ceb87..8e64641 100644
--- a/tests/run_test.py
+++ b/tests/run_test.py
@@ -16,6 +16,13 @@ class RunTest(unittest.TestCase):
jqp.run(inputs, outputs, 'j["name"]')
self.assertEqual(outputs.getvalue(), '"Taro"\n')
+ def test_import(self):
+ inputs = StringIO('''{"name": "Taro", "age": 10}
+''')
+ outputs = StringIO()
+ jqp.run(inputs, outputs, 're.sub("a", "A", j["name"])', imports=['re'])
+ self.assertEqual(outputs.getvalue(), '"TAro"\n')
+
def test_parse_error(self):
inputs = StringIO('invalid\n')
outputs = StringIO()
@@ -36,3 +43,10 @@ class RunTest(unittest.TestCase):
with self.assertRaises(SystemExit) as e:
jqp.run(inputs, outputs, 'lambda: 0')
self.assertEqual(e.exception.code, 3)
+
+ def test_import_error(self):
+ inputs = StringIO('1\n')
+ outputs = StringIO()
+ with self.assertRaises(SystemExit) as e:
+ jqp.run(inputs, outputs, 'j', imports=['unknown_module'])
+ self.assertEqual(e.exception.code, 5)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/unnonouno/jqp.git@0977f3585147190adec127722e940783209b6ab6#egg=jqp
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: jqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/jqp
| [
"tests/run_test.py::RunTest::test_import",
"tests/run_test.py::RunTest::test_import_error"
]
| []
| [
"tests/run_test.py::RunTest::test1",
"tests/run_test.py::RunTest::test_dump_error",
"tests/run_test.py::RunTest::test_execution_error",
"tests/run_test.py::RunTest::test_parse_error"
]
| []
| MIT License | 822 | [
"jqp/__init__.py"
]
| [
"jqp/__init__.py"
]
|
unnonouno__jqp-29 | 0b56b8127bf430cab872c71f62329094e666f995 | 2016-10-19 14:10:23 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | diff --git a/README.rst b/README.rst
index 8ffd9b7..04a53d0 100644
--- a/README.rst
+++ b/README.rst
@@ -45,6 +45,7 @@ optional arguments:
-h, --help show this help message and exit
--version show version and exit
--import IMPORT modules to import
+ --sort-keys, -S sort keys in objects when the command print it
Example
diff --git a/jqp/__init__.py b/jqp/__init__.py
index 90fd8de..1e20c00 100644
--- a/jqp/__init__.py
+++ b/jqp/__init__.py
@@ -14,7 +14,7 @@ def _exit(error, return_code, message):
sys.exit(return_code)
-def run(in_io, out_io, cmd, imports=[]):
+def run(in_io, out_io, cmd, imports=[], sort_keys=False):
environment = {}
for mod_name in imports:
try:
@@ -40,7 +40,7 @@ def run(in_io, out_io, cmd, imports=[]):
_exit(e, 3, 'Cannot execute command: line %d' % line_no)
try:
- json.dump(out, out_io)
+ json.dump(out, out_io, sort_keys=sort_keys)
except Exception as e:
_exit(e, 3, 'Cannot dump result: line %d' % line_no)
@@ -56,7 +56,11 @@ def main():
parser.add_argument(
'--import', action='append',
help='modules to import')
+ parser.add_argument(
+ '--sort-keys', '-S', action='store_true',
+ help='sort keys in objects when the command print it')
args = parser.parse_args()
- run(sys.stdin, sys.stdout, args.cmd, imports=getattr(args, 'import'))
+ run(sys.stdin, sys.stdout, args.cmd, imports=getattr(args, 'import'),
+ sort_keys=args.sort_keys)
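The new flag is forwarded straight to `json.dump(..., sort_keys=...)`. A shell sketch of the effect; as in `test_sort_keys` below, the command here ignores its input and the object's keys come out sorted:

```console
$ echo '1' | jqp -S '{"c": 0, "a": 0, "b": 0}'
{"a": 0, "b": 0, "c": 0}
```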
| Add option to sort keys
`--sort-keys` / `-S` to sort keys. | unnonouno/jqp | diff --git a/tests/run_test.py b/tests/run_test.py
index 8e64641..4f26ab8 100644
--- a/tests/run_test.py
+++ b/tests/run_test.py
@@ -23,6 +23,14 @@ class RunTest(unittest.TestCase):
jqp.run(inputs, outputs, 're.sub("a", "A", j["name"])', imports=['re'])
self.assertEqual(outputs.getvalue(), '"TAro"\n')
+ def test_sort_keys(self):
+ # This command ignores input
+ inputs = StringIO('''1
+''')
+ outputs = StringIO()
+ jqp.run(inputs, outputs, '{"a": 0, "b": 0, "c": 0}', sort_keys=True)
+ self.assertEqual(outputs.getvalue(), '{"a": 0, "b": 0, "c": 0}\n')
+
def test_parse_error(self):
inputs = StringIO('invalid\n')
outputs = StringIO()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/unnonouno/jqp.git@0b56b8127bf430cab872c71f62329094e666f995#egg=jqp
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: jqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- pytest-cov==6.0.0
prefix: /opt/conda/envs/jqp
| [
"tests/run_test.py::RunTest::test_sort_keys"
]
| []
| [
"tests/run_test.py::RunTest::test1",
"tests/run_test.py::RunTest::test_dump_error",
"tests/run_test.py::RunTest::test_execution_error",
"tests/run_test.py::RunTest::test_import",
"tests/run_test.py::RunTest::test_import_error",
"tests/run_test.py::RunTest::test_parse_error"
]
| []
| MIT License | 823 | [
"README.rst",
"jqp/__init__.py"
]
| [
"README.rst",
"jqp/__init__.py"
]
|
|
unnonouno__jqp-32 | c5cc6ce947f65446926749039542004f3c5a2a2b | 2016-10-19 14:50:03 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | coveralls:
[Coverage Status](https://coveralls.io/builds/8411457)
Coverage decreased (-1.8%) to 80.0% when pulling **f405db260506c60284215d247f886eef63ad8898 on raw-output** into **c5cc6ce947f65446926749039542004f3c5a2a2b on master**.
coveralls:
[Coverage Status](https://coveralls.io/builds/8411508)
Coverage increased (+0.2%) to 82.0% when pulling **6296205b798a73cc82ffd3d9223d11c7ff79ad9e on raw-output** into **c5cc6ce947f65446926749039542004f3c5a2a2b on master**.
| diff --git a/jqp/__init__.py b/jqp/__init__.py
index 19e1ea8..def3f62 100644
--- a/jqp/__init__.py
+++ b/jqp/__init__.py
@@ -6,6 +6,12 @@ import sys
__version__ = '0.0.0.1'
+if sys.version_info.major >= 3:
+ _basestring = str
+else:
+ _basestring = basestring # NOQA
+
+
def _exit(error, return_code, message):
sys.stderr.write(message)
sys.stderr.write('\nOriginal error: ')
@@ -14,7 +20,7 @@ def _exit(error, return_code, message):
sys.exit(return_code)
-def run(in_io, out_io, cmd, imports=[], sort_keys=False):
+def run(in_io, out_io, cmd, imports=[], sort_keys=False, raw_output=False):
environment = {}
for mod_name in imports:
try:
@@ -39,10 +45,13 @@ def run(in_io, out_io, cmd, imports=[], sort_keys=False):
except Exception as e:
_exit(e, 3, 'Cannot execute command: line %d' % line_no)
- try:
- json.dump(out, out_io, sort_keys=sort_keys)
- except Exception as e:
- _exit(e, 3, 'Cannot dump result: line %d' % line_no)
+ if raw_output and isinstance(out, _basestring):
+ out_io.write(out)
+ else:
+ try:
+ json.dump(out, out_io, sort_keys=sort_keys)
+ except Exception as e:
+ _exit(e, 3, 'Cannot dump result: line %d' % line_no)
out_io.write('\n')
@@ -59,8 +68,11 @@ def main():
parser.add_argument(
'--sort-keys', '-S', action='store_true',
help='sort keys in objects when the command print it')
+ parser.add_argument(
+ '--raw-output', '-r', action='store_true',
+ help='when a result is string, the command shows a raw string')
args = parser.parse_args()
run(sys.stdin, sys.stdout, args.cmd, imports=getattr(args, 'import'),
- sort_keys=args.sort_keys)
+ sort_keys=args.sort_keys, raw_output=args.raw_output)
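The new branch only fires when the result is a string (checked through the `_basestring` shim so Python 2 and 3 behave alike); any other result still goes through `json.dump`. A shell sketch contrasting the two modes, matching `test_raw_output` below:

```console
$ echo '1' | jqp '"a"'
"a"
$ echo '1' | jqp -r '"a"'
a
```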
| Add a raw output mode
It shows the raw string instead of a JSON-encoded string. | unnonouno/jqp | diff --git a/tests/run_test.py b/tests/run_test.py
index 4f26ab8..f2fb837 100644
--- a/tests/run_test.py
+++ b/tests/run_test.py
@@ -31,6 +31,14 @@ class RunTest(unittest.TestCase):
jqp.run(inputs, outputs, '{"a": 0, "b": 0, "c": 0}', sort_keys=True)
self.assertEqual(outputs.getvalue(), '{"a": 0, "b": 0, "c": 0}\n')
+ def test_raw_output(self):
+ # This command ignores input
+ inputs = StringIO('''1
+''')
+ outputs = StringIO()
+ jqp.run(inputs, outputs, '"a"', raw_output=True)
+ self.assertEqual(outputs.getvalue(), 'a\n')
+
def test_parse_error(self):
inputs = StringIO('invalid\n')
outputs = StringIO()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"coverage",
"hacking"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
flake8==3.8.4
hacking==4.1.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/unnonouno/jqp.git@c5cc6ce947f65446926749039542004f3c5a2a2b#egg=jqp
mccabe==0.6.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.6.0
pyflakes==2.2.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: jqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- flake8==3.8.4
- hacking==4.1.0
- mccabe==0.6.1
- pycodestyle==2.6.0
- pyflakes==2.2.0
prefix: /opt/conda/envs/jqp
| [
"tests/run_test.py::RunTest::test_raw_output"
]
| []
| [
"tests/run_test.py::RunTest::test1",
"tests/run_test.py::RunTest::test_dump_error",
"tests/run_test.py::RunTest::test_execution_error",
"tests/run_test.py::RunTest::test_import",
"tests/run_test.py::RunTest::test_import_error",
"tests/run_test.py::RunTest::test_parse_error",
"tests/run_test.py::RunTest::test_sort_keys"
]
| []
| MIT License | 824 | [
"jqp/__init__.py"
]
| [
"jqp/__init__.py"
]
|
unnonouno__jqp-33 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | 2016-10-19 15:13:44 | 2cb79f866194b90b5236d8146e3cab6ace704ebd | diff --git a/README.rst b/README.rst
index 04a53d0..cdeb66f 100644
--- a/README.rst
+++ b/README.rst
@@ -46,6 +46,7 @@ optional arguments:
--version show version and exit
--import IMPORT modules to import
--sort-keys, -S sort keys in objects when the command print it
+ --join-output, -j do not show newlines
Example
diff --git a/jqp/__init__.py b/jqp/__init__.py
index def3f62..c2f9405 100644
--- a/jqp/__init__.py
+++ b/jqp/__init__.py
@@ -20,7 +20,8 @@ def _exit(error, return_code, message):
sys.exit(return_code)
-def run(in_io, out_io, cmd, imports=[], sort_keys=False, raw_output=False):
+def run(in_io, out_io, cmd, imports=[], sort_keys=False, raw_output=False,
+ join_output=False):
environment = {}
for mod_name in imports:
try:
@@ -53,7 +54,8 @@ def run(in_io, out_io, cmd, imports=[], sort_keys=False, raw_output=False):
except Exception as e:
_exit(e, 3, 'Cannot dump result: line %d' % line_no)
- out_io.write('\n')
+ if not join_output:
+ out_io.write('\n')
def main():
@@ -71,8 +73,12 @@ def main():
parser.add_argument(
'--raw-output', '-r', action='store_true',
help='when a result is string, the command shows a raw string')
+ parser.add_argument(
+ '--join-output', '-j', action='store_true',
+ help='do not show newlines')
args = parser.parse_args()
run(sys.stdin, sys.stdout, args.cmd, imports=getattr(args, 'import'),
- sort_keys=args.sort_keys, raw_output=args.raw_output)
+ sort_keys=args.sort_keys, raw_output=args.raw_output,
+ join_output=args.join_output)
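With `--join-output` the per-result newline is skipped, so consecutive results are written back to back. A shell sketch matching `test_join_output` below, where the input carries two values:

```console
$ printf '1\n2' | jqp -j 'j'
12
```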
| Make an option to suppress newline characters
jq has a `--join-output` / `-j` option that suppresses newlines. | unnonouno/jqp | diff --git a/tests/run_test.py b/tests/run_test.py
index f2fb837..954a304 100644
--- a/tests/run_test.py
+++ b/tests/run_test.py
@@ -39,6 +39,13 @@ class RunTest(unittest.TestCase):
jqp.run(inputs, outputs, '"a"', raw_output=True)
self.assertEqual(outputs.getvalue(), 'a\n')
+ def test_join_output(self):
+ inputs = StringIO('''1
+2''')
+ outputs = StringIO()
+ jqp.run(inputs, outputs, 'j', join_output=True)
+ self.assertEqual(outputs.getvalue(), '12')
+
def test_parse_error(self):
inputs = StringIO('invalid\n')
outputs = StringIO()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/unnonouno/jqp.git@2cb79f866194b90b5236d8146e3cab6ace704ebd#egg=jqp
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: jqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/jqp
| [
"tests/run_test.py::RunTest::test_join_output"
]
| []
| [
"tests/run_test.py::RunTest::test1",
"tests/run_test.py::RunTest::test_dump_error",
"tests/run_test.py::RunTest::test_execution_error",
"tests/run_test.py::RunTest::test_import",
"tests/run_test.py::RunTest::test_import_error",
"tests/run_test.py::RunTest::test_parse_error",
"tests/run_test.py::RunTest::test_raw_output",
"tests/run_test.py::RunTest::test_sort_keys"
]
| []
| MIT License | 825 | [
"README.rst",
"jqp/__init__.py"
]
| [
"README.rst",
"jqp/__init__.py"
]
|
|
simphony__tornado-webapi-27 | a09944d66e3090ad4ab8ce20f8dd1f92a14855b3 | 2016-10-21 16:27:15 | a09944d66e3090ad4ab8ce20f8dd1f92a14855b3 | diff --git a/tornadowebapi/exceptions.py b/tornadowebapi/exceptions.py
index 60bd639..64a3a2e 100644
--- a/tornadowebapi/exceptions.py
+++ b/tornadowebapi/exceptions.py
@@ -44,6 +44,19 @@ class NotFound(WebAPIException):
return None
+class Exists(WebAPIException):
+ """Represents a case where the resource could not be created
+ because it already exists. This is generally raised in the
+ create() method if the resource has uniqueness constraints on
+ things other than the exposed id."""
+
+ http_code = httpstatus.CONFLICT
+
+ def representation(self):
+ """Exists does not have a representation, just an error status"""
+ return None
+
+
class BadRepresentation(WebAPIException):
"""Exception raised when the resource representation is
invalid or does not contain the appropriate keys.
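A sketch of how a resource would raise the new exception; it mirrors the `AlreadyPresent` resource added in the test patch below, and the import paths are assumed from the package layout rather than confirmed here. A POST against such a resource comes back as 409 CONFLICT:

```python
from tornado import gen

from tornadowebapi import exceptions
from tornadowebapi.resource import Resource  # assumed module path


class AlreadyPresent(Resource):
    @gen.coroutine
    def create(self, *args):
        # e.g. a uniqueness constraint violated on something
        # other than the exposed id.
        raise exceptions.Exists()
```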
| Add Exists exception
For cases where we try to create a resource that already exists, for whatever reason. | simphony/tornado-webapi | diff --git a/tornadowebapi/tests/test_webapi.py b/tornadowebapi/tests/test_webapi.py
index e3f3b8b..0b5f7de 100644
--- a/tornadowebapi/tests/test_webapi.py
+++ b/tornadowebapi/tests/test_webapi.py
@@ -107,6 +107,12 @@ class Broken(Resource):
items = boom
+class AlreadyPresent(Resource):
+ @gen.coroutine
+ def create(self, *args):
+ raise exceptions.Exists()
+
+
class TestREST(AsyncHTTPTestCase):
def setUp(self):
super().setUp()
@@ -120,6 +126,7 @@ class TestREST(AsyncHTTPTestCase):
registry.registry.register(Unprocessable)
registry.registry.register(UnsupportsCollection)
registry.registry.register(Broken)
+ registry.registry.register(AlreadyPresent)
app = web.Application(handlers=handlers)
app.hub = mock.Mock()
return app
@@ -402,6 +409,12 @@ class TestREST(AsyncHTTPTestCase):
method="GET")
self.assertEqual(res.code, httpstatus.METHOD_NOT_ALLOWED)
+ def test_exists(self):
+ collection_url = "/api/v1/alreadypresents/"
+
+ res = self.fetch(collection_url, method="POST", body="{}")
+ self.assertEqual(res.code, httpstatus.CONFLICT)
+
class TestRESTFunctions(unittest.TestCase):
def test_api_handlers(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
tomli==2.2.1
tornado==6.4.2
-e git+https://github.com/simphony/tornado-webapi.git@a09944d66e3090ad4ab8ce20f8dd1f92a14855b3#egg=tornadowebapi
| name: tornado-webapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- tomli==2.2.1
- tornado==6.4.2
prefix: /opt/conda/envs/tornado-webapi
| [
"tornadowebapi/tests/test_webapi.py::TestREST::test_exists"
]
| []
| [
"tornadowebapi/tests/test_webapi.py::TestREST::test_broken",
"tornadowebapi/tests/test_webapi.py::TestREST::test_create",
"tornadowebapi/tests/test_webapi.py::TestREST::test_delete",
"tornadowebapi/tests/test_webapi.py::TestREST::test_items",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_non_json",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_on_resource",
"tornadowebapi/tests/test_webapi.py::TestREST::test_retrieve",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unexistent_resource_type",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unprocessable",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupported_methods",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupports_collections",
"tornadowebapi/tests/test_webapi.py::TestREST::test_update",
"tornadowebapi/tests/test_webapi.py::TestRESTFunctions::test_api_handlers",
"tornadowebapi/tests/test_webapi.py::TestNonGlobalRegistry::test_non_global_registry"
]
| []
| BSD 3-Clause "New" or "Revised" License | 826 | [
"tornadowebapi/exceptions.py"
]
| [
"tornadowebapi/exceptions.py"
]
|
|
simphony__tornado-webapi-28 | a09944d66e3090ad4ab8ce20f8dd1f92a14855b3 | 2016-10-21 16:35:47 | a09944d66e3090ad4ab8ce20f8dd1f92a14855b3 | diff --git a/tornadowebapi/handler.py b/tornadowebapi/handler.py
index 609a1c5..6b78df0 100644
--- a/tornadowebapi/handler.py
+++ b/tornadowebapi/handler.py
@@ -102,7 +102,7 @@ class CollectionHandler(BaseHandler):
self.set_status(httpstatus.OK)
# Need to convert into a dict for security issue tornado/1009
- self.write({"items": list(items)})
+ self.write({"items": [str(item) for item in items]})
self.flush()
@gen.coroutine
@@ -134,7 +134,7 @@ class CollectionHandler(BaseHandler):
raise web.HTTPError(httpstatus.INTERNAL_SERVER_ERROR)
location = with_end_slash(
- url_path_join(self.request.full_url(), resource_id))
+ url_path_join(self.request.full_url(), str(resource_id)))
self.set_status(httpstatus.CREATED)
self.set_header("Location", location)
| Set the POST resource_id to a string.
The id should be converted to a string before generating the resource URL; this way we can return integers from the REST handler and still have everything work correctly. | simphony/tornado-webapi | diff --git a/tornadowebapi/tests/test_webapi.py b/tornadowebapi/tests/test_webapi.py
index e3f3b8b..c67406d 100644
--- a/tornadowebapi/tests/test_webapi.py
+++ b/tornadowebapi/tests/test_webapi.py
@@ -26,8 +26,8 @@ class Student(Resource):
@gen.coroutine
def create(self, representation):
- id = str(type(self).id)
- self.collection[id] = representation
+ id = type(self).id
+ self.collection[str(id)] = representation
type(self).id += 1
return id
@@ -138,7 +138,7 @@ class TestREST(AsyncHTTPTestCase):
res = self.fetch("/api/v1/students/")
self.assertEqual(res.code, httpstatus.OK)
self.assertEqual(escape.json_decode(res.body),
- {"items": [1, 2, 3]})
+ {"items": ["1", "2", "3"]})
def test_create(self):
res = self.fetch(
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
tomli==2.2.1
tornado==6.4.2
-e git+https://github.com/simphony/tornado-webapi.git@a09944d66e3090ad4ab8ce20f8dd1f92a14855b3#egg=tornadowebapi
tzdata==2025.2
| name: tornado-webapi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- tomli==2.2.1
- tornado==6.4.2
- tzdata==2025.2
prefix: /opt/conda/envs/tornado-webapi
| [
"tornadowebapi/tests/test_webapi.py::TestREST::test_create",
"tornadowebapi/tests/test_webapi.py::TestREST::test_delete",
"tornadowebapi/tests/test_webapi.py::TestREST::test_items",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_on_resource",
"tornadowebapi/tests/test_webapi.py::TestREST::test_retrieve",
"tornadowebapi/tests/test_webapi.py::TestREST::test_update"
]
| []
| [
"tornadowebapi/tests/test_webapi.py::TestREST::test_broken",
"tornadowebapi/tests/test_webapi.py::TestREST::test_post_non_json",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unexistent_resource_type",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unprocessable",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupported_methods",
"tornadowebapi/tests/test_webapi.py::TestREST::test_unsupports_collections",
"tornadowebapi/tests/test_webapi.py::TestRESTFunctions::test_api_handlers",
"tornadowebapi/tests/test_webapi.py::TestNonGlobalRegistry::test_non_global_registry"
]
| []
| BSD 3-Clause "New" or "Revised" License | 827 | [
"tornadowebapi/handler.py"
]
| [
"tornadowebapi/handler.py"
]
|
|
cookiecutter__cookiecutter-839 | d8672b11e445a918431933c322e7ac96440fd438 | 2016-10-23 11:15:56 | bf618fda089fbc5f332d6221af333ee31856f96c | diff --git a/cookiecutter/generate.py b/cookiecutter/generate.py
index 4739aec..4656a4f 100644
--- a/cookiecutter/generate.py
+++ b/cookiecutter/generate.py
@@ -323,6 +323,7 @@ def generate_files(
for copy_dir in copy_dirs:
indir = os.path.normpath(os.path.join(root, copy_dir))
outdir = os.path.normpath(os.path.join(project_dir, indir))
+ outdir = env.from_string(outdir).render(**context)
logger.debug('Copying dir %s to %s without rendering', indir, outdir)
shutil.copytree(indir, outdir)
diff --git a/docs/advanced/copy_without_render.rst b/docs/advanced/copy_without_render.rst
index a804032..2cdb680 100644
--- a/docs/advanced/copy_without_render.rst
+++ b/docs/advanced/copy_without_render.rst
@@ -15,3 +15,14 @@ To avoid rendering directories and files of a cookiecutter, the `_copy_without_r
"rendered_dir/not_rendered_file.ini"
]
}
+
+**Note**: Only the content of the files will be copied without being rendered. The paths are subject to rendering. This allows you to write::
+
+ {
+ "project_slug": "sample",
+ "_copy_without_render": [
+ "{{cookiecutter.repo_name}}/templates/*.html",
+ ]
+ }
+
+In this example, `{{cookiecutter.repo_name}}` will be rendered as expected but the html file content will be copied without rendering.
| Allow for copy_without_render to render output directory name
I find it strange that copy_without_render doesn't render the directory name.
An example use case is wanting to copy a templates directory in your project that contains Jinja: the directory is copied, but it is placed into a folder like '{{cookiecutter.repo_name}}/templates' as opposed to 'someproject/templates'.
I understand some people may in fact want this, but maybe an option to toggle this behavior would be welcome.
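For illustration, a minimal repro sketch (the template name is hypothetical; assume its cookiecutter.json lists "{{cookiecutter.repo_name}}/templates/*" under `_copy_without_render`):

```python
from cookiecutter.main import cookiecutter

# Sketch: generate a project from a template that marks a templated
# directory as copy-without-render.
cookiecutter(
    "gh:someuser/sometemplate",  # hypothetical template
    no_input=True,
    extra_context={"repo_name": "someproject"},
)
# Expected: someproject/templates/ (path rendered, file contents verbatim).
# Actual:   a literal '{{cookiecutter.repo_name}}/templates/' directory.
```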
| cookiecutter/cookiecutter | diff --git a/tests/test-generate-copy-without-render/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}-rendered/README.md b/tests/test-generate-copy-without-render/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}-rendered/README.md
new file mode 100644
index 0000000..0e74081
--- /dev/null
+++ b/tests/test-generate-copy-without-render/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}-rendered/README.md
@@ -0,0 +1,3 @@
+# Fake Project
+
+{{cookiecutter.render_test}}
diff --git a/tests/test_generate_copy_without_render.py b/tests/test_generate_copy_without_render.py
index 0e2a944..7d61482 100644
--- a/tests/test_generate_copy_without_render.py
+++ b/tests/test_generate_copy_without_render.py
@@ -31,6 +31,7 @@ def test_generate_copy_without_render_extensions():
'*not-rendered',
'rendered/not_rendered.yml',
'*.txt',
+ '{{cookiecutter.repo_name}}-rendered/README.md',
],
}
},
@@ -39,7 +40,7 @@ def test_generate_copy_without_render_extensions():
dir_contents = os.listdir('test_copy_without_render')
- assert '{{cookiecutter.repo_name}}-not-rendered' in dir_contents
+ assert 'test_copy_without_render-not-rendered' in dir_contents
assert 'test_copy_without_render-rendered' in dir_contents
with open('test_copy_without_render/README.txt') as f:
@@ -59,9 +60,16 @@ def test_generate_copy_without_render_extensions():
assert 'I have been rendered' in f.read()
with open(
- 'test_copy_without_render/{{cookiecutter.repo_name}}-not-rendered/README.rst'
+ 'test_copy_without_render/'
+ 'test_copy_without_render-not-rendered/'
+ 'README.rst'
) as f:
assert '{{cookiecutter.render_test}}' in f.read()
with open('test_copy_without_render/rendered/not_rendered.yml') as f:
assert '{{cookiecutter.render_test}}' in f.read()
+
+ with open(
+ 'test_copy_without_render/' 'test_copy_without_render-rendered/' 'README.md'
+ ) as f:
+ assert '{{cookiecutter.render_test}}' in f.read()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "scipy numpy",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock"
],
"pre_install": null,
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arrow==1.2.3
binaryornot==0.4.4
certifi @ file:///croot/certifi_1671487769961/work/certifi
chardet==5.2.0
charset-normalizer==3.4.1
click==8.1.8
-e git+https://github.com/cookiecutter/cookiecutter.git@d8672b11e445a918431933c322e7ac96440fd438#egg=cookiecutter
coverage==7.2.7
exceptiongroup==1.2.2
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
Jinja2==2.11.3
jinja2-time==0.2.0
MarkupSafe==1.1.1
numpy @ file:///opt/conda/conda-bld/numpy_and_numpy_base_1653915516269/work
packaging==24.0
pluggy==1.2.0
poyo==0.5.0
pytest==7.4.4
pytest-cov==4.1.0
pytest-mock==3.11.1
python-dateutil==2.9.0.post0
python-slugify==8.0.4
requests==2.31.0
scipy @ file:///opt/conda/conda-bld/scipy_1661390393401/work
six==1.17.0
text-unidecode==1.3
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: cookiecutter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- blas=1.0=openblas
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- fftw=3.3.9=h5eee18b_2
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=11.2.0=h00389a5_1
- libgfortran5=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.21=h043d6bf_0
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- numpy=1.21.5=py37hf838250_3
- numpy-base=1.21.5=py37h1e6e340_3
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- scipy=1.7.3=py37hf838250_2
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arrow==1.2.3
- binaryornot==0.4.4
- chardet==5.2.0
- charset-normalizer==3.4.1
- click==8.1.8
- cookiecutter==2.0.0
- coverage==7.2.7
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jinja2==2.11.3
- jinja2-time==0.2.0
- markupsafe==1.1.1
- packaging==24.0
- pluggy==1.2.0
- poyo==0.5.0
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-mock==3.11.1
- python-dateutil==2.9.0.post0
- python-slugify==8.0.4
- requests==2.31.0
- six==1.17.0
- text-unidecode==1.3
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/cookiecutter
| [
"tests/test_generate_copy_without_render.py::test_generate_copy_without_render_extensions"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 828 | [
"cookiecutter/generate.py",
"docs/advanced/copy_without_render.rst"
]
| [
"cookiecutter/generate.py",
"docs/advanced/copy_without_render.rst"
]
|
|
gerva__tower-companion-28 | 5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a | 2016-10-24 08:21:41 | 5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a | coveralls:
[](https://coveralls.io/builds/8472505)
Coverage remained the same at 100.0% when pulling **ad80837a4268266baeb9cebaa1d793a4d0bc46fb on michaelgaida:TC-26** into **5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a on gerva:master**.
coveralls:
[](https://coveralls.io/builds/8473235)
Coverage remained the same at 100.0% when pulling **2ac2caa48f1e7562fef046d4e62dc28cfec0ff63 on michaelgaida:TC-26** into **5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a on gerva:master**.
| diff --git a/lib/api.py b/lib/api.py
index 6d76e18..a131931 100644
--- a/lib/api.py
+++ b/lib/api.py
@@ -8,6 +8,7 @@ import json
import requests
import lib.validate as validate
from lib.adhoc import AdHocError
+from lib.configuration import ConfigError
class APIError(Exception):
@@ -27,7 +28,13 @@ class APIv1(object):
# E: Instance of 'LookupDict' has no 'created' member (no-member)
def __init__(self, config):
self.config = config
- self.host = config.get('host')
+ try:
+ self.host = config.get('host')
+ except ConfigError as error:
+ msg = "Missing key from configuration, {0}.".format(error)
+ msg = "{0} Please check your configuration.".format(msg)
+ raise APIError(msg)
+
self.api_url = "https://{0}/api/v1".format(self.host)
def _authentication(self):
@@ -38,14 +45,25 @@ class APIv1(object):
(tuple) username, password
"""
config = self.config
- return (config.get('username'), config.get('password'))
+ try:
+ return (config.get('username'), config.get('password'))
+ except ConfigError as error:
+ msg = "Missing key from configuration, {0}.".format(error)
+ msg = "{0} Please check your configuration.".format(msg)
+ raise APIError(msg)
def _verify_ssl(self):
"""
Gets the value of verify_ssl from the actual configuraion
"""
config = self.config
- return config.getboolean('verify_ssl')
+ try:
+ return config.getboolean('verify_ssl')
+ except ConfigError as error:
+ msg = "Missing key from configuration, {0}.".format(error)
+ msg = "{0} Please check your configuration.".format(msg)
+ raise APIError(msg)
+
def _get(self, url, params, data):
auth = self._authentication()
| When no configuration file is set, `kick` raises a ConfigError
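For illustration, a minimal sketch of friendlier handling (hypothetical helper; assumes the `Config`/`ConfigError` pair from `lib.configuration`):

```python
from lib.configuration import Config, ConfigError

def require(config, key):
    # Sketch: turn a bare ConfigError into an actionable message.
    try:
        return config.get(key)
    except ConfigError as error:
        raise SystemExit(
            "Missing key from configuration, {0}. "
            "Please check your configuration.".format(error))
```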
We should intercept the `ConfigError` along those lines and explain what's wrong with the configuration, instead of just printing the full stack trace. | gerva/tower-companion | diff --git a/test/test_api.py b/test/test_api.py
index 50b1a85..31d8d27 100644
--- a/test/test_api.py
+++ b/test/test_api.py
@@ -135,6 +135,25 @@ def test_get_json_error(monkeypatch):
with pytest.raises(APIError):
api._get_json(url='', params={}, data={})
+def test_less_configuration():
+ config = Config(None)
+ with pytest.raises(APIError):
+ api = APIv1(config)
+
+ config.update('host', HOST)
+ api = APIv1(config)
+ with pytest.raises(APIError):
+ api._authentication()
+
+ config.update('username', USERNAME)
+ api = APIv1(config)
+ with pytest.raises(APIError):
+ api._authentication()
+
+ config.update('password', PASSWORD)
+ api = APIv1(config)
+ with pytest.raises(APIError):
+ api._verify_ssl()
def test_job_params(monkeypatch):
def mockreturn(*args, **kwargs):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"prospector"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
certifi==2021.5.30
click==6.6
coverage==6.2
dill==0.3.4
dodgy==0.2.1
flake8==4.0.1
flake8-polyfill==1.0.2
importlib-metadata==4.2.0
iniconfig==1.1.1
isort==5.10.1
lazy-object-proxy==1.7.1
mccabe==0.6.1
packaging==21.3
pep8-naming==0.10.0
platformdirs==2.4.0
pluggy==1.0.0
prospector==1.7.7
py==1.11.0
pycodestyle==2.8.0
pydocstyle==6.3.0
pyflakes==2.4.0
pylint==2.13.9
pylint-celery==0.3
pylint-django==2.5.3
pylint-flask==0.6
pylint-plugin-utils==0.7
pyparsing==3.1.4
pytest==7.0.1
PyYAML==3.12
requests==2.11.1
requirements-detector==0.7
setoptconf-tmp==0.3.1
snowballstemmer==2.2.0
toml==0.10.2
tomli==1.2.3
-e git+https://github.com/gerva/tower-companion.git@5c373dd2992d4404e9a0e5fc0c5195f58c7ddb7a#egg=tower_companion
typed-ast==1.5.5
typing_extensions==4.1.1
wrapt==1.16.0
zipp==3.6.0
| name: tower-companion
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- click==6.6
- coverage==6.2
- dill==0.3.4
- dodgy==0.2.1
- flake8==4.0.1
- flake8-polyfill==1.0.2
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isort==5.10.1
- lazy-object-proxy==1.7.1
- mccabe==0.6.1
- packaging==21.3
- pep8-naming==0.10.0
- platformdirs==2.4.0
- pluggy==1.0.0
- prospector==1.7.7
- py==1.11.0
- pycodestyle==2.8.0
- pydocstyle==6.3.0
- pyflakes==2.4.0
- pylint==2.13.9
- pylint-celery==0.3
- pylint-django==2.5.3
- pylint-flask==0.6
- pylint-plugin-utils==0.7
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==3.12
- requests==2.11.1
- requirements-detector==0.7
- setoptconf-tmp==0.3.1
- snowballstemmer==2.2.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/tower-companion
| [
"test/test_api.py::test_less_configuration"
]
| []
| [
"test/test_api.py::test_api",
"test/test_api.py::test_verify_ssl",
"test/test_api.py::test_get",
"test/test_api.py::test_get_error",
"test/test_api.py::test_post",
"test/test_api.py::test_post_error",
"test/test_api.py::test_get_json",
"test/test_api.py::test_get_json_error",
"test/test_api.py::test_job_params",
"test/test_api.py::test_get_ids",
"test/test_api.py::test_get_ids_zero_results",
"test/test_api.py::test_launch_template_id",
"test/test_api.py::test_launch_data_to_url",
"test/test_api.py::test_job_stdout",
"test/test_api.py::test_job_status",
"test/test_api.py::test_job_finished",
"test/test_api.py::test_job_started",
"test/test_api.py::test_get_data",
"test/test_api.py::test_ad_hoc_to_api",
"test/test_api.py::test_launch_ad_hoc_job",
"test/test_api.py::test_job_url"
]
| []
| Apache License 2.0 | 829 | [
"lib/api.py"
]
| [
"lib/api.py"
]
|
scrapy__w3lib-77 | 8e19741b6b004d6248fb70b525255a96a1eb1ee6 | 2016-10-25 06:37:29 | e2c7b62ea59104f628c1c5f35333cb406b4e500e | redapple: Can you add tests for this?
Can you provide example websites showing this issue? | diff --git a/w3lib/html.py b/w3lib/html.py
index a4be054..a31d42b 100644
--- a/w3lib/html.py
+++ b/w3lib/html.py
@@ -311,7 +311,7 @@ def get_base_url(
"""
- utext = to_unicode(text, encoding)
+ utext: str = remove_comments(text, encoding=encoding)
m = _baseurl_re.search(utext)
if m:
return urljoin(
| It's not a good idea to parse HTML text using regular expressions
In [`w3lib.html`](https://github.com/scrapy/w3lib/blob/master/w3lib/html.py) regular expressions are used to parse HTML texts:
``` python
_ent_re = re.compile(r'&((?P<named>[a-z\d]+)|#(?P<dec>\d+)|#x(?P<hex>[a-f\d]+))(?P<semicolon>;?)', re.IGNORECASE)
_tag_re = re.compile(r'<[a-zA-Z\/!].*?>', re.DOTALL)
_baseurl_re = re.compile(six.u(r'<base\s[^>]*href\s*=\s*[\"\']\s*([^\"\'\s]+)\s*[\"\']'), re.I)
_meta_refresh_re = re.compile(six.u(r'<meta\s[^>]*http-equiv[^>]*refresh[^>]*content\s*=\s*(?P<quote>["\'])(?P<int>(\d*\.)?\d+)\s*;\s*url=\s*(?P<url>.*?)(?P=quote)'), re.DOTALL | re.IGNORECASE)
_cdata_re = re.compile(r'((?P<cdata_s><!\[CDATA\[)(?P<cdata_d>.*?)(?P<cdata_e>\]\]>))', re.DOTALL)
```
However, this is definitely incorrect when it involves commented-out content, e.g.
``` python
>>> from w3lib import html
>>> html.get_base_url("""<!-- <base href="http://example.com/" /> -->""")
'http://example.com/'
```
Introducing "heavier" utilities like `lxml` would solve this issue easily, but that might be an awful idea as `w3lib` aims to be lightweight & fast.
Or maybe we could implement some quick parser merely for eliminating the commented parts.
Any ideas?
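For what it's worth, a lightweight sketch that reuses `remove_comments()`, which already lives in `w3lib.html` (the wrapper below is just for illustration, not a proposed API):

```python
from w3lib.html import get_base_url, remove_comments

def get_base_url_skipping_comments(text, baseurl='', encoding=None):
    # Drop <!-- ... --> blocks first so commented-out <base> tags never
    # reach the regex; the example above then returns '' as expected.
    return get_base_url(remove_comments(text, encoding=encoding),
                        baseurl, encoding)
```

The same trick could presumably be applied inside `get_base_url` itself.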
| scrapy/w3lib | diff --git a/tests/test_html.py b/tests/test_html.py
index d4861ba..1e637b0 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -372,6 +372,30 @@ class GetBaseUrlTest(unittest.TestCase):
get_base_url(text, baseurl.encode("ascii")), "http://example.org/something"
)
+ def test_base_url_in_comment(self):
+ self.assertEqual(
+ get_base_url("""<!-- <base href="http://example.com/"/> -->"""), ""
+ )
+ self.assertEqual(
+ get_base_url("""<!-- <base href="http://example.com/"/>"""), ""
+ )
+ self.assertEqual(
+ get_base_url("""<!-- <base href="http://example.com/"/> --"""), ""
+ )
+ self.assertEqual(
+ get_base_url(
+ """<!-- <!-- <base href="http://example.com/"/> -- --> <base href="http://example_2.com/"/> """
+ ),
+ "http://example_2.com/",
+ )
+
+ self.assertEqual(
+ get_base_url(
+ """<!-- <base href="http://example.com/"/> --> <!-- <base href="http://example_2.com/"/> --> <base href="http://example_3.com/"/>"""
+ ),
+ "http://example_3.com/",
+ )
+
def test_relative_url_with_absolute_path(self):
baseurl = "https://example.org"
text = """\
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/scrapy/w3lib.git@8e19741b6b004d6248fb70b525255a96a1eb1ee6#egg=w3lib
| name: w3lib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/w3lib
| [
"tests/test_html.py::GetBaseUrlTest::test_base_url_in_comment"
]
| []
| [
"tests/test_html.py::RemoveEntitiesTest::test_browser_hack",
"tests/test_html.py::RemoveEntitiesTest::test_encoding",
"tests/test_html.py::RemoveEntitiesTest::test_illegal_entities",
"tests/test_html.py::RemoveEntitiesTest::test_keep_entities",
"tests/test_html.py::RemoveEntitiesTest::test_missing_semicolon",
"tests/test_html.py::RemoveEntitiesTest::test_regular",
"tests/test_html.py::RemoveEntitiesTest::test_returns_unicode",
"tests/test_html.py::ReplaceTagsTest::test_replace_tags",
"tests/test_html.py::ReplaceTagsTest::test_replace_tags_multiline",
"tests/test_html.py::ReplaceTagsTest::test_returns_unicode",
"tests/test_html.py::RemoveCommentsTest::test_no_comments",
"tests/test_html.py::RemoveCommentsTest::test_remove_comments",
"tests/test_html.py::RemoveCommentsTest::test_returns_unicode",
"tests/test_html.py::RemoveTagsTest::test_keep_argument",
"tests/test_html.py::RemoveTagsTest::test_remove_empty_tags",
"tests/test_html.py::RemoveTagsTest::test_remove_tags",
"tests/test_html.py::RemoveTagsTest::test_remove_tags_with_attributes",
"tests/test_html.py::RemoveTagsTest::test_remove_tags_without_tags",
"tests/test_html.py::RemoveTagsTest::test_returns_unicode",
"tests/test_html.py::RemoveTagsTest::test_uppercase_tags",
"tests/test_html.py::RemoveTagsWithContentTest::test_empty_tags",
"tests/test_html.py::RemoveTagsWithContentTest::test_returns_unicode",
"tests/test_html.py::RemoveTagsWithContentTest::test_tags_with_shared_prefix",
"tests/test_html.py::RemoveTagsWithContentTest::test_with_tags",
"tests/test_html.py::RemoveTagsWithContentTest::test_without_tags",
"tests/test_html.py::ReplaceEscapeCharsTest::test_returns_unicode",
"tests/test_html.py::ReplaceEscapeCharsTest::test_with_escape_chars",
"tests/test_html.py::ReplaceEscapeCharsTest::test_without_escape_chars",
"tests/test_html.py::UnquoteMarkupTest::test_returns_unicode",
"tests/test_html.py::UnquoteMarkupTest::test_unquote_markup",
"tests/test_html.py::GetBaseUrlTest::test_attributes_before_href",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url_latin1",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url_latin1_percent",
"tests/test_html.py::GetBaseUrlTest::test_get_base_url_utf8",
"tests/test_html.py::GetBaseUrlTest::test_no_scheme_url",
"tests/test_html.py::GetBaseUrlTest::test_relative_url_with_absolute_path",
"tests/test_html.py::GetBaseUrlTest::test_tag_name",
"tests/test_html.py::GetMetaRefreshTest::test_commented_meta_refresh",
"tests/test_html.py::GetMetaRefreshTest::test_entities_in_redirect_url",
"tests/test_html.py::GetMetaRefreshTest::test_float_refresh_intervals",
"tests/test_html.py::GetMetaRefreshTest::test_get_meta_refresh",
"tests/test_html.py::GetMetaRefreshTest::test_html_comments_with_uncommented_meta_refresh",
"tests/test_html.py::GetMetaRefreshTest::test_inside_noscript",
"tests/test_html.py::GetMetaRefreshTest::test_inside_script",
"tests/test_html.py::GetMetaRefreshTest::test_leading_newline_in_url",
"tests/test_html.py::GetMetaRefreshTest::test_multiline",
"tests/test_html.py::GetMetaRefreshTest::test_nonascii_url_latin1",
"tests/test_html.py::GetMetaRefreshTest::test_nonascii_url_latin1_query",
"tests/test_html.py::GetMetaRefreshTest::test_nonascii_url_utf8",
"tests/test_html.py::GetMetaRefreshTest::test_redirections_in_different_ordering__in_meta_tag",
"tests/test_html.py::GetMetaRefreshTest::test_relative_redirects",
"tests/test_html.py::GetMetaRefreshTest::test_tag_name",
"tests/test_html.py::GetMetaRefreshTest::test_without_url"
]
| []
| BSD 3-Clause "New" or "Revised" License | 830 | [
"w3lib/html.py"
]
| [
"w3lib/html.py"
]
|
Azure__azure-cli-1160 | 98572bac3cdff9eb2d106140cdf352a6d2979162 | 2016-10-25 17:25:42 | 1576ec67f5029db062579da230902a559acbb9fe | diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_actions.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_actions.py
index 78fb78969..0f91655a8 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_actions.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_actions.py
@@ -268,8 +268,8 @@ def _handle_container_ssh_file(**kwargs):
private_key_filepath = public_key_filepath[:-4]
else:
private_key_filepath = public_key_filepath + '.private'
- logger.warning('Creating SSH key files: %s,%s', private_key_filepath, public_key_filepath)
content = _generate_ssh_keys(private_key_filepath, public_key_filepath)
+ logger.warning('Created SSH key files: %s,%s', private_key_filepath, public_key_filepath)
args.ssh_key_value = content
def _generate_ssh_keys(private_key_filepath, public_key_filepath):
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py
index b189d5967..5f2c070f5 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py
@@ -71,8 +71,10 @@ register_cli_argument('vm access', 'password', options_list=('--password', '-p')
register_cli_argument('acs', 'name', arg_type=name_arg_type)
register_cli_argument('acs', 'orchestrator_type', **enum_choice_list(ContainerServiceOchestratorTypes))
-register_cli_argument('acs', 'admin_username', admin_username_type)
+#some admin names are prohibited in acs, such as root, admin, etc. Because we have no control on the orchestrators, so default to a safe name.
+register_cli_argument('acs', 'admin_username', options_list=('--admin-username',), default='azureuser', required=False)
register_cli_argument('acs', 'ssh_key_value', required=False, help='SSH key file value or key file path.', default=os.path.join(os.path.expanduser('~'), '.ssh', 'id_rsa.pub'), completer=FilesCompleter())
+register_cli_argument('acs', 'dns_name_prefix', options_list=('--dns-prefix', '-d'))
register_extra_cli_argument('acs create', 'generate_ssh_keys', action='store_true', help='Generate SSH public and private key files if missing')
register_cli_argument('acs', 'container_service_name', options_list=('--name', '-n'), help='The name of the container service', completer=get_resource_name_completion_list('Microsoft.ContainerService/ContainerServices'))
register_cli_argument('acs create', 'agent_vm_size', completer=get_vm_size_completion_list)
| ACS create fails with no username
--admin-username defaults to 'root', which is not allowed:
$ az acs create --resource-group rgacsazcli --orchestrator-type DCOS --name rgacsazcli --dns-name-prefix rgacsazcli
Error loading command module 'iot'
At least one resource deployment operation failed. Please list deployment operations for details. Please see https://aka.ms/arm-debug for usage details. {
"error": {
"code": "InvalidParameter",
"target": "windowsProfile.adminUsername",
"message": "Linux admin user name cannot be 'root' or be longer than 64 characters."
}
}
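A workaround until the default changes (hedged — `--admin-username` is the flag from the shared VM parameter set, and this is only lightly verified): pass an explicit user name, e.g.
$ az acs create --resource-group rgacsazcli --orchestrator-type DCOS --name rgacsazcli --dns-name-prefix rgacsazcli --admin-username azureuser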
| Azure/azure-cli | diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py
index 4576239d4..7877af690 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py
@@ -1254,7 +1254,7 @@ class AzureContainerServiceScenarioTest(ResourceGroupVCRTestBase): #pylint: disa
dns_prefix = 'myacs123'
#create
- self.cmd('acs create -g {} -n {} --dns-name-prefix {}'.format(self.resource_group, acs_name, dns_prefix), checks=[
+ self.cmd('acs create -g {} -n {} --dns-prefix {}'.format(self.resource_group, acs_name, dns_prefix), checks=[
JMESPathCheck('masterFQDN', '{}mgmt.{}.cloudapp.azure.com'.format(dns_prefix, self.location)),
JMESPathCheck('agentFQDN', '{}agents.{}.cloudapp.azure.com'.format(dns_prefix, self.location))
])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libffi-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.1
applicationinsights==0.10.0
argcomplete==1.3.0
astroid==1.4.9
attrs==22.2.0
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_context&subdirectory=src/command_modules/azure-cli-context
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_vsts&subdirectory=src/command_modules/azure-cli-vsts
-e git+https://github.com/Azure/azure-cli.git@98572bac3cdff9eb2d106140cdf352a6d2979162#egg=azure_cli_webapp&subdirectory=src/command_modules/azure-cli-webapp
azure-common==1.1.4
azure-graphrbac==0.30.0rc6
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-compute==0.30.0rc6
azure-mgmt-dns==0.30.0rc6
azure-mgmt-iothub==0.1.0
azure-mgmt-keyvault==0.30.0
azure-mgmt-network==0.30.0rc6
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.0rc6
azure-mgmt-storage==0.30.0rc6
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.30.0
azure-nspkg==3.0.2
azure-storage==0.33.0
bcrypt==4.0.1
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.3.7
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycparser==2.21
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
PyNaCl==1.5.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.16
vcrpy==1.7.4
wrapt==1.16.0
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.1
- applicationinsights==0.10.0
- argcomplete==1.3.0
- astroid==1.4.9
- attrs==22.2.0
- azure-common==1.1.4
- azure-graphrbac==0.30.0rc6
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-compute==0.30.0rc6
- azure-mgmt-dns==0.30.0rc6
- azure-mgmt-iothub==0.1.0
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-network==0.30.0rc6
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.0rc6
- azure-mgmt-storage==0.30.0rc6
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.30.0
- azure-nspkg==3.0.2
- azure-storage==0.33.0
- bcrypt==4.0.1
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.3.7
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycparser==2.21
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pynacl==1.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- secretstorage==3.3.3
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.7.4
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::AzureContainerServiceScenarioTest::test_acs_create_update"
]
| [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMImageListByAliasesScenarioTest::test_vm_image_list_by_alias",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMOpenPortTest::test_vm_open_port",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMGeneralizeScenarioTest::test_vm_generalize",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetStatesScenarioTest::test_vm_scaleset_states",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetCreateSimple::test_vm_scaleset_create_simple",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetCreateOptions::test_vm_scaleset_create_options",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMSSCreateNoneOptionsTest::test_vmss_create_none_options",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetCreateExistingOptions::test_vm_scaleset_create_existing_options",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCreateUbuntuScenarioTest::test_vm_create_ubuntu",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCreateMultiNicTest::test_vm_create_multinic",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCreateNoneOptionsTest::test_vm_create_none_options",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCreateExistingOptions::test_vm_create_existing_options",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCreateCustomIP::test_vm_create_custom_ip",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMDataDiskVCRTest::test_vm_data_disk"
]
| [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMUsageScenarioTest::test_vm_usage",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMImageListThruServiceScenarioTest::test_vm_images_list_thru_services",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCombinedListTest::test_vm_combined_list",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMResizeTest::test_vm_resize",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMShowListSizesListIPAddressesScenarioTest::test_vm_show_list_sizes_list_ip_addresses",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMSizeListScenarioTest::test_vm_size_list",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMImageListOffersScenarioTest::test_vm_image_list_offers",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMImageListPublishersScenarioTest::test_vm_image_list_publishers",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMImageListSkusScenarioTest::test_vm_image_list_skus",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMImageShowScenarioTest::test_vm_image_show",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMCreateAndStateModificationsScenarioTest::test_vm_create_state_modifications",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMAvailSetScenarioTest::test_vm_availset",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMExtensionAutoUpgradeTest::test_vm_extension_autoupgrade",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMExtensionsScenarioTest::test_vm_extension",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMMachineExtensionImageScenarioTest::test_vm_machine_extension_image",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMExtensionImageSearchScenarioTest::test_vm_extension_image_search",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetGetsScenarioTest::test_vm_scaleset_gets",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetScaleUpScenarioTest::test_vm_scaleset_scaleup",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetDeleteScenarioTest::test_vm_scaleset_delete",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMScaleSetVMsScenarioTest::test_vm_scaleset_vms",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMAccessAddRemoveLinuxUser::test_vm_add_remove_linux_user",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMBootDiagnostics::test_vm_enable_disable_boot_diagnostic",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMExtensionInstallTest::test_vm_extension_install",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::VMSSExtensionInstallTest::test_vmss_extension_install",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/tests/test_vm_commands.py::DiagnosticsExtensionInstallTest::test_diagnostics_extension_install"
]
| []
| MIT License | 831 | [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_actions.py"
]
| [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_actions.py"
]
|
|
zooniverse__panoptes-python-client-55 | 6be958cc842488e5410814910febd6e71b14d7b0 | 2016-10-27 10:58:20 | 3a1605e7e5209e2ad1cd4c38c2daee7fca905980 | diff --git a/panoptes_client/project.py b/panoptes_client/project.py
index fa67fd1..807d3b7 100644
--- a/panoptes_client/project.py
+++ b/panoptes_client/project.py
@@ -30,7 +30,12 @@ class Project(PanoptesObject):
def find(cls, id='', slug=None):
if not id and not slug:
return None
- return cls.where(id=id, slug=slug).next()
+ try:
+ return cls.where(id=id, slug=slug).next()
+ except StopIteration:
+ raise PanoptesAPIException(
+ "Could not find project with slug='{}'".format(slug)
+ )
def get_export(
self,
| Raise something better than StopIteration in .find() when nothing is found
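For illustration, a sketch of the kind of wrapper that would help (`PanoptesAPIException` already exists in `panoptes_client.panoptes`; the helper name is made up):

```python
from panoptes_client import Project
from panoptes_client.panoptes import PanoptesAPIException

def find_project(slug):
    # Sketch: surface a domain error instead of a bare StopIteration.
    try:
        return Project.where(slug=slug).next()
    except StopIteration:
        raise PanoptesAPIException(
            "Could not find project with slug='{}'".format(slug))
```

The call that currently leaks the `StopIteration`: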
https://github.com/zooniverse/panoptes-python-client/blob/67e11e16cd91689e62939a6ba54ff7769259a525/panoptes_client/panoptes.py#L428 | zooniverse/panoptes-python-client | diff --git a/panoptes_client/tests/test_project.py b/panoptes_client/tests/test_project.py
index d900b75..14effae 100644
--- a/panoptes_client/tests/test_project.py
+++ b/panoptes_client/tests/test_project.py
@@ -1,6 +1,7 @@
import unittest
from panoptes_client import Project
+from panoptes_client.panoptes import PanoptesAPIException
class TestProject(unittest.TestCase):
@@ -17,5 +18,5 @@ class TestProject(unittest.TestCase):
self.assertEqual(p, None)
def test_find_unknown_slug(self):
- with self.assertRaises(StopIteration):
+ with self.assertRaises(PanoptesAPIException):
Project.find(slug='invalid_slug')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
-e git+https://github.com/zooniverse/panoptes-python-client.git@6be958cc842488e5410814910febd6e71b14d7b0#egg=panoptes_client
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
requests==2.32.3
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
urllib3==2.3.0
| name: panoptes-python-client
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- idna==3.10
- requests==2.32.3
- urllib3==2.3.0
prefix: /opt/conda/envs/panoptes-python-client
| [
"panoptes_client/tests/test_project.py::TestProject::test_find_unknown_slug"
]
| [
"panoptes_client/tests/test_project.py::TestProject::test_find_id"
]
| [
"panoptes_client/tests/test_project.py::TestProject::test_find_slug",
"panoptes_client/tests/test_project.py::TestProject::test_find_unknown_id"
]
| []
| Apache License 2.0 | 832 | [
"panoptes_client/project.py"
]
| [
"panoptes_client/project.py"
]
|
|
spulec__freezegun-161 | 11d9ead16a55fd31fb545a663a2b758049b4b40d | 2016-10-27 13:04:14 | 181f7ac7f909e561e26f5b293d2d40e82eb99f7a | coveralls:
[](https://coveralls.io/builds/8538937)
Coverage decreased (-0.5%) to 95.726% when pulling **88b1f06d959015d916d116e7687c8e26ea4b2a57 on pelme:ignore-deprecation-warnings** into **11d9ead16a55fd31fb545a663a2b758049b4b40d on spulec:master**.
coveralls:
[](https://coveralls.io/builds/8539163)
Coverage decreased (-0.5%) to 95.726% when pulling **35b2fc1c3d7db2074cbdcf079413b2a8b0a5b100 on pelme:ignore-deprecation-warnings** into **11d9ead16a55fd31fb545a663a2b758049b4b40d on spulec:master**.
| diff --git a/freezegun/api.py b/freezegun/api.py
index 9f63fe9..4502e32 100644
--- a/freezegun/api.py
+++ b/freezegun/api.py
@@ -6,6 +6,7 @@ import time
import calendar
import unittest
import platform
+import warnings
from dateutil import parser
from dateutil.tz import tzlocal
@@ -390,25 +391,28 @@ class _freeze_time(object):
# Save the current loaded modules
self.modules_at_start = set(sys.modules.keys())
- for mod_name, module in list(sys.modules.items()):
- if mod_name is None or module is None:
- continue
- elif mod_name.startswith(self.ignore):
- continue
- elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
- continue
- for module_attribute in dir(module):
- if module_attribute in real_names:
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore')
+
+ for mod_name, module in list(sys.modules.items()):
+ if mod_name is None or module is None:
continue
- try:
- attribute_value = getattr(module, module_attribute)
- except (ImportError, AttributeError, TypeError):
- # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
+ elif mod_name.startswith(self.ignore):
continue
- fake = fakes.get(id(attribute_value))
- if fake:
- setattr(module, module_attribute, fake)
- add_change((module, module_attribute, attribute_value))
+ elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
+ continue
+ for module_attribute in dir(module):
+ if module_attribute in real_names:
+ continue
+ try:
+ attribute_value = getattr(module, module_attribute)
+ except (ImportError, AttributeError, TypeError):
+ # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
+ continue
+ fake = fakes.get(id(attribute_value))
+ if fake:
+ setattr(module, module_attribute, fake)
+ add_change((module, module_attribute, attribute_value))
datetime.datetime.times_to_freeze.append(time_to_freeze)
datetime.datetime.tz_offsets.append(self.tz_offset)
@@ -436,26 +440,29 @@ class _freeze_time(object):
# Restore modules loaded after start()
modules_to_restore = set(sys.modules.keys()) - self.modules_at_start
self.modules_at_start = set()
- for mod_name in modules_to_restore:
- module = sys.modules.get(mod_name, None)
- if mod_name is None or module is None:
- continue
- elif mod_name.startswith(self.ignore):
- continue
- elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
- continue
- for module_attribute in dir(module):
- if module_attribute in self.fake_names:
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore')
+ for mod_name in modules_to_restore:
+ module = sys.modules.get(mod_name, None)
+ if mod_name is None or module is None:
continue
- try:
- attribute_value = getattr(module, module_attribute)
- except (ImportError, AttributeError, TypeError):
- # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
+ elif mod_name.startswith(self.ignore):
continue
-
- real = self.reals.get(id(attribute_value))
- if real:
- setattr(module, module_attribute, real)
+ elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
+ continue
+ for module_attribute in dir(module):
+
+ if module_attribute in self.fake_names:
+ continue
+ try:
+ attribute_value = getattr(module, module_attribute)
+ except (ImportError, AttributeError, TypeError):
+ # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
+ continue
+
+ real = self.reals.get(id(attribute_value))
+ if real:
+ setattr(module, module_attribute, real)
time.time = time.time.previous_time_function
time.gmtime = time.gmtime.previous_gmtime_function
| freeze_time's access to every imported module's attributes triggers celery's deprecation warnings
Since https://github.com/spulec/freezegun/commit/70518d630c71a37d3c651dee7b73d347b5d75405, freezegun has been fetching every attribute from every imported module to check whether it matches one of the patchable date/time objects. However, the celery package imports a number of modules that throw deprecation warnings when their attributes are accessed. Celery slightly complicates tracking this issue down because it implements some LazyModule functionality, so the deprecation warnings only fire on first access. Normally that keeps the warnings away from the developer unless they intended to use a deprecated module, but since freezegun touches every attribute on every module, the warning is always thrown.
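One possible way out, as a hedged sketch: wrap the attribute scan in a warnings filter. The loop shape below mirrors `_freeze_time.start()`, the `real_*`/fake bookkeeping is omitted, and I haven't verified this against celery's lazy modules:

```python
import sys
import warnings

def _touch_all_module_attributes():
    # Sketch: do freezegun-style attribute scanning, but keep warnings
    # raised by lazy-module attribute access from reaching the user.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore')
        for mod_name, module in list(sys.modules.items()):
            if module is None:
                continue
            for module_attribute in dir(module):
                getattr(module, module_attribute, None)
```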
| spulec/freezegun | diff --git a/tests/test_warnings.py b/tests/test_warnings.py
new file mode 100644
index 0000000..7747081
--- /dev/null
+++ b/tests/test_warnings.py
@@ -0,0 +1,84 @@
+import contextlib
+import datetime
+import sys
+import types
+import warnings
+
+from freezegun import freeze_time
+
+
+class ModuleWithWarning(object):
+ """
+ A module that triggers warnings on attribute access.
+
+ This does not happen with regular modules, there has to be a bit of lazy
+ module magic going on in order for this to happen.
+
+ Examples of modules that uses this pattern in real projects can be found at:
+
+ py.code - the compiler package import causes a warning to be emitted:
+ https://github.com/pytest-dev/py/blob/67987e26aadddbbe7d1ec76c16ea9be346ae9811/py/__init__.py
+ https://github.com/pytest-dev/py/blob/67987e26aadddbbe7d1ec76c16ea9be346ae9811/py/_code/_assertionold.py#L3
+
+ celery.task - the sets module is listed in __all__ in celery.task and freeze_time accesses it:
+ https://github.com/celery/celery/blob/46c92025cdec07a4a30ad44901cf66cb27346638/celery/task/__init__.py
+ https://github.com/celery/celery/blob/46c92025cdec07a4a30ad44901cf66cb27346638/celery/task/sets.py
+ """
+ __name__ = 'module_with_warning'
+ __dict__ = {}
+ warning_triggered = False
+ counter = 0
+
+ @property
+ def attribute_that_emits_a_warning(self):
+ # Use unique warning messages to avoid messages being only reported once
+ self.__class__.counter += 1
+ warnings.warn('this is test warning #{counter}'.format(counter=self.__class__.counter))
+ self.warning_triggered = True
+
+
[email protected]
+def assert_module_with_emitted_warning():
+ """Install a module that triggers warnings into sys.modules and ensure the
+ warning was triggered in the with-block. """
+ module = sys.modules['module_with_warning'] = ModuleWithWarning()
+
+ try:
+ yield
+ finally:
+ del sys.modules['module_with_warning']
+
+ assert module.warning_triggered
+
+
[email protected]
+def assert_no_warnings():
+ """A context manager that makes sure no warnings was emitted."""
+ with warnings.catch_warnings(record=True) as caught_warnings:
+ warnings.filterwarnings('always')
+ yield
+ assert not caught_warnings
+
+
+def test_ignore_warnings_in_start():
+ """Make sure that modules being introspected in start() does not emit warnings."""
+ with assert_module_with_emitted_warning():
+ freezer = freeze_time(datetime.datetime(2016, 10, 27, 9, 56))
+
+ try:
+ with assert_no_warnings():
+ freezer.start()
+
+ finally:
+ freezer.stop()
+
+
+def test_ignore_warnings_in_stop():
+ """Make sure that modules that was loaded after start() does not trigger
+ warnings in stop()"""
+ freezer = freeze_time(datetime.datetime(2016, 10, 27, 9, 56))
+ freezer.start()
+
+ with assert_module_with_emitted_warning():
+ with assert_no_warnings():
+ freezer.stop()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==3.7.1
coveralls==1.11.1
docopt==0.6.2
exceptiongroup==1.2.2
-e git+https://github.com/spulec/freezegun.git@11d9ead16a55fd31fb545a663a2b758049b4b40d#egg=freezegun
idna==3.10
iniconfig==2.1.0
mock==5.2.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: freezegun
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==3.7.1
- coveralls==1.11.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/freezegun
| [
"tests/test_warnings.py::test_ignore_warnings_in_start",
"tests/test_warnings.py::test_ignore_warnings_in_stop"
]
| []
| []
| []
| Apache License 2.0 | 833 | [
"freezegun/api.py"
]
| [
"freezegun/api.py"
]
|
rabitt__pysox-28 | bba620da31f76adbb4fa6cc524151b2518dc3079 | 2016-10-27 22:35:19 | 8a6748d32b6917d5ef920895fbfc734dda21f294 | diff --git a/setup.py b/setup.py
index eee03d9..d0cd01c 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ if __name__ == "__main__":
setup(
name='sox',
- version='1.2.1',
+ version='1.2.2',
description='Python wrapper around SoX.',
diff --git a/sox/core.py b/sox/core.py
index 2b14d01..64a70cd 100644
--- a/sox/core.py
+++ b/sox/core.py
@@ -67,7 +67,6 @@ def sox(args):
class SoxError(Exception):
'''Exception to be raised when SoX exits with non-zero status.
'''
-
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
diff --git a/sox/transform.py b/sox/transform.py
index 438a3c3..f4eb248 100644
--- a/sox/transform.py
+++ b/sox/transform.py
@@ -395,6 +395,11 @@ class Transformer(object):
file_info.validate_input_file(input_filepath)
file_info.validate_output_file(output_filepath)
+ if input_filepath == output_filepath:
+ raise ValueError(
+ "input_filepath must be different from output_filepath."
+ )
+
args = []
args.extend(self.globals)
args.extend(self.input_format)
| Support using same file path for input & output
Sox doesn't support using the same file as both input and output - doing this will result in an empty, invalid audio file. While this is sox behavior and not pysox, it would be nice if pysox took care of this behind the scenes. Right now the user needs to worry about this logic themselves, e.g. like this:
```python
import tempfile
import shutil
from scaper.util import _close_temp_files
audio_infile = '/Users/justin/Downloads/trimtest.wav'
audio_outfile = '/Users/justin/Downloads/trimtest.wav'
start_time = 2
end_time = 3
tfm = sox.Transformer()
tfm.trim(start_time, end_time)
if audio_outfile != audio_infile:
tfm.build(audio_infile, audio_outfile)
else:
# must use temp file in order to save to same file
tmpfiles = []
with _close_temp_files(tmpfiles):
# Create tmp file
tmpfiles.append(
tempfile.NamedTemporaryFile(
suffix='.wav', delete=True))
# Save trimmed result to temp file
tfm.build(audio_infile, tmpfiles[-1].name)
# Copy result back to original file
shutil.copyfile(tmpfiles[-1].name, audio_outfile)
```
Pysox *does* issue a warning when a file is about to be overwritten, which is even more confusing in this scenario, since a user who is unfamiliar with this quirk of sox has no reason to suspect that the overwritten file will be invalid.
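For illustration, here is a minimal sketch of both sides of the fix (assumptions: `build_guarded` and `build_in_place` are hypothetical helpers, and `build` stands in for a callable like `tfm.build`; none of these names are part of the pysox API):

```python
import os
import shutil
import tempfile


def build_guarded(build, input_filepath, output_filepath):
    """Refuse the silent-corruption case up front, as the patch now does."""
    if input_filepath == output_filepath:
        raise ValueError(
            'input_filepath must be different from output_filepath.')
    build(input_filepath, output_filepath)


def build_in_place(build, filepath):
    """Safely 'edit in place' by routing the output through a temp file."""
    fd, tmp_path = tempfile.mkstemp(suffix=os.path.splitext(filepath)[1])
    os.close(fd)
    try:
        build(filepath, tmp_path)        # sox reads filepath, writes tmp_path
        shutil.copyfile(tmp_path, filepath)
    finally:
        os.remove(tmp_path)
```

Raising `ValueError` keeps the library-side behavior simple and explicit; callers who genuinely want in-place editing can opt into the temp-file round trip themselves.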
index aa357d1..056676c 100644
--- a/tests/test_transform.py
+++ b/tests/test_transform.py
@@ -386,6 +386,10 @@ class TestTransformerBuild(unittest.TestCase):
with self.assertRaises(IOError):
self.tfm.build('blah/asdf.wav', OUTPUT_FILE)
+ def test_input_output_equal(self):
+ with self.assertRaises(ValueError):
+ self.tfm.build(INPUT_FILE, INPUT_FILE)
+
def test_failed_sox(self):
self.tfm.effects = ['channels', '-1']
with self.assertRaises(SoxError):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-pep8"
],
"pre_install": [
"apt-get update",
"apt-get install -y sox"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
packaging==24.2
pep8==1.7.1
pluggy==1.5.0
pytest==8.3.5
pytest-cache==1.0
pytest-cov==6.0.0
pytest-pep8==1.0.6
-e git+https://github.com/rabitt/pysox.git@bba620da31f76adbb4fa6cc524151b2518dc3079#egg=sox
tomli==2.2.1
| name: pysox
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- packaging==24.2
- pep8==1.7.1
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cache==1.0
- pytest-cov==6.0.0
- pytest-pep8==1.0.6
- tomli==2.2.1
prefix: /opt/conda/envs/pysox
| [
"tests/test_transform.py::TestTransformerBuild::test_input_output_equal"
]
| []
| [
"tests/test_transform.py::TestTransformDefault::test_effects",
"tests/test_transform.py::TestTransformDefault::test_effects_log",
"tests/test_transform.py::TestTransformDefault::test_globals",
"tests/test_transform.py::TestTransformDefault::test_input_format",
"tests/test_transform.py::TestTransformDefault::test_output_format",
"tests/test_transform.py::TestTransformSetGlobals::test_defaults",
"tests/test_transform.py::TestTransformSetGlobals::test_dither",
"tests/test_transform.py::TestTransformSetGlobals::test_dither_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_guard",
"tests/test_transform.py::TestTransformSetGlobals::test_guard_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_multithread",
"tests/test_transform.py::TestTransformSetGlobals::test_multithread_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_replay_gain",
"tests/test_transform.py::TestTransformSetGlobals::test_replay_gain_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_verbosity",
"tests/test_transform.py::TestTransformSetGlobals::test_verbosity_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_bits",
"tests/test_transform.py::TestTransformSetInputFormat::test_bits_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_bits_invalid2",
"tests/test_transform.py::TestTransformSetInputFormat::test_channels",
"tests/test_transform.py::TestTransformSetInputFormat::test_channels_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_channels_invalid2",
"tests/test_transform.py::TestTransformSetInputFormat::test_defaults",
"tests/test_transform.py::TestTransformSetInputFormat::test_encoding",
"tests/test_transform.py::TestTransformSetInputFormat::test_encoding_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_file_type",
"tests/test_transform.py::TestTransformSetInputFormat::test_file_type_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_ignore_length",
"tests/test_transform.py::TestTransformSetInputFormat::test_ignore_length_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate_invalid2",
"tests/test_transform.py::TestTransformSetOutputFormat::test_append_comments",
"tests/test_transform.py::TestTransformSetOutputFormat::test_append_comments_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_bits",
"tests/test_transform.py::TestTransformSetOutputFormat::test_bits_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_bits_invalid2",
"tests/test_transform.py::TestTransformSetOutputFormat::test_channels",
"tests/test_transform.py::TestTransformSetOutputFormat::test_channels_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_channels_invalid2",
"tests/test_transform.py::TestTransformSetOutputFormat::test_comments",
"tests/test_transform.py::TestTransformSetOutputFormat::test_comments_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_defaults",
"tests/test_transform.py::TestTransformSetOutputFormat::test_encoding",
"tests/test_transform.py::TestTransformSetOutputFormat::test_encoding_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_file_type",
"tests/test_transform.py::TestTransformSetOutputFormat::test_file_type_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate_invalid2",
"tests/test_transform.py::TestTransformerBuild::test_failed_sox",
"tests/test_transform.py::TestTransformerBuild::test_invalid",
"tests/test_transform.py::TestTransformerBuild::test_valid",
"tests/test_transform.py::TestTransformerBuild::test_valid_spacey",
"tests/test_transform.py::TestTransformerPreview::test_valid",
"tests/test_transform.py::TestTransformerAllpass::test_default",
"tests/test_transform.py::TestTransformerAllpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerAllpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerBandpass::test_constant_skirt",
"tests/test_transform.py::TestTransformerBandpass::test_constant_skirt_invalid",
"tests/test_transform.py::TestTransformerBandpass::test_default",
"tests/test_transform.py::TestTransformerBandpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerBandpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerBandreject::test_default",
"tests/test_transform.py::TestTransformerBandreject::test_frequency_invalid",
"tests/test_transform.py::TestTransformerBandreject::test_width_q_invalid",
"tests/test_transform.py::TestTransformerBass::test_default",
"tests/test_transform.py::TestTransformerBass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerBass::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerBass::test_slope_invalid",
"tests/test_transform.py::TestTransformerBend::test_cents_invalid_len",
"tests/test_transform.py::TestTransformerBend::test_cents_invalid_nonlist",
"tests/test_transform.py::TestTransformerBend::test_cents_invalid_vals",
"tests/test_transform.py::TestTransformerBend::test_default",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_len",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_nonlist",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_order",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_vals",
"tests/test_transform.py::TestTransformerBend::test_frame_rate_invalid",
"tests/test_transform.py::TestTransformerBend::test_frame_rate_valid",
"tests/test_transform.py::TestTransformerBend::test_n_bends_invalid",
"tests/test_transform.py::TestTransformerBend::test_overlapping_intervals",
"tests/test_transform.py::TestTransformerBend::test_oversample_rate_invalid",
"tests/test_transform.py::TestTransformerBend::test_oversample_rate_valid",
"tests/test_transform.py::TestTransformerBend::test_start_greater_end",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_len",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_nonlist",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_order",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_vals",
"tests/test_transform.py::TestTransformerBiquad::test_a_non_num",
"tests/test_transform.py::TestTransformerBiquad::test_a_nonlist",
"tests/test_transform.py::TestTransformerBiquad::test_a_wrong_len",
"tests/test_transform.py::TestTransformerBiquad::test_b_non_num",
"tests/test_transform.py::TestTransformerBiquad::test_b_nonlist",
"tests/test_transform.py::TestTransformerBiquad::test_b_wrong_len",
"tests/test_transform.py::TestTransformerBiquad::test_default",
"tests/test_transform.py::TestTransformerChannels::test_default",
"tests/test_transform.py::TestTransformerChannels::test_invalid_nchannels",
"tests/test_transform.py::TestTransformerChorus::test_default",
"tests/test_transform.py::TestTransformerChorus::test_explicit_args",
"tests/test_transform.py::TestTransformerChorus::test_invalid_decays",
"tests/test_transform.py::TestTransformerChorus::test_invalid_decays_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_decays_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_delays",
"tests/test_transform.py::TestTransformerChorus::test_invalid_delays_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_delays_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_depths",
"tests/test_transform.py::TestTransformerChorus::test_invalid_depths_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_depths_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_gain_in",
"tests/test_transform.py::TestTransformerChorus::test_invalid_gain_out",
"tests/test_transform.py::TestTransformerChorus::test_invalid_n_voices",
"tests/test_transform.py::TestTransformerChorus::test_invalid_shapes",
"tests/test_transform.py::TestTransformerChorus::test_invalid_shapes_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_shapes_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_speeds",
"tests/test_transform.py::TestTransformerChorus::test_invalid_speeds_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_speeds_wronglen",
"tests/test_transform.py::TestTransformerContrast::test_default",
"tests/test_transform.py::TestTransformerContrast::test_invalid_amount_big",
"tests/test_transform.py::TestTransformerContrast::test_invalid_amount_neg",
"tests/test_transform.py::TestTransformerContrast::test_invalid_amount_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_attack_bigger_decay",
"tests/test_transform.py::TestTransformerCompand::test_attack_time_invalid_neg",
"tests/test_transform.py::TestTransformerCompand::test_attack_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_attack_time_valid",
"tests/test_transform.py::TestTransformerCompand::test_decay_time_invalid_neg",
"tests/test_transform.py::TestTransformerCompand::test_decay_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_decay_time_valid",
"tests/test_transform.py::TestTransformerCompand::test_default",
"tests/test_transform.py::TestTransformerCompand::test_soft_knee_invalid",
"tests/test_transform.py::TestTransformerCompand::test_soft_knee_none",
"tests/test_transform.py::TestTransformerCompand::test_soft_knee_valid",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_empty",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_nonlist",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_nontuples",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_dups",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_len",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_nonnum2",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_positive",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_valid",
"tests/test_transform.py::TestTransformerConvert::test_bitdepth_invalid",
"tests/test_transform.py::TestTransformerConvert::test_bitdepth_valid",
"tests/test_transform.py::TestTransformerConvert::test_channels_invalid1",
"tests/test_transform.py::TestTransformerConvert::test_channels_invalid2",
"tests/test_transform.py::TestTransformerConvert::test_channels_valid",
"tests/test_transform.py::TestTransformerConvert::test_default",
"tests/test_transform.py::TestTransformerConvert::test_samplerate_invalid",
"tests/test_transform.py::TestTransformerConvert::test_samplerate_valid",
"tests/test_transform.py::TestTransformerDcshift::test_default",
"tests/test_transform.py::TestTransformerDcshift::test_invalid_shift_big",
"tests/test_transform.py::TestTransformerDcshift::test_invalid_shift_neg",
"tests/test_transform.py::TestTransformerDcshift::test_invalid_shift_nonnum",
"tests/test_transform.py::TestTransformerDeemph::test_default",
"tests/test_transform.py::TestTransformerDelay::test_default",
"tests/test_transform.py::TestTransformerDelay::test_default_three_channel",
"tests/test_transform.py::TestTransformerDelay::test_invalid_position_type",
"tests/test_transform.py::TestTransformerDelay::test_invalid_position_vals",
"tests/test_transform.py::TestTransformerDownsample::test_default",
"tests/test_transform.py::TestTransformerDownsample::test_invalid_factor_neg",
"tests/test_transform.py::TestTransformerDownsample::test_invalid_factor_nonnum",
"tests/test_transform.py::TestTransformerEarwax::test_default",
"tests/test_transform.py::TestTransformerEcho::test_decays_invalid_len",
"tests/test_transform.py::TestTransformerEcho::test_decays_invalid_type",
"tests/test_transform.py::TestTransformerEcho::test_decays_invalid_vals",
"tests/test_transform.py::TestTransformerEcho::test_decays_valid",
"tests/test_transform.py::TestTransformerEcho::test_default",
"tests/test_transform.py::TestTransformerEcho::test_delays_invalid_len",
"tests/test_transform.py::TestTransformerEcho::test_delays_invalid_type",
"tests/test_transform.py::TestTransformerEcho::test_delays_invalid_vals",
"tests/test_transform.py::TestTransformerEcho::test_delays_valid",
"tests/test_transform.py::TestTransformerEcho::test_gain_in_invalid",
"tests/test_transform.py::TestTransformerEcho::test_gain_in_valid",
"tests/test_transform.py::TestTransformerEcho::test_gain_out_invalid",
"tests/test_transform.py::TestTransformerEcho::test_gain_out_valid",
"tests/test_transform.py::TestTransformerEcho::test_n_echos_invalid",
"tests/test_transform.py::TestTransformerEcho::test_n_echos_valid",
"tests/test_transform.py::TestTransformerEchos::test_decays_invalid_len",
"tests/test_transform.py::TestTransformerEchos::test_decays_invalid_type",
"tests/test_transform.py::TestTransformerEchos::test_decays_invalid_vals",
"tests/test_transform.py::TestTransformerEchos::test_decays_valid",
"tests/test_transform.py::TestTransformerEchos::test_default",
"tests/test_transform.py::TestTransformerEchos::test_delays_invalid_len",
"tests/test_transform.py::TestTransformerEchos::test_delays_invalid_type",
"tests/test_transform.py::TestTransformerEchos::test_delays_invalid_vals",
"tests/test_transform.py::TestTransformerEchos::test_delays_valid",
"tests/test_transform.py::TestTransformerEchos::test_gain_in_invalid",
"tests/test_transform.py::TestTransformerEchos::test_gain_in_valid",
"tests/test_transform.py::TestTransformerEchos::test_gain_out_invalid",
"tests/test_transform.py::TestTransformerEchos::test_gain_out_valid",
"tests/test_transform.py::TestTransformerEchos::test_n_echos_invalid",
"tests/test_transform.py::TestTransformerEchos::test_n_echos_valid",
"tests/test_transform.py::TestTransformerEqualizer::test_default",
"tests/test_transform.py::TestTransformerEqualizer::test_frequency_invalid",
"tests/test_transform.py::TestTransformerEqualizer::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerEqualizer::test_width_q_invalid",
"tests/test_transform.py::TestTransformerFade::test_default",
"tests/test_transform.py::TestTransformerFade::test_fade_in_invalid",
"tests/test_transform.py::TestTransformerFade::test_fade_in_valid",
"tests/test_transform.py::TestTransformerFade::test_fade_out_invalid",
"tests/test_transform.py::TestTransformerFade::test_fade_out_valid",
"tests/test_transform.py::TestTransformerFade::test_fade_shape_invalid",
"tests/test_transform.py::TestTransformerFade::test_fade_shape_valid",
"tests/test_transform.py::TestTransformerFir::test_default",
"tests/test_transform.py::TestTransformerFir::test_invalid_coeffs_nonlist",
"tests/test_transform.py::TestTransformerFir::test_invalid_coeffs_vals",
"tests/test_transform.py::TestTransformerFlanger::test_default",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_delay_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_delay_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_depth_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_depth_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_interp_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_interp_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_phase_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_phase_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_regen_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_regen_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_shape_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_shape_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_speed_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_speed_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_width_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_width_valid",
"tests/test_transform.py::TestTransformerGain::test_balance_invalid",
"tests/test_transform.py::TestTransformerGain::test_balance_valid",
"tests/test_transform.py::TestTransformerGain::test_default",
"tests/test_transform.py::TestTransformerGain::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerGain::test_gain_db_valid",
"tests/test_transform.py::TestTransformerGain::test_limiter_invalid",
"tests/test_transform.py::TestTransformerGain::test_limiter_valid",
"tests/test_transform.py::TestTransformerGain::test_normalize_invalid",
"tests/test_transform.py::TestTransformerGain::test_normalize_valid",
"tests/test_transform.py::TestTransformerHighpass::test_default",
"tests/test_transform.py::TestTransformerHighpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerHighpass::test_n_poles_invalid",
"tests/test_transform.py::TestTransformerHighpass::test_one_pole",
"tests/test_transform.py::TestTransformerHighpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerHilbert::test_default",
"tests/test_transform.py::TestTransformerHilbert::test_num_taps_invalid",
"tests/test_transform.py::TestTransformerHilbert::test_num_taps_invalid_even",
"tests/test_transform.py::TestTransformerHilbert::test_num_taps_valid",
"tests/test_transform.py::TestTransformerLowpass::test_default",
"tests/test_transform.py::TestTransformerLowpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerLowpass::test_n_poles_invalid",
"tests/test_transform.py::TestTransformerLowpass::test_one_pole",
"tests/test_transform.py::TestTransformerLowpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerLoudness::test_default",
"tests/test_transform.py::TestTransformerLoudness::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerLoudness::test_gain_db_valid",
"tests/test_transform.py::TestTransformerLoudness::test_reference_level_invalid",
"tests/test_transform.py::TestTransformerLoudness::test_reference_level_oorange",
"tests/test_transform.py::TestTransformerLoudness::test_reference_level_valid",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_len",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_neg",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_type",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_valid",
"tests/test_transform.py::TestTransformerMcompand::test_crossover_frequencies_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_crossover_frequencies_invalid_vals",
"tests/test_transform.py::TestTransformerMcompand::test_crossover_frequencies_valid",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_len",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_neg",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_type",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_valid",
"tests/test_transform.py::TestTransformerMcompand::test_default",
"tests/test_transform.py::TestTransformerMcompand::test_n_bands_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_n_bands_valid",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_db_invalid_len",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_db_invalid_type",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_none",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_valid",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_empty",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_nonlist",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_nontuples",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_tup_len",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_tup_nonnum",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_tup_dups",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_tup_nonnum2",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_tup_positive",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_valid",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_wrong_len",
"tests/test_transform.py::TestTransformerNorm::test_db_level_invalid",
"tests/test_transform.py::TestTransformerNorm::test_db_level_valid",
"tests/test_transform.py::TestTransformerNorm::test_default",
"tests/test_transform.py::TestTransformerOops::test_default",
"tests/test_transform.py::TestTransformerOverdrive::test_colour_invalid",
"tests/test_transform.py::TestTransformerOverdrive::test_colour_valid",
"tests/test_transform.py::TestTransformerOverdrive::test_default",
"tests/test_transform.py::TestTransformerOverdrive::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerOverdrive::test_gain_db_valid",
"tests/test_transform.py::TestTransformerPad::test_default",
"tests/test_transform.py::TestTransformerPad::test_end_duration_invalid",
"tests/test_transform.py::TestTransformerPad::test_end_duration_valid",
"tests/test_transform.py::TestTransformerPad::test_start_duration_invalid",
"tests/test_transform.py::TestTransformerPad::test_start_duration_valid",
"tests/test_transform.py::TestTransformerPhaser::test_decay_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_decay_valid",
"tests/test_transform.py::TestTransformerPhaser::test_default",
"tests/test_transform.py::TestTransformerPhaser::test_delay_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_delay_valid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_in_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_in_valid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_out_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_out_valid",
"tests/test_transform.py::TestTransformerPhaser::test_modulation_shape_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_modulation_shape_valid",
"tests/test_transform.py::TestTransformerPhaser::test_speed_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_speed_valid",
"tests/test_transform.py::TestTransformerPitch::test_default",
"tests/test_transform.py::TestTransformerPitch::test_n_semitones_invalid",
"tests/test_transform.py::TestTransformerPitch::test_n_semitones_valid",
"tests/test_transform.py::TestTransformerPitch::test_n_semitones_warning",
"tests/test_transform.py::TestTransformerPitch::test_quick_invalid",
"tests/test_transform.py::TestTransformerPitch::test_quick_valid",
"tests/test_transform.py::TestTransformerRate::test_default",
"tests/test_transform.py::TestTransformerRate::test_quality_invalid",
"tests/test_transform.py::TestTransformerRate::test_quality_valid",
"tests/test_transform.py::TestTransformerRate::test_samplerate_invalid",
"tests/test_transform.py::TestTransformerRate::test_samplerate_valid",
"tests/test_transform.py::TestTransformerRepeat::test_count_invalid",
"tests/test_transform.py::TestTransformerRepeat::test_count_invalid_fmt",
"tests/test_transform.py::TestTransformerRepeat::test_count_valid",
"tests/test_transform.py::TestTransformerRepeat::test_default",
"tests/test_transform.py::TestTransformerReverb::test_default",
"tests/test_transform.py::TestTransformerReverb::test_high_freq_damping_invalid",
"tests/test_transform.py::TestTransformerReverb::test_high_freq_damping_valid",
"tests/test_transform.py::TestTransformerReverb::test_pre_delay_invalid",
"tests/test_transform.py::TestTransformerReverb::test_pre_delay_valid",
"tests/test_transform.py::TestTransformerReverb::test_reverberance_invalid",
"tests/test_transform.py::TestTransformerReverb::test_reverberance_valid",
"tests/test_transform.py::TestTransformerReverb::test_room_scale_invalid",
"tests/test_transform.py::TestTransformerReverb::test_room_scale_valid",
"tests/test_transform.py::TestTransformerReverb::test_stereo_depth_invalid",
"tests/test_transform.py::TestTransformerReverb::test_stereo_depth_valid",
"tests/test_transform.py::TestTransformerReverb::test_wet_gain_invalid",
"tests/test_transform.py::TestTransformerReverb::test_wet_gain_valid",
"tests/test_transform.py::TestTransformerReverb::test_wet_only_invalid",
"tests/test_transform.py::TestTransformerReverb::test_wet_only_valid",
"tests/test_transform.py::TestTransformerReverse::test_default",
"tests/test_transform.py::TestTransformerSilence::test_buffer_around_silence_invalid",
"tests/test_transform.py::TestTransformerSilence::test_buffer_around_silence_valid",
"tests/test_transform.py::TestTransformerSilence::test_default",
"tests/test_transform.py::TestTransformerSilence::test_location_beginning",
"tests/test_transform.py::TestTransformerSilence::test_location_end",
"tests/test_transform.py::TestTransformerSilence::test_location_invalid",
"tests/test_transform.py::TestTransformerSilence::test_min_silence_duration_invalid",
"tests/test_transform.py::TestTransformerSilence::test_min_silence_duration_valid",
"tests/test_transform.py::TestTransformerSilence::test_silence_threshold_invalid",
"tests/test_transform.py::TestTransformerSilence::test_silence_threshold_invalid2",
"tests/test_transform.py::TestTransformerSilence::test_silence_threshold_valid",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_high",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_list",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_list_len",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_number",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_reject",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_valid_float",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_valid_list",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_valid_unordered",
"tests/test_transform.py::TestTransformerSinc::test_default",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_invalid",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_valid_low",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_valid_pass",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_valid_reject",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_invalid",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_invalid_large",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_invalid_small",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_valid_high",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_valid_low",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_valid_mid",
"tests/test_transform.py::TestTransformerSinc::test_stop_band_attenuation_invalid",
"tests/test_transform.py::TestTransformerSinc::test_stop_band_attenuation_valid",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid_float",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid_list_elt",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid_low",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_linvalid_list_len",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_high",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_low",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_pass_float",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_pass_list",
"tests/test_transform.py::TestTransformerSpeed::test_default",
"tests/test_transform.py::TestTransformerSpeed::test_factor_invalid",
"tests/test_transform.py::TestTransformerSpeed::test_factor_valid",
"tests/test_transform.py::TestTransformerSpeed::test_factor_valid_extreme",
"tests/test_transform.py::TestTransformerSwap::test_default",
"tests/test_transform.py::TestTransformerStretch::test_default",
"tests/test_transform.py::TestTransformerStretch::test_factor_extreme",
"tests/test_transform.py::TestTransformerStretch::test_factor_invalid",
"tests/test_transform.py::TestTransformerStretch::test_factor_valid",
"tests/test_transform.py::TestTransformerStretch::test_window_invalid",
"tests/test_transform.py::TestTransformerStretch::test_window_valid",
"tests/test_transform.py::TestTransformerTempo::test_audio_type_invalid",
"tests/test_transform.py::TestTransformerTempo::test_audio_type_valid",
"tests/test_transform.py::TestTransformerTempo::test_default",
"tests/test_transform.py::TestTransformerTempo::test_factor_invalid",
"tests/test_transform.py::TestTransformerTempo::test_factor_valid",
"tests/test_transform.py::TestTransformerTempo::test_factor_warning",
"tests/test_transform.py::TestTransformerTempo::test_quick_invalid",
"tests/test_transform.py::TestTransformerTempo::test_quick_valid",
"tests/test_transform.py::TestTransformerTreble::test_default",
"tests/test_transform.py::TestTransformerTreble::test_frequency_invalid",
"tests/test_transform.py::TestTransformerTreble::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerTreble::test_slope_invalid",
"tests/test_transform.py::TestTransformerTremolo::test_default",
"tests/test_transform.py::TestTransformerTremolo::test_depth_invalid",
"tests/test_transform.py::TestTransformerTremolo::test_speed_invalid",
"tests/test_transform.py::TestTransformerTrim::test_default",
"tests/test_transform.py::TestTransformerTrim::test_invalid_end_time",
"tests/test_transform.py::TestTransformerTrim::test_invalid_start_time",
"tests/test_transform.py::TestTransformerTrim::test_invalid_time_pair",
"tests/test_transform.py::TestTransformerUpsample::test_default",
"tests/test_transform.py::TestTransformerUpsample::test_invalid_factor_decimal",
"tests/test_transform.py::TestTransformerUpsample::test_invalid_factor_neg",
"tests/test_transform.py::TestTransformerUpsample::test_invalid_factor_nonnum",
"tests/test_transform.py::TestTransformerVad::test_default",
"tests/test_transform.py::TestTransformerVad::test_end_location",
"tests/test_transform.py::TestTransformerVad::test_invalid_activity_threshold",
"tests/test_transform.py::TestTransformerVad::test_invalid_initial_pad",
"tests/test_transform.py::TestTransformerVad::test_invalid_initial_search_buffer",
"tests/test_transform.py::TestTransformerVad::test_invalid_location",
"tests/test_transform.py::TestTransformerVad::test_invalid_max_gap",
"tests/test_transform.py::TestTransformerVad::test_invalid_min_activity_duration",
"tests/test_transform.py::TestTransformerVad::test_invalid_normalize",
"tests/test_transform.py::TestTransformerVad::test_no_normalize"
]
| []
| BSD 3-Clause "New" or "Revised" License | 834 | [
"setup.py",
"sox/transform.py",
"sox/core.py"
]
| [
"setup.py",
"sox/transform.py",
"sox/core.py"
]
|
|
acorg__dark-matter-437 | 907a4ac310a1e855f076a2a65367d21266bb7299 | 2016-10-28 00:42:55 | 907a4ac310a1e855f076a2a65367d21266bb7299 | diff --git a/dark/blast/hsp.py b/dark/blast/hsp.py
index 70087d8..1c264d6 100644
--- a/dark/blast/hsp.py
+++ b/dark/blast/hsp.py
@@ -6,12 +6,11 @@ def printHSP(hsp, indent=''):
def normalizeHSP(hsp, readLen, blastApplication):
"""
- Examine the sense of an HSP and return information about where the
- read and the alignment (match) begin and end. Return a dict with keys
- that allow the read and the alignment to be displayed relative to the
- hit orientation (i.e., with start < stop for both the read and the
- match). The returned read indices are offsets into the hit. I.e.,
- they indicate where on the hit the read lies.
+ Examine an HSP and return information about where the query and subject
+ match begins and ends. Return a dict with keys that allow the query to
+ be displayed against the subject. The returned readStartInSubject and
+ readEndInSubject indices are offsets into the subject. I.e., they
+ indicate where in the subject the query falls.
In the returned object, all indices are suitable for Python string
slicing etc. We must be careful to convert from the 1-based offsets
@@ -120,6 +119,16 @@ def normalizeHSP(hsp, readLen, blastApplication):
subjectLength = subjectEnd - subjectStart
readLength = readEndInSubject - readStartInSubject
+ # NOTE: readLength (above) is a really bad name. It's actually going to
+ # hold the length of the match in the query. I don't know why
+ # readEndInSubject - readStartInSubject is used (I mean why those two
+ # variables are not named readEnd and readStart). Maybe someone made a
+ # find and replace editing error which changed their names. Anyway, the
+ # readLength variable is confusingly named because this function is
+ # passed a 'readLen' argument, which does happen to be the full length
+ # of the read. This should be cleaned up. See ../diamond/hsp.py for
+ # something cleaner.
+
hitGaps = hsp['sbjct'].count('-')
readGaps = hsp['query'].count('-')
diff --git a/dark/consensus.py b/dark/consensus.py
index 41817f7..bde8a9b 100644
--- a/dark/consensus.py
+++ b/dark/consensus.py
@@ -1,6 +1,7 @@
from time import time
from collections import defaultdict
+# TODO: Some of these imported functions are no longer in utils.py!
from dark.utils import findHits, getSequence, summarizeHits, printHSP, report
diff --git a/dark/diamond/hsp.py b/dark/diamond/hsp.py
index e5559af..fd49976 100644
--- a/dark/diamond/hsp.py
+++ b/dark/diamond/hsp.py
@@ -1,150 +1,200 @@
-def printHSP(hsp, indent=''):
+from __future__ import division, print_function
+import sys
+
+
+def _debugPrint(hsp, queryLen, localDict, msg=''):
+ """
+ Print debugging information showing the local variables used during
+ a call to normalizeHSP and the hsp and then raise an C{AssertionError}.
+
+ @param hsp: The HSP C{dict} passed to normalizeHSP.
+ @param queryLen: the length of the query sequence.
+ @param localDict: A C{dict} of local variables (as produced by locals()).
+ @param msg: A C{str} message to raise C{AssertionError} with.
+ @raise AssertionError: unconditionally.
+ """
+ print('normalizeHSP error:', file=sys.stderr)
+ print(' queryLen: %d' % queryLen, file=sys.stderr)
+
+ print(' Original HSP:', file=sys.stderr)
for attr in ['bits', 'expect', 'frame', 'query_end', 'query_start',
'sbjct', 'query', 'sbjct_end', 'sbjct_start']:
- print('%s%s: %s' % (indent, attr, hsp[attr]))
+ print(' %s: %s' % (attr, hsp[attr]), file=sys.stderr)
+ print(' Local variables:', file=sys.stderr)
+ for var in sorted(localDict):
+ if var != 'hsp':
+ print(' %s: %s' % (var, localDict[var]), file=sys.stderr)
-def normalizeHSP(hsp, readLen, diamondTask):
+ raise AssertionError(msg)
+
+
+def _sanityCheck(subjectStart, subjectEnd, queryStart, queryEnd,
+ queryStartInSubject, queryEndInSubject, hsp, queryLen,
+ subjectGaps, queryGaps, localDict):
+ """
+ Perform some sanity checks on an HSP. Call _debugPrint on any error.
+
+ @param subjectStart: The 0-based C{int} start offset of the match in the
+ subject.
+ @param subjectEnd: The 0-based C{int} end offset of the match in the
+ subject.
+ @param queryStart: The 0-based C{int} start offset of the match in the
+ query.
+ @param queryEnd: The 0-based C{int} end offset of the match in the query.
+ @param queryStartInSubject: The 0-based C{int} offset of where the query
+ starts in the subject.
+ @param queryEndInSubject: The 0-based C{int} offset of where the query
+ ends in the subject.
+ @param hsp: The HSP C{dict} passed to normalizeHSP.
+ @param queryLen: the C{int} length of the query sequence.
+ @param subjectGaps: the C{int} number of gaps in the subject.
+ @param queryGaps: the C{int} number of gaps in the query.
+ @param localDict: A C{dict} of local variables from our caller (as
+ produced by locals()).
"""
- Examine the sense of an HSP and return information about where the
- read and the alignment (match) begin and end. Return a dict with keys
- that allow the read and the alignment to be displayed relative to the
- hit orientation (i.e., with start < stop for both the read and the
- match). The returned read indices are offsets into the hit. I.e.,
- they indicate where on the hit the read lies.
+ # Subject indices must always be ascending.
+ if subjectStart >= subjectEnd:
+ _debugPrint(hsp, queryLen, localDict, 'subjectStart >= subjectEnd')
+
+ subjectMatchLength = subjectEnd - subjectStart
+ queryMatchLength = queryEnd - queryStart
+
+ # Sanity check that the length of the matches in the subject and query
+ # are identical, taking into account gaps in either (indicated by '-'
+ # characters in the match sequences, as returned by DIAMOND).
+ subjectMatchLengthWithGaps = subjectMatchLength + subjectGaps
+ queryMatchLengthWithGaps = queryMatchLength + queryGaps
+ if subjectMatchLengthWithGaps != queryMatchLengthWithGaps:
+ _debugPrint(hsp, queryLen, localDict,
+ 'Including gaps, subject match length (%d) != Query match '
+ 'length (%d)' % (subjectMatchLengthWithGaps,
+ queryMatchLengthWithGaps))
+
+ if queryStartInSubject > subjectStart:
+ _debugPrint(hsp, queryLen, localDict,
+ 'queryStartInSubject (%d) > subjectStart (%d)' %
+ (queryStartInSubject, subjectStart))
+ if queryEndInSubject < subjectEnd:
+ _debugPrint(hsp, queryLen, localDict,
+ 'queryEndInSubject (%d) < subjectEnd (%d)' %
+ (queryEndInSubject, subjectEnd))
+
+
+def normalizeHSP(hsp, queryLen, diamondTask):
+ """
+ Examine an HSP and return information about where the query and subject
+ match begins and ends. Return a dict with keys that allow the query to
+ be displayed against the subject. The returned readStartInSubject and
+ readEndInSubject indices are offsets into the subject. I.e., they
+ indicate where in the subject the query falls.
In the returned object, all indices are suitable for Python string
slicing etc. We must be careful to convert from the 1-based offsets
found in DIAMOND output properly.
- hsp['frame'] is a (query, subject) 2-tuple, with both values coming from
- {-3, -2, -1, 1, 2, 3}. The sign indicates negative or positive sense
- (i.e., the direction of reading through the query or subject to get the
- alignment). The value is the nucleotide match offset modulo 3, plus one
- (i.e., it tells us which of the 3 possible reading frames is used in
- the match). The value is redundant because that information could also
- be obtained from the mod 3 value of the match offset.
+ hsp['frame'] is a value from {-3, -2, -1, 1, 2, 3}. The sign indicates
+ negative or positive sense (i.e., the direction of reading through the
+ query to get the alignment). The frame value is the nucleotide match offset
+ modulo 3, plus one (i.e., it tells us which of the 3 possible query reading
+ frames was used in the match).
NOTE: the returned readStartInSubject value may be negative. We consider
- the hit sequence to start at offset 0. So if the read string has
+ the subject sequence to start at offset 0. So if the query string has
sufficient additional nucleotides before the start of the alignment
- match, it may protrude to the left of the hit. Similarly, the returned
+ match, it may protrude to the left of the subject. Similarly, the returned
readEndInSubject can be greater than the subjectEnd.
@param hsp: an HSP in the form of a C{dict}, built from a DIAMOND record.
- All passed hsp offsets are 1-based.
- @param readLen: the length of the read sequence.
- @param diamondTask: The C{str} command line program that was
- run (e.g., 'blastp', 'blastx').
-
+ All passed offsets are 1-based.
+ @param queryLen: the length of the query sequence.
+ @param diamondTask: The C{str} command-line matching algorithm that was
+ run (either 'blastx' or 'blastp').
+ @return: A C{dict} with C{str} keys and C{int} offset values. Keys are
+ readStart
+ readEnd
+ readStartInSubject
+ readEndInSubject
+ subjectStart
+ subjectEnd
+ The returned offset values are all zero-based.
"""
- def debugPrint(locals, msg=None):
- """
- Print debugging information showing the local variables from
- a call to normalizeHSP and then raise an C{AssertionError}.
-
- @param locals: A C{dict} of local variables.
- @param msg: A C{str} message to raise C{AssertionError} with.
- """
- print('normalizeHSP error:')
- print(' readLen: %d' % readLen)
- for var in sorted(locals.keys()):
- if var in ('debugPrint', 'hsp'):
- continue
- print(' %s: %s' % (var, locals[var]))
- print(' Original HSP:')
- printHSP(hsp, ' ')
- if msg:
- raise AssertionError(msg)
+ # TODO: DIAMOND does not show gaps yet. When they start doing that, the
+ # following might have to be changed (or we might have to ask it to
+ # output attributes with different names).
+ subjectGaps = hsp['sbjct'].count('-')
+ queryGaps = hsp['query'].count('-')
+
+ # Make some variables using Python's standard string indexing (start
+ # offset included, end offset not). No calculations in this function
+ # are done with the original 1-based HSP variables.
+ queryStart = hsp['query_start'] - 1
+ queryEnd = hsp['query_end']
+ subjectStart = hsp['sbjct_start'] - 1
+ subjectEnd = hsp['sbjct_end']
+
+ queryReversed = hsp['frame'] < 0
+
+ # Query offsets must be ascending, unless we're looking at blastx output
+ # and the query was reversed for the match.
+ if queryStart >= queryEnd:
+ if diamondTask == 'blastx' and queryReversed:
+ # Compute new query start and end indices, based on their
+ # distance from the end of the string.
+ #
+ # Above we took one off the start index, so we need to undo
+ # that (because the start is actually the end). We didn't take
+ # one off the end index, and need to do that now (because the
+ # end is actually the start).
+ queryStart = queryLen - (queryStart + 1)
+ queryEnd = queryLen - (queryEnd - 1)
else:
- raise AssertionError()
-
- readPositive = hsp['frame'] > 0
-
- # The following variable names with underscores match the names of
- # attributes BioPython uses and the values (1-based) match those
- # reported by DIAMOND.
- read_start = hsp['query_start']
- read_end = hsp['query_end']
- sbjct_start = hsp['sbjct_start']
- sbjct_end = hsp['sbjct_end']
-
- # Read offsets should be ascending.
- if read_start > read_end:
- debugPrint(locals(),
- 'Assertion "read_start <= read_end" failed. Read '
- 'positive is %s. read_start = %d, read_end = %d' %
- (readPositive, read_start, read_end))
-
- # Make sure subject indices are ascending.
- if sbjct_start > sbjct_end:
- debugPrint(locals())
-
- # Now that we have asserted what we can about the original HSP values
- # and gotten them into ascending order, make some sane 0-based offsets.
- readStartInSubject = read_start - 1
- readEndInSubject = read_end
- subjectStart = sbjct_start - 1
- subjectEnd = sbjct_end
+ _debugPrint(hsp, queryLen, locals(), 'queryStart >= queryEnd')
if diamondTask == 'blastx':
- # In Blastx output, hit offsets are based on protein sequence
- # length but queries (and the reported offsets) are nucleotide.
- # Convert the read offsets to protein because we will plot against
- # the hit (protein).
+ # In DIAMOND blastx output, subject offsets are based on protein
+ # sequence length but queries (and the reported offsets) are
+ # nucleotide. Convert the query offsets to protein because we will
+ # plot against the subject (protein).
#
- # Note that readStartInSubject and readEndInSubject may not be 0 mod
- # 3. They are offsets into the read string giving the position of
- # the AA, which depends on the translation frame.
- readStartInSubject = int(readStartInSubject / 3)
- readEndInSubject = int(readEndInSubject / 3)
-
- # No operations on original 1-based HSP variables (with underscores)
- # should appear beyond this point.
+ # Convert queryLen and the query nucleotide start and end offsets
+ # to be valid for the query after translation to AAs. When
+ # translating, DIAMOND may ignore some nucleotides at the start
+ # and/or the end of the original DNA query. At the start this is
+ # due to the frame in use, and at the end it is due to always using
+ # three nucleotides at a time to form codons.
+ #
+ # So, for example, a query of 6 nucleotides that is translated in
+ # frame 2 (i.e., the translation starts from the second nucleotide)
+ # will have length 1 as an AA sequence. The first nucleotide is
+ # ignored due to the frame and the last two due to there not being
+ # enough final nucleotides to make another codon.
+ #
+ # In the following, the subtraction accounts for the first form of
+ # loss and the integer division for the second.
+ initiallyIgnored = abs(hsp['frame']) - 1
+ queryLen = (queryLen - initiallyIgnored) // 3
+ queryStart = (queryStart - initiallyIgnored) // 3
+ queryEnd = (queryEnd - initiallyIgnored) // 3
- subjectLength = subjectEnd - subjectStart
- readLength = readEndInSubject - readStartInSubject
+ # unmatchedQueryLeft is the number of query bases that will extend
+ # to the left of the start of the subject in our plots.
+ unmatchedQueryLeft = queryStart
- # TODO: DIAMOND output check.
- hitGaps = hsp['sbjct'].count('-')
- readGaps = hsp['query'].count('-')
+ # Set the query offsets into the subject.
+ queryStartInSubject = subjectStart - unmatchedQueryLeft
+ queryEndInSubject = queryStartInSubject + queryLen + queryGaps
- # Sanity check that the length of the matches in the hit and read
- # are identical, taking into account gaps in either (indicated by '-'
- # characters in the match sequences, as returned by DIAMOND).
- subjectLengthWithGaps = subjectLength + hitGaps
- readLengthWithGaps = readLength + readGaps
- if subjectLengthWithGaps != readLengthWithGaps:
- debugPrint(locals(),
- 'Including gaps, hit match length (%d) != Read match '
- 'length (%d)' % (subjectLengthWithGaps,
- readLengthWithGaps))
-
- # Calculate read indices. These are indices relative to the hit!
-
- # unmatchedReadLeft is the number of read bases that will be sticking
- # out to the left of the start of the subject in our plots.
- if readPositive:
- unmatchedReadLeft = readStartInSubject
- else:
- unmatchedReadLeft = readLen - readEndInSubject
-
- # Set the read offsets.
- readStartInSubject = subjectStart - unmatchedReadLeft
- readEndInSubject = readStartInSubject + readLen + readGaps
-
- # Final sanity checks.
- if readStartInSubject > subjectStart:
- debugPrint(locals(), 'readStartInSubject > subjectStart')
- if readEndInSubject < subjectEnd:
- debugPrint(locals(), 'readEndInSubject < subjectEnd')
+ _sanityCheck(subjectStart, subjectEnd, queryStart, queryEnd,
+ queryStartInSubject, queryEndInSubject, hsp, queryLen,
+ subjectGaps, queryGaps, locals())
return {
- 'readStart': read_start - 1,
- 'readEnd': read_end,
- 'readStartInSubject': readStartInSubject,
- 'readEndInSubject': readEndInSubject,
+ 'readStart': queryStart,
+ 'readEnd': queryEnd,
+ 'readStartInSubject': queryStartInSubject,
+ 'readEndInSubject': queryEndInSubject,
'subjectStart': subjectStart,
'subjectEnd': subjectEnd,
}
diff --git a/dark/hsp.py b/dark/hsp.py
index 9140466..6a3a019 100644
--- a/dark/hsp.py
+++ b/dark/hsp.py
@@ -91,7 +91,7 @@ class _Base(object):
class HSP(_Base):
"""
Holds information about a high-scoring pair from a read alignment.
- Comparisons are done as for BLAST bit scores (higher is better).
+ Comparisons are done as for BLAST or DIAMOND bit scores (higher is better).
@param score: The numeric score of this HSP.
"""
@@ -104,7 +104,7 @@ class HSP(_Base):
class LSP(_Base):
"""
Holds information about a low-scoring pair from a read alignment.
- Comparisons are done as for BLAST e-values (smaller is better).
+ Comparisons are done as for BLAST or DIAMOND e-values (smaller is better).
@param score: The numeric score of this LSP.
"""
diff --git a/setup.py b/setup.py
index b884bd0..42efba6 100644
--- a/setup.py
+++ b/setup.py
@@ -50,7 +50,7 @@ scripts = [
]
setup(name='dark-matter',
- version='1.0.50',
+ version='1.0.51',
packages=['dark', 'dark.blast', 'dark.diamond'],
include_package_data=True,
url='https://github.com/acorg/dark-matter',
| DIAMOND output has qstart > qend when frame < 0
When DIAMOND blastx finds a match in a reverse-complemented read, the qstart and qend values it emits are in decreasing order (qstart > qend): they are indices into the read counted from its end, not its beginning. This differs from BLAST, which always reports qstart < qend. Our DIAMOND hsp.py normalization code needs to take this into account.
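A minimal sketch of the needed canonicalization (the helper name `canonicalQueryOffsets` is hypothetical; the actual fix lives in `normalizeHSP` in `dark/diamond/hsp.py`):

```python
def canonicalQueryOffsets(queryStart, queryEnd, frame):
    """Return 1-based (start, end) with start <= end, the BLAST convention.

    DIAMOND blastx reports queryStart > queryEnd when frame < 0, because a
    match on the reverse complement is indexed from the read's end; swapping
    the pair restores the start < end ordering that BLAST always uses.
    """
    if frame < 0:
        # Reverse-complement match: DIAMOND counts from the read's end.
        queryStart, queryEnd = queryEnd, queryStart
    assert queryStart <= queryEnd
    return queryStart, queryEnd


# A frame -2 HSP reported with query_start=12, query_end=1:
print(canonicalQueryOffsets(12, 1, -2))  # -> (1, 12)
```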
| acorg/dark-matter | diff --git a/test/diamond/test_hsp.py b/test/diamond/test_hsp.py
index 525a8a3..e479aa8 100644
--- a/test/diamond/test_hsp.py
+++ b/test/diamond/test_hsp.py
@@ -3,153 +3,935 @@ from unittest import TestCase
from dark.diamond.hsp import normalizeHSP
-class Frame(object):
- def __init__(self, read):
- self.read = read
-
-
class FakeHSP(dict):
- def __init__(self, subjectStart, subjectEnd, readStart, readEnd, frame,
- hit='', read=''):
+ def __init__(self, subjectStart, subjectEnd, queryStart, queryEnd, frame,
+ subject='', query='', bitscore=None, evalue=None):
"""
- A fake HSP class (with 1-based offsets, as are used in DIAMOND).
+ A fake HSP class with 1-based offsets and key names as used by DIAMOND.
"""
+ if frame > 0:
+ if not queryStart < queryEnd:
+ raise ValueError('queryStart (%d) not less than queryEnd '
+ '(%d) when frame (%d) is positive.' %
+ (queryStart, queryEnd, frame))
+ else:
+ if not queryStart > queryEnd:
+ raise ValueError('queryStart (%d) not greater than queryEnd '
+ '(%d) when frame (%d) is negative.' %
+ (queryStart, queryEnd, frame))
+
self['sbjct_start'] = subjectStart
self['sbjct_end'] = subjectEnd
- self['query_start'] = readStart
- self['query_end'] = readEnd
- self['frame'] = frame.read
- self['sbjct'] = hit
- self['query'] = read
-
- # In case you're thinking of adding it, the following assertion is
- # not valid:
- #
- # assert abs(subjectEnd - subjectStart) == abs(readEnd - readStart)
- #
- # That's because DIAMOND might find a match that requires a gap in
- # the read or in the hit. The indices that it reports do not
- # include the gap and so the differences in the lengths of the
- # sections of the read and hit may not be the same.
-
-
-class Old_ReadPositiveHitPositive(TestCase):
+ self['query_start'] = queryStart
+ self['query_end'] = queryEnd
+ self['frame'] = frame
+ self['sbjct'] = subject
+ self['query'] = query
+ self['bits'] = bitscore
+ self['expect'] = evalue
+
+
+class TestBlastxFramePlus1(TestCase):
"""
- Tests for normalizeHSP when the hit start is less than the hit end.
+ Tests for normalizeHSP for DIAMOND blastx output when frame=1 (i.e., the
+ query matches in the order it was given to DIAMOND, and the translation
+ frame starts at the first nucleotide).
"""
- frame = Frame(read=1)
+ # All query offsets and lengths must be in terms of nucleotides.
+ # Subject offsets are in terms of protein AA sequences. This is how
+ # DIAMOND reports those offsets.
+ #
+ # In the little diagrams in the docstrings below, the first line is the
+ # subject and the second the query. Dots indicate where the matched
+ # region is. The queries are shown translated so as to line up properly
+ # with the subjects.
def testIdentical(self):
"""
- The hit start and end are identical to those of the read.
+ The subject start and end are identical to those of the translated
+ query.
- ssss
- qqqq
+ ....
+ ....
"""
- hsp = FakeHSP(subjectStart=1, subjectEnd=4, readStart=3, readEnd=12,
- frame=self.frame)
- normalized = normalizeHSP(hsp, 4, 'blastx')
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=1, queryEnd=12,
+ frame=1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
self.assertEqual({
'subjectStart': 0,
'subjectEnd': 4,
- 'readStart': 2,
- 'readEnd': 12,
+ 'readStart': 0,
+ 'readEnd': 4,
'readStartInSubject': 0,
'readEndInSubject': 4,
}, normalized)
- def testHitExtendsLeft(self):
+ def testSubjectExtendsLeft(self):
"""
- The hit overlaps the read to the left.
+ The subject overlaps the translated query to the left.
- ssssss
- qqqq
+ ss....
+ ....
"""
- hsp = FakeHSP(subjectStart=3, subjectEnd=6, readStart=3, readEnd=12,
- frame=self.frame)
- normalized = normalizeHSP(hsp, 4, 'blastx')
+ hsp = FakeHSP(subjectStart=3, subjectEnd=6, queryStart=1, queryEnd=12,
+ frame=1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
self.assertEqual({
'subjectStart': 2,
'subjectEnd': 6,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 2,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsLeft(self):
+ """
+ The translated query extends to the left of the subject.
+
+ ....
+ qq....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=7, queryEnd=18,
+ frame=1)
+ normalized = normalizeHSP(hsp, 18, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
'readStart': 2,
- 'readEnd': 12,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsRight(self):
+ """
+ The subject extends to the right of the translated query.
+
+ ....ss
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=1, queryEnd=12,
+ frame=1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testQueryExtendsRight(self):
+ """
+ The translated query extends to the right of the subject.
+
+ ....
+ ....qq
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=1, queryEnd=12,
+ frame=1)
+ normalized = normalizeHSP(hsp, 18, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsRightAndLeft(self):
+ """
+ The translated query extends to the right and left of the subject.
+
+ ....
+ qq....q
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=7, queryEnd=18,
+ frame=1)
+ normalized = normalizeHSP(hsp, 21, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+ def testSubjectExtendsRightAndLeft(self):
+ """
+ The subject extends to the right and left of the translated query.
+
+ s...sss
+ ...q
+ """
+ hsp = FakeHSP(subjectStart=2, subjectEnd=5, queryStart=3, queryEnd=12,
+ frame=1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 1,
+ 'subjectEnd': 5,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 1,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+
+class TestBlastxFramePlus2(TestCase):
+ """
+ Tests for normalizeHSP for DIAMOND blastx output when frame=2 (i.e., the
+ query matches in the order it was given to DIAMOND, and the translation
+ frame starts at the second nucleotide).
+ """
+
+ # All query offsets and lengths must be in terms of nucleotides.
+ # Subject offsets are in terms of protein AA sequences. This is how
+ # DIAMOND reports those offsets.
+ #
+ # In the little diagrams in the docstrings below, the first line is the
+ # subject and the second the query. Dots indicate where the matched
+ # region is. The queries are shown translated so as to line up properly
+ # with the subjects.
+
+ def testIdentical(self):
+ """
+ The subject start and end are identical to those of the translated
+ query.
+
+ ....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=2, queryEnd=13,
+ frame=2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsLeft(self):
+ """
+ The subject overlaps the translated query to the left.
+
+ ss....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=3, subjectEnd=6, queryStart=2, queryEnd=13,
+ frame=2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 2,
+ 'subjectEnd': 6,
+ 'readStart': 0,
+ 'readEnd': 4,
'readStartInSubject': 2,
'readEndInSubject': 6,
}, normalized)
- def testReadExtendsLeft(self):
+ def testQueryExtendsLeft(self):
"""
- The read sticks out to the left of the hit.
+ The translated query extends to the left of the subject.
- ssss
- qqqqqq
+ ....
+ qq....
"""
- hsp = FakeHSP(subjectStart=1, subjectEnd=4, readStart=9, readEnd=18,
- frame=self.frame)
- normalized = normalizeHSP(hsp, 6, 'blastx')
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=8, queryEnd=19,
+ frame=2)
+ normalized = normalizeHSP(hsp, 19, 'blastx')
self.assertEqual({
'subjectStart': 0,
'subjectEnd': 4,
- 'readStart': 8,
- 'readEnd': 18,
+ 'readStart': 2,
+ 'readEnd': 6,
'readStartInSubject': -2,
'readEndInSubject': 4,
}, normalized)
- def testReadExtendsRight(self):
+ def testSubjectExtendsRight(self):
"""
- The read sticks out to the right of the hit.
+ The subject extends to the right of the translated query.
- ssss
- qqqqqq
+ ....ss
+ ....
"""
- hsp = FakeHSP(subjectStart=1, subjectEnd=4, readStart=3, readEnd=12,
- frame=self.frame)
- normalized = normalizeHSP(hsp, 6, 'blastx')
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=2, queryEnd=13,
+ frame=2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testQueryExtendsRight(self):
+ """
+ The translated query extends to the right of the subject.
+
+ ....
+ ....qq
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=2, queryEnd=13,
+ frame=2)
+ normalized = normalizeHSP(hsp, 19, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsRightAndLeft(self):
+ """
+ The translated query extends to the right and left of the subject.
+
+ ....
+ qq....q
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=8, queryEnd=19,
+ frame=2)
+ normalized = normalizeHSP(hsp, 22, 'blastx')
self.assertEqual({
'subjectStart': 0,
'subjectEnd': 4,
'readStart': 2,
- 'readEnd': 12,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+ def testSubjectExtendsRightAndLeft(self):
+ """
+ The subject extends to the right and left of the translated query.
+
+ s...sss
+ ...q
+ """
+ hsp = FakeHSP(subjectStart=2, subjectEnd=5, queryStart=4, queryEnd=13,
+ frame=2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 1,
+ 'subjectEnd': 5,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 1,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+
+class TestBlastxFramePlus3(TestCase):
+ """
+ Tests for normalizeHSP for DIAMOND blastx output when frame=3 (i.e., the
+ query matches in the order it was given to DIAMOND, and the translation
+ frame starts at the third nucleotide).
+ """
+
+ # All query offsets and lengths must be in terms of nucleotides.
+ # Subject offsets are in terms of protein AA sequences. This is how
+ # DIAMOND reports those offsets.
+ #
+ # In the little diagrams in the docstrings below, the first line is the
+ # subject and the second the query. Dots indicate where the matched
+ # region is. The queries are shown translated so as to line up properly
+ # with the subjects.
+
+ def testIdentical(self):
+ """
+ The subject start and end are identical to those of the translated
+ query.
+
+ ....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=3, queryEnd=14,
+ frame=3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsLeft(self):
+ """
+ The subject overlaps the translated query to the left.
+
+ ss....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=3, subjectEnd=6, queryStart=3, queryEnd=14,
+ frame=3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 2,
+ 'subjectEnd': 6,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 2,
'readEndInSubject': 6,
}, normalized)
- def testReadExtendsRightAndLeft(self):
+ def testQueryExtendsLeft(self):
"""
- The read extends to the right and left of the hit.
+ The translated query extends to the left of the subject.
- ssss
- qqqqqq
+ ....
+ qq....
"""
- hsp = FakeHSP(subjectStart=1, subjectEnd=4, readStart=6, readEnd=15,
- frame=self.frame)
- normalized = normalizeHSP(hsp, 6, 'blastx')
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=9, queryEnd=20,
+ frame=3)
+ normalized = normalizeHSP(hsp, 20, 'blastx')
self.assertEqual({
'subjectStart': 0,
'subjectEnd': 4,
- 'readStart': 5,
- 'readEnd': 15,
- 'readStartInSubject': -1,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsRight(self):
+ """
+ The subject extends to the right of the translated query.
+
+ ....ss
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=3, queryEnd=14,
+ frame=3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testQueryExtendsRight(self):
+ """
+ The translated query extends to the right of the subject.
+
+ ....
+ ....qq
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=3, queryEnd=14,
+ frame=3)
+ normalized = normalizeHSP(hsp, 20, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsRightAndLeft(self):
+ """
+ The translated query extends to the right and left of the subject.
+
+ ....
+ qq....q
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=9, queryEnd=20,
+ frame=3)
+ normalized = normalizeHSP(hsp, 23, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
'readEndInSubject': 5,
}, normalized)
- def testHitExtendsRightAndLeft(self):
+ def testSubjectExtendsRightAndLeft(self):
"""
- The hit extends to the right and left of the read.
+ The subject extends to the right and left of the translated query.
- sssssss
- qqqq
+ s...sss
+ ...q
"""
- hsp = FakeHSP(subjectStart=2, subjectEnd=5, readStart=3, readEnd=12,
- frame=self.frame)
- normalized = normalizeHSP(hsp, 4, 'blastx')
+ hsp = FakeHSP(subjectStart=2, subjectEnd=5, queryStart=5, queryEnd=14,
+ frame=3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
self.assertEqual({
'subjectStart': 1,
'subjectEnd': 5,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 1,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+
+class TestBlastxFrameMinus1(TestCase):
+ """
+ Tests for normalizeHSP for DIAMOND blastx output when frame=-1 (i.e., the
+ query matches in the reverse (complemented) order it was given to DIAMOND,
+ and the translation frame starts at the last nucleotide).
+ """
+
+ # All query offsets and lengths must be in terms of nucleotides.
+ # Subject offsets are in terms of protein AA sequences. This is how
+ # DIAMOND reports those offsets.
+ #
+ # In the little diagrams in the docstrings below, the first line is the
+ # subject and the second the query. Dots indicate where the matched
+ # region is. The queries are shown translated so as to line up properly
+ # with the subjects.
+
+ def testIdentical(self):
+ """
+ If the query and subject match completely, the normalized HSP must
+ be correct.
+
+ ....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsLeft(self):
+ """
+ The subject overlaps the translated query to the left.
+
+ ss....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=3, subjectEnd=6, queryStart=12, queryEnd=1,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 2,
+ 'subjectEnd': 6,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 2,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsLeft(self):
+ """
+ The translated query extends to the left of the subject.
+
+ ....
+ qq....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 18, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsRight(self):
+ """
+ The subject extends to the right of the translated query.
+
+ ....ss
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testQueryExtendsRight(self):
+ """
+ The translated query extends to the right of the subject.
+
+ ....
+ ....qq
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=18, queryEnd=7,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 18, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsRightAndLeft(self):
+ """
+ The translated query extends to the right and left of the subject.
+
+ ....
+ qq....q
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=15, queryEnd=4,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 21, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+ def testSubjectExtendsRightAndLeft(self):
+ """
+ The subject extends to the right and left of the translated query.
+
+ s...sss
+ ...q
+ """
+ hsp = FakeHSP(subjectStart=2, subjectEnd=4, queryStart=12, queryEnd=4,
+ frame=-1)
+ normalized = normalizeHSP(hsp, 12, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 1,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 3,
+ 'readStartInSubject': 1,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+
+class TestBlastxFrameMinus2(TestCase):
+ """
+ Tests for normalizeHSP for DIAMOND blastx output when frame=-2 (i.e., the
+ query matches in the reverse (complemented) order it was given to DIAMOND,
+ and the translation frame starts at the second-to-last nucleotide).
+ """
+
+ # All query offsets and lengths must be in terms of nucleotides.
+ # Subject offsets are in terms of protein AA sequences. This is how
+ # DIAMOND reports those offsets.
+ #
+ # In the little diagrams in the docstrings below, the first line is the
+ # subject and the second the query. Dots indicate where the matched
+ # region is. The queries are shown translated so as to line up properly
+ # with the subjects.
+
+ def testIdentical(self):
+ """
+ If the query and subject match completely, the normalized HSP must
+ be correct.
+
+ ....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsLeft(self):
+ """
+ The subject overlaps the translated query to the left.
+
+ ss....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=3, subjectEnd=6, queryStart=12, queryEnd=1,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 2,
+ 'subjectEnd': 6,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 2,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsLeft(self):
+ """
+ The translated query extends to the left of the subject.
+
+ ....
+ qq....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 19, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
'readStart': 2,
- 'readEnd': 12,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsRight(self):
+ """
+ The subject extends to the right of the translated query.
+
+ ....ss
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testQueryExtendsRight(self):
+ """
+ The translated query extends to the right of the subject.
+
+ ....
+ ....qq
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=18, queryEnd=7,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 19, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsRightAndLeft(self):
+ """
+ The translated query extends to the right and left of the subject.
+
+ ....
+ qq....q
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=15, queryEnd=4,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 22, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+ def testSubjectExtendsRightAndLeft(self):
+ """
+ The subject extends to the right and left of the translated query.
+
+ s...sss
+ ...q
+ """
+ hsp = FakeHSP(subjectStart=2, subjectEnd=4, queryStart=12, queryEnd=4,
+ frame=-2)
+ normalized = normalizeHSP(hsp, 13, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 1,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 3,
+ 'readStartInSubject': 1,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+
+class TestBlastxFrameMinus3(TestCase):
+ """
+ Tests for normalizeHSP for DIAMOND blastx output when frame=-3 (i.e., the
+ query matches in the reverse (complemented) order it was given to DIAMOND,
+ and the translation frame starts at the third-to-last nucleotide).
+ """
+
+ # All query offsets and lengths must be in terms of nucleotides.
+ # Subject offsets are in terms of protein AA sequences. This is how
+ # DIAMOND reports those offsets.
+ #
+ # In the little diagrams in the docstrings below, the first line is the
+ # subject and the second the query. Dots indicate where the matched
+ # region is. The queries are shown translated so as to line up properly
+ # with the subjects.
+
+ def testIdentical(self):
+ """
+ If the query and subject match completely, the normalized HSP must
+ be correct.
+
+ ....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsLeft(self):
+ """
+ The subject overlaps the translated query to the left.
+
+ ss....
+ ....
+ """
+ hsp = FakeHSP(subjectStart=3, subjectEnd=6, queryStart=12, queryEnd=1,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 2,
+ 'subjectEnd': 6,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 2,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsLeft(self):
+ """
+ The translated query extends to the left of the subject.
+
+ ....
+ qq....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 20, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testSubjectExtendsRight(self):
+ """
+ The subject extends to the right of the translated query.
+
+ ....ss
+ ....
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=12, queryEnd=1,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 4,
+ }, normalized)
+
+ def testQueryExtendsRight(self):
+ """
+ The translated query extends to the right of the subject.
+
+ ....
+ ....qq
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=18, queryEnd=7,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 20, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 4,
+ 'readStartInSubject': 0,
+ 'readEndInSubject': 6,
+ }, normalized)
+
+ def testQueryExtendsRightAndLeft(self):
+ """
+ The translated query extends to the right and left of the subject.
+
+ ....
+ qq....q
+ """
+ hsp = FakeHSP(subjectStart=1, subjectEnd=4, queryStart=15, queryEnd=4,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 23, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 0,
+ 'subjectEnd': 4,
+ 'readStart': 2,
+ 'readEnd': 6,
+ 'readStartInSubject': -2,
+ 'readEndInSubject': 5,
+ }, normalized)
+
+ def testSubjectExtendsRightAndLeft(self):
+ """
+ The subject extends to the right and left of the translated query.
+
+ s...sss
+ ...q
+ """
+ hsp = FakeHSP(subjectStart=2, subjectEnd=4, queryStart=12, queryEnd=4,
+ frame=-3)
+ normalized = normalizeHSP(hsp, 14, 'blastx')
+ self.assertEqual({
+ 'subjectStart': 1,
+ 'subjectEnd': 4,
+ 'readStart': 0,
+ 'readEnd': 3,
'readStartInSubject': 1,
'readEndInSubject': 5,
}, normalized)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 5
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
cffi==1.15.1
coverage==6.2
-e git+https://github.com/acorg/dark-matter.git@907a4ac310a1e855f076a2a65367d21266bb7299#egg=dark_matter
execnet==1.9.0
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycparser==2.21
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: dark-matter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- cffi==1.15.1
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/dark-matter
| [
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testIdentical",
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testQueryExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testQueryExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testQueryExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testSubjectExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testSubjectExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFramePlus1::testSubjectExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testIdentical",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testQueryExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testQueryExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testQueryExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testSubjectExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testSubjectExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFramePlus2::testSubjectExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testIdentical",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testQueryExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testQueryExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testQueryExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testSubjectExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testSubjectExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFramePlus3::testSubjectExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testIdentical",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testQueryExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testQueryExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testQueryExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testSubjectExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testSubjectExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFrameMinus1::testSubjectExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testIdentical",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testQueryExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testQueryExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testQueryExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testSubjectExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testSubjectExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFrameMinus2::testSubjectExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testIdentical",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testQueryExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testQueryExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testQueryExtendsRightAndLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testSubjectExtendsLeft",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testSubjectExtendsRight",
"test/diamond/test_hsp.py::TestBlastxFrameMinus3::testSubjectExtendsRightAndLeft"
]
| []
| []
| []
| MIT License | 835 | [
"setup.py",
"dark/blast/hsp.py",
"dark/consensus.py",
"dark/hsp.py",
"dark/diamond/hsp.py"
]
| [
"setup.py",
"dark/blast/hsp.py",
"dark/consensus.py",
"dark/hsp.py",
"dark/diamond/hsp.py"
]
|
|
tornadoweb__tornado-1867 | 3d2bcbedbd2eaab9d221c5e8129240702f2287e7 | 2016-10-28 03:04:30 | ecd8968c5135b810cd607b5902dda2cd32122b39 | ajdavis: Ben, do you suppose RedirectHandler should also convert named capture groups to keyword args like RequestHandler does? | diff --git a/docs/guide/structure.rst b/docs/guide/structure.rst
index f0829df0..715e080f 100644
--- a/docs/guide/structure.rst
+++ b/docs/guide/structure.rst
@@ -278,7 +278,7 @@ to the prefix ``/photos/`` instead::
app = tornado.web.Application([
url(r"/photos/(.*)", MyPhotoHandler),
url(r"/pictures/(.*)", tornado.web.RedirectHandler,
- dict(url=r"/photos/\1")),
+ dict(url=r"/photos/{0}")),
])
Unlike `.RequestHandler.redirect`, `.RedirectHandler` uses permanent
diff --git a/tornado/httpserver.py b/tornado/httpserver.py
index c7b9c2f8..ff235fe4 100644
--- a/tornado/httpserver.py
+++ b/tornado/httpserver.py
@@ -179,45 +179,12 @@ class HTTPServer(TCPServer, Configurable,
conn.start_serving(self)
def start_request(self, server_conn, request_conn):
- if isinstance(self.request_callback, httputil.HTTPServerConnectionDelegate):
- delegate = self.request_callback.start_request(server_conn, request_conn)
- else:
- delegate = _CallableAdapter(self.request_callback, request_conn)
-
- if self.xheaders:
- delegate = _ProxyAdapter(delegate, request_conn)
-
- return delegate
+ return _ServerRequestAdapter(self, server_conn, request_conn)
def on_close(self, server_conn):
self._connections.remove(server_conn)
-class _CallableAdapter(httputil.HTTPMessageDelegate):
- def __init__(self, request_callback, request_conn):
- self.connection = request_conn
- self.request_callback = request_callback
- self.request = None
- self.delegate = None
- self._chunks = []
-
- def headers_received(self, start_line, headers):
- self.request = httputil.HTTPServerRequest(
- connection=self.connection, start_line=start_line,
- headers=headers)
-
- def data_received(self, chunk):
- self._chunks.append(chunk)
-
- def finish(self):
- self.request.body = b''.join(self._chunks)
- self.request._parse_body()
- self.request_callback(self.request)
-
- def on_connection_close(self):
- self._chunks = None
-
-
class _HTTPRequestContext(object):
def __init__(self, stream, address, protocol):
self.address = address
@@ -280,27 +247,58 @@ class _HTTPRequestContext(object):
self.protocol = self._orig_protocol
-class _ProxyAdapter(httputil.HTTPMessageDelegate):
- def __init__(self, delegate, request_conn):
+class _ServerRequestAdapter(httputil.HTTPMessageDelegate):
+ """Adapts the `HTTPMessageDelegate` interface to the interface expected
+ by our clients.
+ """
+ def __init__(self, server, server_conn, request_conn):
+ self.server = server
self.connection = request_conn
- self.delegate = delegate
+ self.request = None
+ if isinstance(server.request_callback,
+ httputil.HTTPServerConnectionDelegate):
+ self.delegate = server.request_callback.start_request(
+ server_conn, request_conn)
+ self._chunks = None
+ else:
+ self.delegate = None
+ self._chunks = []
def headers_received(self, start_line, headers):
- self.connection.context._apply_xheaders(headers)
- return self.delegate.headers_received(start_line, headers)
+ if self.server.xheaders:
+ self.connection.context._apply_xheaders(headers)
+ if self.delegate is None:
+ self.request = httputil.HTTPServerRequest(
+ connection=self.connection, start_line=start_line,
+ headers=headers)
+ else:
+ return self.delegate.headers_received(start_line, headers)
def data_received(self, chunk):
- return self.delegate.data_received(chunk)
+ if self.delegate is None:
+ self._chunks.append(chunk)
+ else:
+ return self.delegate.data_received(chunk)
def finish(self):
- self.delegate.finish()
+ if self.delegate is None:
+ self.request.body = b''.join(self._chunks)
+ self.request._parse_body()
+ self.server.request_callback(self.request)
+ else:
+ self.delegate.finish()
self._cleanup()
def on_connection_close(self):
- self.delegate.on_connection_close()
+ if self.delegate is None:
+ self._chunks = None
+ else:
+ self.delegate.on_connection_close()
self._cleanup()
def _cleanup(self):
- self.connection.context._unapply_xheaders()
+ if self.server.xheaders:
+ self.connection.context._unapply_xheaders()
+
HTTPRequest = httputil.HTTPServerRequest
diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index 1b1a07cd..d6183176 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -28,7 +28,6 @@ In addition to I/O events, the `IOLoop` can also schedule time-based events.
from __future__ import absolute_import, division, print_function, with_statement
-import collections
import datetime
import errno
import functools
@@ -694,7 +693,8 @@ class PollIOLoop(IOLoop):
self.time_func = time_func or time.time
self._handlers = {}
self._events = {}
- self._callbacks = collections.deque()
+ self._callbacks = []
+ self._callback_lock = threading.Lock()
self._timeouts = []
self._cancellations = 0
self._running = False
@@ -712,7 +712,8 @@ class PollIOLoop(IOLoop):
self.READ)
def close(self, all_fds=False):
- self._closing = True
+ with self._callback_lock:
+ self._closing = True
self.remove_handler(self._waker.fileno())
if all_fds:
for fd, handler in self._handlers.values():
@@ -799,7 +800,9 @@ class PollIOLoop(IOLoop):
while True:
# Prevent IO event starvation by delaying new callbacks
# to the next iteration of the event loop.
- ncallbacks = len(self._callbacks)
+ with self._callback_lock:
+ callbacks = self._callbacks
+ self._callbacks = []
# Add any timeouts that have come due to the callback list.
# Do not run anything until we have determined which ones
@@ -828,14 +831,14 @@ class PollIOLoop(IOLoop):
if x.callback is not None]
heapq.heapify(self._timeouts)
- for i in range(ncallbacks):
- self._run_callback(self._callbacks.popleft())
+ for callback in callbacks:
+ self._run_callback(callback)
for timeout in due_timeouts:
if timeout.callback is not None:
self._run_callback(timeout.callback)
# Closures may be holding on to a lot of memory, so allow
# them to be freed before we go into our poll wait.
- due_timeouts = timeout = None
+ callbacks = callback = due_timeouts = timeout = None
if self._callbacks:
# If any callbacks or timeouts called add_callback,
@@ -931,20 +934,36 @@ class PollIOLoop(IOLoop):
self._cancellations += 1
def add_callback(self, callback, *args, **kwargs):
- if self._closing:
- return
- # Blindly insert into self._callbacks. This is safe even
- # from signal handlers because deque.append is atomic.
- self._callbacks.append(functools.partial(
- stack_context.wrap(callback), *args, **kwargs))
if thread.get_ident() != self._thread_ident:
- # This will write one byte but Waker.consume() reads many
- # at once, so it's ok to write even when not strictly
- # necessary.
- self._waker.wake()
+ # If we're not on the IOLoop's thread, we need to synchronize
+ # with other threads, or waking logic will induce a race.
+ with self._callback_lock:
+ if self._closing:
+ return
+ list_empty = not self._callbacks
+ self._callbacks.append(functools.partial(
+ stack_context.wrap(callback), *args, **kwargs))
+ if list_empty:
+ # If we're not in the IOLoop's thread, and we added the
+ # first callback to an empty list, we may need to wake it
+ # up (it may wake up on its own, but an occasional extra
+ # wake is harmless). Waking up a polling IOLoop is
+ # relatively expensive, so we try to avoid it when we can.
+ self._waker.wake()
else:
- # If we're on the IOLoop's thread, we don't need to wake anyone.
- pass
+ if self._closing:
+ return
+ # If we're on the IOLoop's thread, we don't need the lock,
+ # since we don't need to wake anyone, just add the
+ # callback. Blindly insert into self._callbacks. This is
+ # safe even from signal handlers because the GIL makes
+ # list.append atomic. One subtlety is that if the signal
+ # is interrupting another thread holding the
+ # _callback_lock block in IOLoop.start, we may modify
+ # either the old or new version of self._callbacks, but
+ # either way will work.
+ self._callbacks.append(functools.partial(
+ stack_context.wrap(callback), *args, **kwargs))
def add_callback_from_signal(self, callback, *args, **kwargs):
with stack_context.NullContext():
diff --git a/tornado/platform/common.py b/tornado/platform/common.py
index 2d4065ca..b409a903 100644
--- a/tornado/platform/common.py
+++ b/tornado/platform/common.py
@@ -3,26 +3,10 @@ from __future__ import absolute_import, division, print_function, with_statement
import errno
import socket
-import time
from tornado.platform import interface
-def try_close(f):
- # Avoid issue #875 (race condition when using the file in another
- # thread).
- for i in range(10):
- try:
- f.close()
- except IOError:
- # Yield to another thread
- time.sleep(1e-3)
- else:
- break
- # Try a last time and let raise
- f.close()
-
-
class Waker(interface.Waker):
"""Create an OS independent asynchronous pipe.
@@ -91,7 +75,7 @@ class Waker(interface.Waker):
def wake(self):
try:
self.writer.send(b"x")
- except (IOError, socket.error, ValueError):
+ except (IOError, socket.error):
pass
def consume(self):
@@ -105,4 +89,4 @@ class Waker(interface.Waker):
def close(self):
self.reader.close()
- try_close(self.writer)
+ self.writer.close()
diff --git a/tornado/platform/posix.py b/tornado/platform/posix.py
index 572c0139..41a5794c 100644
--- a/tornado/platform/posix.py
+++ b/tornado/platform/posix.py
@@ -21,7 +21,7 @@ from __future__ import absolute_import, division, print_function, with_statement
import fcntl
import os
-from tornado.platform import common, interface
+from tornado.platform import interface
def set_close_exec(fd):
@@ -53,7 +53,7 @@ class Waker(interface.Waker):
def wake(self):
try:
self.writer.write(b"x")
- except (IOError, ValueError):
+ except IOError:
pass
def consume(self):
@@ -67,4 +67,4 @@ class Waker(interface.Waker):
def close(self):
self.reader.close()
- common.try_close(self.writer)
+ self.writer.close()
diff --git a/tornado/web.py b/tornado/web.py
index a0cb0e8e..f4c50e3c 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -2191,13 +2191,29 @@ class RedirectHandler(RequestHandler):
application = web.Application([
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
])
+
+ `RedirectHandler` supports regular expression substitutions. E.g., to
+ swap the first and second parts of a path while preserving the remainder::
+
+ application = web.Application([
+ (r"/(.*?)/(.*?)/(.*)", web.RedirectHandler, {"url": "/{1}/{0}/{2}"}),
+ ])
+
+ The final URL is formatted with `str.format` and the substrings that match
+ the capturing groups. In the above example, a request to "/a/b/c" would be
+ formatted like::
+
+ str.format("/{1}/{0}/{2}", "a", "b", "c") # -> "/b/a/c"
+
+ Use Python's :ref:`format string syntax <formatstrings>` to customize how
+ values are substituted.
"""
def initialize(self, url, permanent=True):
self._url = url
self._permanent = permanent
- def get(self):
- self.redirect(self._url, permanent=self._permanent)
+ def get(self, *args):
+ self.redirect(self._url.format(*args), permanent=self._permanent)
class StaticFileHandler(RequestHandler):
| RedirectHandler re substitution doesn't work
### Code
``` python
import tornado.web
from tornado.web import url
import tornado.ioloop
app = tornado.web.Application([
url(r"/pictures/(.*)", tornado.web.RedirectHandler,
dict(url=r"/photos/\1")),
])
app.listen(8765)
tornado.ioloop.IOLoop.current().start()
```
### Visiting `localhost:8765/pictures/1` raises TypeError: get() takes 1 positional argument but 2 were given
```
ERROR:tornado.application:Uncaught exception GET /pictures/1 (::1)
HTTPServerRequest(protocol='http', host='localhost:8765', method='GET', uri='/pictures/1', version='HTTP/1.1', remote_ip='::1', headers={'Host': 'localhost:8765', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'Dnt': '1', 'Upgrade-Insecure-Requests': '1', 'Connection': 'keep-alive', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36', 'Cookie': 'Pycharm-73b7632f=f7202b97-d898-4046-8aae-ae6536d1d931; _xsrf=2|ca7ba18c|041b228c03a99e0221d548c50c478a96|1476528491', 'Accept-Language': 'zh-CN,zh;q=0.8', 'Accept-Encoding': 'gzip, deflate, sdch, br'})
Traceback (most recent call last):
File "C:\Users\Jeff\AppData\Local\Programs\Python\Python35-32\lib\site-packages\tornado\web.py", line 1467, in _execute
result = method(*self.path_args, **self.path_kwargs)
TypeError: get() takes 1 positional argument but 2 were given
ERROR:tornado.access:500 GET /pictures/1 (::1) 1532.03ms
```
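With the patch in this PR applied, `RedirectHandler.get` accepts the positional capture groups and formats the target URL with `str.format`, so a working version of the snippet above looks like this (a sketch of post-patch usage; note the `{0}` placeholder replaces the unsupported `\1`):

```python
import tornado.ioloop
import tornado.web
from tornado.web import url

app = tornado.web.Application([
    # The patched RedirectHandler formats its url with str.format(*groups),
    # so the first capture group is written {0} rather than the re-style \1.
    url(r"/pictures/(.*)", tornado.web.RedirectHandler,
        dict(url="/photos/{0}")),
])
app.listen(8765)
# GET /pictures/1 now answers "301 Moved Permanently" with Location: /photos/1.
tornado.ioloop.IOLoop.current().start()
```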
| tornadoweb/tornado | diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index 89f34071..b09bb9ab 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -2866,3 +2866,20 @@ class URLSpecReverseTest(unittest.TestCase):
def test_reverse_arguments(self):
self.assertEqual('/api/v1/foo/bar',
url(r'^/api/v1/foo/(\w+)$', None).reverse('bar'))
+
+
+class RedirectHandlerTest(WebTestCase):
+ def get_handlers(self):
+ return [
+ ('/src', WebRedirectHandler, {'url': '/dst'}),
+ (r'/(.*?)/(.*?)/(.*)', WebRedirectHandler, {'url': '/{1}/{0}/{2}'})]
+
+ def test_basic_redirect(self):
+ response = self.fetch('/src', follow_redirects=False)
+ self.assertEqual(response.code, 301)
+ self.assertEqual(response.headers['Location'], '/dst')
+
+ def test_redirect_pattern(self):
+ response = self.fetch('/a/b/c', follow_redirects=False)
+ self.assertEqual(response.code, 301)
+ self.assertEqual(response.headers['Location'], '/b/a/c')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 6
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"sphinx",
"sphinx_rtd_theme",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@3d2bcbedbd2eaab9d221c5e8129240702f2287e7#egg=tornado
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- jinja2==3.0.3
- markupsafe==2.0.1
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- urllib3==1.26.20
prefix: /opt/conda/envs/tornado
| [
"tornado/test/web_test.py::RedirectHandlerTest::test_redirect_pattern"
]
| []
| [
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_cookie_special_char",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_error",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_root_static_path",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_compressed_files",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::DefaultHostMatchingTest::test_default_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header204Test::test_204_headers",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::StatusReasonTest::test_status",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_argument_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip",
"tornado/test/web_test.py::HTTPErrorTest::test_copy",
"tornado/test/web_test.py::ApplicationTest::test_listen",
"tornado/test/web_test.py::URLSpecReverseTest::test_non_reversible",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse_arguments",
"tornado/test/web_test.py::RedirectHandlerTest::test_basic_redirect"
]
| []
| Apache License 2.0 | 836 | [
"tornado/ioloop.py",
"docs/guide/structure.rst",
"tornado/web.py",
"tornado/platform/posix.py",
"tornado/httpserver.py",
"tornado/platform/common.py"
]
| [
"tornado/ioloop.py",
"docs/guide/structure.rst",
"tornado/web.py",
"tornado/platform/posix.py",
"tornado/httpserver.py",
"tornado/platform/common.py"
]
|
mesonbuild__meson-968 | 36a0d162cb65ee6470ed3d6b5e610de8f93b7a9d | 2016-10-28 17:03:21 | b2a39dd06ee139152d5522c961e6ba04acdec791 | tp-m: Perhaps one could also first remove/ignore everything in brackets, that might simplify things or make them more robust? | diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index eb6f06da2..c8ee13fbb 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -216,6 +216,10 @@ builtin_options = {
# uses that. Instead they always set it manually to /etc. This default
# value is thus pointless and not really used but we set it to this
# for consistency with other systems.
+ #
+ # Projects installing to sysconfdir probably want to set the following in project():
+ #
+ # default_options : ['sysconfdir=/etc']
'sysconfdir' : [ UserStringOption, 'Sysconf data directory.', 'etc' ],
'werror' : [ UserBooleanOption, 'Treat warnings as errors.', False ],
'warning_level' : [ UserComboOption, 'Compiler warning level to use.', [ '1', '2', '3' ], '1'],
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index b810e2059..f7045f466 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -173,11 +173,28 @@ def for_darwin(is_cross, env):
return False
+def search_version(text):
+ # Usually of the type 4.1.4 but compiler output may contain
+ # stuff like this:
+ # (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
+ # Limiting major version number to two digits seems to work
+ # thus far. When we get to GCC 100, this will break, but
+ # if we are still relevant when that happens, it can be
+ # considered an achievement in itself.
+ #
+ # This regex is reaching magic levels. If it ever needs
+ # to be updated, do not complexify but convert to something
+ # saner instead.
+ version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
+ match = re.search(version_regex, text)
+ if match:
+ return match.group(0)
+ return 'unknown version'
+
class Environment():
private_dir = 'meson-private'
log_dir = 'meson-logs'
coredata_file = os.path.join(private_dir, 'coredata.dat')
- version_regex = '\d+(\.\d+)+(-[a-zA-Z0-9]+)?'
def __init__(self, source_dir, build_dir, main_script_launcher, options, original_cmd_line_args):
self.source_dir = source_dir
@@ -361,11 +378,7 @@ class Environment():
(out, err) = p.communicate()
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'Free Software Foundation' in out:
defines = self.get_gnu_compiler_defines([compiler])
if not defines:
@@ -382,7 +395,7 @@ class Environment():
if 'Microsoft' in out or 'Microsoft' in err:
# Visual Studio prints version number to stderr but
# everything else to stdout. Why? Lord only knows.
- version = re.search(Environment.version_regex, err).group()
+ version = search_version(err)
return VisualStudioCCompiler([compiler], version, is_cross, exe_wrap)
errmsg = 'Unknown compiler(s): "' + ', '.join(compilers) + '"'
if popen_exceptions:
@@ -422,10 +435,7 @@ class Environment():
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- version = 'unknown version'
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
+ version = search_version(out)
if 'GNU Fortran' in out:
defines = self.get_gnu_compiler_defines([compiler])
@@ -439,10 +449,7 @@ class Environment():
return G95FortranCompiler([compiler], version, is_cross, exe_wrap)
if 'Sun Fortran' in err:
- version = 'unknown version'
- vmatch = re.search(Environment.version_regex, err)
- if vmatch:
- version = vmatch.group(0)
+ version = search_version(err)
return SunFortranCompiler([compiler], version, is_cross, exe_wrap)
if 'ifort (IFORT)' in out:
@@ -510,11 +517,7 @@ class Environment():
(out, err) = p.communicate()
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'Free Software Foundation' in out:
defines = self.get_gnu_compiler_defines([compiler])
if not defines:
@@ -529,7 +532,7 @@ class Environment():
cltype = CLANG_STANDARD
return ClangCPPCompiler(ccache + [compiler], version, cltype, is_cross, exe_wrap)
if 'Microsoft' in out or 'Microsoft' in err:
- version = re.search(Environment.version_regex, err).group()
+ version = search_version(err)
return VisualStudioCPPCompiler([compiler], version, is_cross, exe_wrap)
errmsg = 'Unknown compiler(s): "' + ', '.join(compilers) + '"'
if popen_exceptions:
@@ -557,11 +560,7 @@ class Environment():
(out, err) = p.communicate()
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'Free Software Foundation' in out:
defines = self.get_gnu_compiler_defines(exelist)
return GnuObjCCompiler(exelist, version, is_cross, exe_wrap, defines)
@@ -588,11 +587,7 @@ class Environment():
(out, err) = p.communicate()
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'Free Software Foundation' in out:
defines = self.get_gnu_compiler_defines(exelist)
return GnuObjCPPCompiler(exelist, version, is_cross, exe_wrap, defines)
@@ -609,11 +604,7 @@ class Environment():
(out, err) = p.communicate()
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, err)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(err)
if 'javac' in err:
return JavaCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
@@ -627,11 +618,7 @@ class Environment():
(out, err) = p.communicate()
out = out.decode(errors='ignore')
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'Mono' in out:
return MonoCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
@@ -644,11 +631,7 @@ class Environment():
raise EnvironmentException('Could not execute Vala compiler "%s"' % ' '.join(exelist))
(out, _) = p.communicate()
out = out.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'Vala' in out:
return ValaCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
@@ -661,11 +644,7 @@ class Environment():
raise EnvironmentException('Could not execute Rust compiler "%s"' % ' '.join(exelist))
(out, _) = p.communicate()
out = out.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'rustc' in out:
return RustCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
@@ -699,11 +678,7 @@ class Environment():
raise EnvironmentException('Could not execute D compiler "%s"' % ' '.join(exelist))
(out, _) = p.communicate()
out = out.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, out)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(out)
if 'LLVM D compiler' in out:
return LLVMDCompiler(exelist, version, is_cross)
elif 'gdc' in out:
@@ -720,11 +695,7 @@ class Environment():
raise EnvironmentException('Could not execute Swift compiler "%s"' % ' '.join(exelist))
(_, err) = p.communicate()
err = err.decode(errors='ignore')
- vmatch = re.search(Environment.version_regex, err)
- if vmatch:
- version = vmatch.group(0)
- else:
- version = 'unknown version'
+ version = search_version(err)
if 'Swift' in err:
return SwiftCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 25f2c6bf7..4ef8d921a 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -165,7 +165,16 @@ class Conf:
print('')
print('Directories:')
parr = []
- for key in [ 'prefix', 'libdir', 'libexecdir', 'bindir', 'includedir', 'datadir', 'mandir', 'localedir' ]:
+ for key in ['prefix',
+ 'libdir',
+ 'libexecdir',
+ 'bindir',
+ 'includedir',
+ 'datadir',
+ 'mandir',
+ 'localedir',
+ 'sysconfdir',
+ ]:
parr.append([key, coredata.get_builtin_option_description(key),
self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)])
self.print_aligned(parr)
| Error finding version of arm gcc
When meson tries to detect the version of arm-none-linux-gnueabi-gcc, it picks up the wrong version string.
The output of `arm-none-linux-gnueabi-gcc --version` is:
```
arm-none-linux-gnueabi-gcc (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
Copyright (C) 2013 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
```
Printing the detected version from within meson outputs:
`2014.05-29`
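
The old pattern (`\d+(\.\d+)+(-[a-zA-Z0-9]+)?`) simply grabs the first dotted number in the banner, which here is the toolchain date `2014.05-29`. A minimal standalone sketch of the guarded search introduced by the patch above; the regex is copied verbatim from `search_version` in the diff:

```python
import re

def search_version(text):
    # Major version capped at two digits, plus a lookbehind that forbids
    # a digit or dot immediately before the match, so '2014.05-29' and
    # '20140320' are skipped in favour of '4.8.3'.
    version_regex = r'(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
    match = re.search(version_regex, text)
    return match.group(0) if match else 'unknown version'

banner = ('arm-none-linux-gnueabi-gcc (Sourcery CodeBench Lite 2014.05-29) '
          '4.8.3 20140320 (prerelease)')
print(search_version(banner))  # 4.8.3
```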
| mesonbuild/meson | diff --git a/run_unittests.py b/run_unittests.py
index 0e3b7d5be..cf30276aa 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -17,6 +17,7 @@ import unittest, os, sys, shutil, time
import subprocess
import re, json
import tempfile
+import mesonbuild.environment
from mesonbuild.environment import detect_ninja
from mesonbuild.dependencies import PkgConfigDependency
@@ -32,10 +33,21 @@ def get_soname(fname):
class FakeEnvironment(object):
def __init__(self):
self.cross_info = None
-
+
def is_cross_build(self):
return False
+class InternalTests(unittest.TestCase):
+
+ def test_version_number(self):
+ searchfunc = mesonbuild.environment.search_version
+ self.assertEqual(searchfunc('foobar 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
+ self.assertEqual(searchfunc('2016.10.128'), 'unknown version')
+
class LinuxlikeTests(unittest.TestCase):
def setUp(self):
super().setUp()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 0.35 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y ninja-build python3"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/mesonbuild/meson.git@36a0d162cb65ee6470ed3d6b5e610de8f93b7a9d#egg=meson
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: meson
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/meson
| [
"run_unittests.py::InternalTests::test_version_number"
]
| [
"run_unittests.py::LinuxlikeTests::test_pkgconfig_gen",
"run_unittests.py::LinuxlikeTests::test_vala_c_warnings"
]
| [
"run_unittests.py::LinuxlikeTests::test_basic_soname",
"run_unittests.py::LinuxlikeTests::test_custom_soname",
"run_unittests.py::LinuxlikeTests::test_install_introspection",
"run_unittests.py::LinuxlikeTests::test_pic",
"run_unittests.py::LinuxlikeTests::test_static_compile_order"
]
| []
| Apache License 2.0 | 837 | [
"mesonbuild/coredata.py",
"mesonbuild/mconf.py",
"mesonbuild/environment.py"
]
| [
"mesonbuild/coredata.py",
"mesonbuild/mconf.py",
"mesonbuild/environment.py"
]
|
ntoll__uflash-27 | fe26fe8ea0b7943674cb8165e6939cba3ef0dc88 | 2016-10-29 01:26:31 | fe26fe8ea0b7943674cb8165e6939cba3ef0dc88 | diff --git a/AUTHORS b/AUTHORS
index 0817aa1..c4d08fc 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -1,3 +1,4 @@
Nicholas H.Tollervey ([email protected])
Matt Wheeler ([email protected])
Tom Viner ([email protected])
+Tom Gurion ([email protected])
diff --git a/README.rst b/README.rst
index 0451d63..610092f 100644
--- a/README.rst
+++ b/README.rst
@@ -57,6 +57,15 @@ the path to the Python script in as the first argument to the command::
$ uflash my_script.py
Flashing Python to: /media/ntoll/MICROBIT/micropython.hex
+You can let uflash watch for changes of your script. It will be flashed
+automatically every time you save it::
+
+ $ uflash -w my_script.py
+
+or::
+
+ $ uflash --watch my_script.py
+
At this point uflash will try to automatically detect the path to the device.
However, if you have several devices plugged in and/or know what the path on
the filesystem to the BBC micro:bit already is, you can specify this as a
@@ -124,4 +133,3 @@ with development. Typing ``make`` on its own will list the options thus::
make package - create a deployable package for the project.
make publish - publish the project to PyPI.
make docs - run sphinx to create project documentation.
-
diff --git a/uflash.py b/uflash.py
index 3082398..1bcb02d 100644
--- a/uflash.py
+++ b/uflash.py
@@ -12,6 +12,7 @@ import os
import struct
import sys
from subprocess import check_output
+import time
#: The magic start address in flash memory for a Python script.
_SCRIPT_ADDR = 0x3e000
@@ -291,6 +292,27 @@ def extract(path_to_hex, output_path=None):
print(python_script)
+def watch_file(path, func, *args, **kwargs):
+ """
+ Watch a file for changes by polling its last modification time. Call the
+ provided function with *args and **kwargs upon modification.
+ """
+ if not path:
+ raise ValueError('Please specify a file to watch')
+ print('Watching "{}" for changes'.format(path))
+ last_modification_time = os.path.getmtime(path)
+ try:
+ while True:
+ time.sleep(1)
+ new_modification_time = os.path.getmtime(path)
+ if new_modification_time == last_modification_time:
+ continue
+ func(*args, **kwargs)
+ last_modification_time = new_modification_time
+ except KeyboardInterrupt:
+ pass
+
+
def main(argv=None):
"""
Entry point for the command line tool 'uflash'.
@@ -316,10 +338,18 @@ def main(argv=None):
action='store_true',
help=("Extract python source from a hex file"
" instead of creating the hex file."), )
+ parser.add_argument('-w', '--watch',
+ action='store_true',
+ help='Watch the source file for changes.')
args = parser.parse_args(argv)
if args.extract:
extract(args.source, args.target)
+ elif args.watch:
+ watch_file(args.source, flash,
+ path_to_python=args.source,
+ paths_to_microbits=args.target,
+ path_to_runtime=args.runtime)
else:
flash(path_to_python=args.source, paths_to_microbits=args.target,
path_to_runtime=args.runtime)
| Add command line flag to watch the script for changes and re-flash
I'm currently doing it with `inotifywait`, but I think it might be useful as part of uflash itself, via an additional command-line flag.
```bash
while inotifywait -e modify my_script.py; do
uflash my_script.py
done
```
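
For reference, the patch above implements this without inotify: it polls the file's modification time once per second. A minimal sketch of that loop, mirroring `watch_file` from the diff:

```python
import os
import time

def watch_file(path, func, *args, **kwargs):
    # Poll the mtime once per second and call func on every change;
    # Ctrl-C ends the watch cleanly.
    last_mtime = os.path.getmtime(path)
    try:
        while True:
            time.sleep(1)
            mtime = os.path.getmtime(path)
            if mtime != last_mtime:
                func(*args, **kwargs)
                last_mtime = mtime
    except KeyboardInterrupt:
        pass
```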
(BTW, :+1: for the guidelines for contribution!) | ntoll/uflash | diff --git a/tests/test_uflash.py b/tests/test_uflash.py
index 6794b06..0428b4e 100644
--- a/tests/test_uflash.py
+++ b/tests/test_uflash.py
@@ -7,6 +7,8 @@ import os
import os.path
import sys
import tempfile
+import time
+import threading
import pytest
import uflash
@@ -461,6 +463,19 @@ def test_main_named_args():
path_to_runtime='baz.hex')
+def test_main_watch_flag():
+ """
+ The watch flag cause a call the correct function.
+ """
+ with mock.patch('uflash.watch_file') as mock_watch_file:
+ uflash.main(argv=['-w'])
+ mock_watch_file.assert_called_once_with(None,
+ uflash.flash,
+ path_to_python=None,
+ paths_to_microbits=[],
+ path_to_runtime=None)
+
+
def test_extract_command():
"""
Test the command-line script extract feature
@@ -515,3 +530,44 @@ def test_extract_command_no_source():
"""
with pytest.raises(TypeError):
uflash.extract(None, None)
+
+
+def test_watch_no_source():
+ """
+ If there is no source file the watch command should complain.
+ """
+ with pytest.raises(ValueError):
+ uflash.watch_file(None, lambda: "should never be called!")
+
+
[email protected]('uflash.time')
[email protected]('uflash.os')
+def test_watch_file(mock_os, mock_time):
+ """
+ Make sure that the callback is called each time the file changes.
+ """
+ # Our function will throw KeyboardInterrupt when called for the 2nd time,
+ # ending the watching gracefully.This will help in testing the
+ # watch_file function.
+ call_count = [0]
+
+ def func():
+ call_count[0] = call_count[0] + 1
+ if call_count[0] == 2:
+ raise KeyboardInterrupt()
+
+ # Instead of modifying any file, let's change the return value of
+ # os.path.getmtime. Start with initial value of 0.
+ mock_os.path.getmtime.return_value = 0
+
+ t = threading.Thread(target=uflash.watch_file,
+ args=('path/to/file', func))
+ t.start()
+ time.sleep(0.01)
+ mock_os.path.getmtime.return_value = 1 # Simulate file change
+ time.sleep(0.01)
+ assert t.is_alive()
+ assert call_count[0] == 1
+ mock_os.path.getmtime.return_value = 2 # Simulate file change
+ t.join()
+ assert call_count[0] == 2
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
packaging==24.2
pep8==1.7.1
pluggy==1.5.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
-e git+https://github.com/ntoll/uflash.git@fe26fe8ea0b7943674cb8165e6939cba3ef0dc88#egg=uflash
urllib3==2.3.0
zipp==3.21.0
| name: uflash
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- packaging==24.2
- pep8==1.7.1
- pluggy==1.5.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/uflash
| [
"tests/test_uflash.py::test_main_watch_flag",
"tests/test_uflash.py::test_watch_no_source",
"tests/test_uflash.py::test_watch_file"
]
| []
| [
"tests/test_uflash.py::test_get_version",
"tests/test_uflash.py::test_hexlify",
"tests/test_uflash.py::test_unhexlify",
"tests/test_uflash.py::test_hexlify_empty_script",
"tests/test_uflash.py::test_embed_hex",
"tests/test_uflash.py::test_embed_no_python",
"tests/test_uflash.py::test_embed_no_runtime",
"tests/test_uflash.py::test_extract",
"tests/test_uflash.py::test_extract_not_valid_hex",
"tests/test_uflash.py::test_extract_no_python",
"tests/test_uflash.py::test_find_microbit_posix_exists",
"tests/test_uflash.py::test_find_microbit_posix_missing",
"tests/test_uflash.py::test_find_microbit_nt_exists",
"tests/test_uflash.py::test_find_microbit_nt_missing",
"tests/test_uflash.py::test_find_microbit_unknown_os",
"tests/test_uflash.py::test_save_hex",
"tests/test_uflash.py::test_save_hex_no_hex",
"tests/test_uflash.py::test_save_hex_path_not_to_hex_file",
"tests/test_uflash.py::test_flash_no_args",
"tests/test_uflash.py::test_flash_has_python_no_path_to_microbit",
"tests/test_uflash.py::test_flash_with_path_to_multiple_microbits",
"tests/test_uflash.py::test_flash_with_path_to_microbit",
"tests/test_uflash.py::test_flash_with_path_to_runtime",
"tests/test_uflash.py::test_flash_cannot_find_microbit",
"tests/test_uflash.py::test_flash_wrong_python",
"tests/test_uflash.py::test_main_no_args",
"tests/test_uflash.py::test_main_first_arg_python",
"tests/test_uflash.py::test_main_first_arg_help",
"tests/test_uflash.py::test_main_first_arg_not_python",
"tests/test_uflash.py::test_main_two_args",
"tests/test_uflash.py::test_main_multiple_microbits",
"tests/test_uflash.py::test_main_runtime",
"tests/test_uflash.py::test_main_named_args",
"tests/test_uflash.py::test_extract_command",
"tests/test_uflash.py::test_extract_paths",
"tests/test_uflash.py::test_extract_command_source_only",
"tests/test_uflash.py::test_extract_command_no_source"
]
| []
| MIT License | 838 | [
"README.rst",
"uflash.py",
"AUTHORS"
]
| [
"README.rst",
"uflash.py",
"AUTHORS"
]
|
|
wireservice__agate-637 | 0d2671358cdea94c83bd8f28b5a6718a9326b033 | 2016-10-30 16:11:15 | 97cb37f673af480f74fef546ceefd3ba24aff93b | diff --git a/agate/aggregations/any.py b/agate/aggregations/any.py
index 70fa702..67a9651 100644
--- a/agate/aggregations/any.py
+++ b/agate/aggregations/any.py
@@ -32,7 +32,7 @@ class Any(Aggregation):
column = table.columns[self._column_name]
data = column.values()
- if isinstance(column.data_type, Boolean):
+ if isinstance(column.data_type, Boolean) and self._test is None:
return any(data)
return any(self._test(d) for d in data)
| agate.All cannot test whether all data is False
If the column data type is Boolean, the user-supplied test is ignored and the aggregation simply searches for True values.
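
A minimal reproduction (the column name and the `lambda r: not r` pattern come from the test patch below; the exact rows are an illustrative assumption):

```python
import agate

# An all-False boolean column.
table = agate.Table([(False,), (False,)], ['test'], [agate.Boolean()])

# Before the fix, the Boolean shortcut returned any(data) == False and
# never called the supplied test; with the fix the test is honoured:
print(agate.Any('test', lambda r: not r).run(table))  # True after the fix
```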
| wireservice/agate | diff --git a/tests/test_aggregations.py b/tests/test_aggregations.py
index c3c8fbb..11eefe1 100644
--- a/tests/test_aggregations.py
+++ b/tests/test_aggregations.py
@@ -138,6 +138,7 @@ class TestBooleanAggregation(unittest.TestCase):
table = Table(rows, ['test'], [Boolean()])
Any('test').validate(table)
self.assertEqual(Any('test').run(table), False)
+ self.assertEqual(Any('test', lambda r: not r).run(table), True)
def test_all(self):
rows = [
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"apt-get install -y locales"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/wireservice/agate.git@0d2671358cdea94c83bd8f28b5a6718a9326b033#egg=agate
awesome-slugify==1.6.5
babel==2.17.0
exceptiongroup==1.2.2
iniconfig==2.1.0
isodate==0.7.2
numpy==2.0.2
packaging==24.2
pandas==2.2.3
parsedatetime==2.6
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytimeparse==1.1.8
pytz==2025.2
regex==2024.11.6
six==1.17.0
tomli==2.2.1
tzdata==2025.2
Unidecode==0.4.21
| name: agate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- awesome-slugify==1.6.5
- babel==2.17.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- isodate==0.7.2
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- parsedatetime==2.6
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytimeparse==1.1.8
- pytz==2025.2
- regex==2024.11.6
- six==1.17.0
- tomli==2.2.1
- tzdata==2025.2
- unidecode==0.04.21
prefix: /opt/conda/envs/agate
| [
"tests/test_aggregations.py::TestBooleanAggregation::test_any"
]
| []
| [
"tests/test_aggregations.py::TestSimpleAggregation::test_all",
"tests/test_aggregations.py::TestSimpleAggregation::test_any",
"tests/test_aggregations.py::TestSimpleAggregation::test_count",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_column",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_value",
"tests/test_aggregations.py::TestSimpleAggregation::test_has_nulls",
"tests/test_aggregations.py::TestSimpleAggregation::test_summary",
"tests/test_aggregations.py::TestBooleanAggregation::test_all",
"tests/test_aggregations.py::TestDateTimeAggregation::test_max",
"tests/test_aggregations.py::TestDateTimeAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_deciles",
"tests/test_aggregations.py::TestNumberAggregation::test_iqr",
"tests/test_aggregations.py::TestNumberAggregation::test_mad",
"tests/test_aggregations.py::TestNumberAggregation::test_max",
"tests/test_aggregations.py::TestNumberAggregation::test_max_precision",
"tests/test_aggregations.py::TestNumberAggregation::test_mean",
"tests/test_aggregations.py::TestNumberAggregation::test_mean_with_nulls",
"tests/test_aggregations.py::TestNumberAggregation::test_median",
"tests/test_aggregations.py::TestNumberAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_mode",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_population_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_population_variance",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_quintiles",
"tests/test_aggregations.py::TestNumberAggregation::test_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_sum",
"tests/test_aggregations.py::TestNumberAggregation::test_variance",
"tests/test_aggregations.py::TestTextAggregation::test_max_length",
"tests/test_aggregations.py::TestTextAggregation::test_max_length_invalid"
]
| []
| MIT License | 839 | [
"agate/aggregations/any.py"
]
| [
"agate/aggregations/any.py"
]
|
|
wireservice__agate-638 | 97cb37f673af480f74fef546ceefd3ba24aff93b | 2016-10-30 16:50:31 | 97cb37f673af480f74fef546ceefd3ba24aff93b | diff --git a/agate/aggregations/__init__.py b/agate/aggregations/__init__.py
index e4f40cc..cf82a30 100644
--- a/agate/aggregations/__init__.py
+++ b/agate/aggregations/__init__.py
@@ -21,6 +21,7 @@ from agate.aggregations.all import All # noqa
from agate.aggregations.any import Any # noqa
from agate.aggregations.count import Count # noqa
from agate.aggregations.deciles import Deciles # noqa
+from agate.aggregations.first import First # noqa
from agate.aggregations.has_nulls import HasNulls # noqa
from agate.aggregations.iqr import IQR # noqa
from agate.aggregations.mad import MAD # noqa
diff --git a/agate/aggregations/first.py b/agate/aggregations/first.py
new file mode 100644
index 0000000..37e1695
--- /dev/null
+++ b/agate/aggregations/first.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+from agate.aggregations.base import Aggregation
+from agate.data_types import Boolean
+
+
+class First(Aggregation):
+ """
+ Returns the first value that passes a test.
+
+ If the test is omitted, the aggregation will return the first value in the column.
+
+ If no values pass the test, the aggregation will raise an exception.
+
+ :param column_name:
+ The name of the column to check.
+ :param test:
+ A function that takes a value and returns `True` or `False`. Test may be
+ omitted when checking :class:`.Boolean` data.
+ """
+ def __init__(self, column_name, test=None):
+ self._column_name = column_name
+ self._test = test
+
+ def get_aggregate_data_type(self, table):
+ return table.columns[self._column_name].data_type
+
+ def validate(self, table):
+ column = table.columns[self._column_name]
+ data = column.values()
+
+ if self._test is not None and len([d for d in data if self._test(d)]) == 0:
+ raise ValueError('No values pass the given test.')
+
+ def run(self, table):
+ column = table.columns[self._column_name]
+ data = column.values()
+
+ if self._test is None:
+ return data[0]
+
+ return next((d for d in data if self._test(d)))
| agate.First aggregation
I end up doing this all the time:
```python
def pick_first(c):
return c[0]
agate.Summary('Serial_Num', agate.Text(), pick_first)
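# With the First aggregation this patch adds, the boilerplate above
# collapses to (signature per agate/aggregations/first.py in the diff):
#   agate.First('Serial_Num')               # first value in the column
#   agate.First('Serial_Num', lambda d: d)  # first value passing a test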
``` | wireservice/agate | diff --git a/tests/test_aggregations.py b/tests/test_aggregations.py
index 11eefe1..e0dc625 100644
--- a/tests/test_aggregations.py
+++ b/tests/test_aggregations.py
@@ -67,6 +67,17 @@ class TestSimpleAggregation(unittest.TestCase):
self.assertEqual(All('one', lambda d: d != 5).run(self.table), True)
self.assertEqual(All('one', lambda d: d == 2).run(self.table), False)
+ def test_first(self):
+ with self.assertRaises(ValueError):
+ First('one', lambda d: d == 5).validate(self.table)
+
+ First('one', lambda d: d).validate(self.table)
+
+ self.assertIsInstance(First('one').get_aggregate_data_type(self.table), Number)
+ self.assertEqual(First('one').run(self.table), 1)
+ self.assertEqual(First('one', lambda d: d == 2).run(self.table), 2)
+ self.assertEqual(First('one', lambda d: not d).run(self.table), None)
+
def test_count(self):
rows = (
(1, 2, 'a'),
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements-py3.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/wireservice/agate.git@97cb37f673af480f74fef546ceefd3ba24aff93b#egg=agate
alabaster==0.7.16
awesome-slugify==1.6.5
babel==2.17.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isodate==0.7.2
Jinja2==3.1.6
MarkupSafe==3.0.2
nose==1.3.7
packaging==24.2
parsedatetime==2.6
platformdirs==4.3.7
pluggy==1.5.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
pytimeparse==1.1.8
pytz==2025.2
regex==2024.11.6
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
Unidecode==0.4.21
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: agate
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- awesome-slugify==1.6.5
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isodate==0.7.2
- jinja2==3.1.6
- markupsafe==3.0.2
- nose==1.3.7
- packaging==24.2
- parsedatetime==2.6
- platformdirs==4.3.7
- pluggy==1.5.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytimeparse==1.1.8
- pytz==2025.2
- regex==2024.11.6
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- unidecode==0.04.21
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/agate
| [
"tests/test_aggregations.py::TestSimpleAggregation::test_first"
]
| []
| [
"tests/test_aggregations.py::TestSimpleAggregation::test_all",
"tests/test_aggregations.py::TestSimpleAggregation::test_any",
"tests/test_aggregations.py::TestSimpleAggregation::test_count",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_column",
"tests/test_aggregations.py::TestSimpleAggregation::test_count_value",
"tests/test_aggregations.py::TestSimpleAggregation::test_has_nulls",
"tests/test_aggregations.py::TestSimpleAggregation::test_summary",
"tests/test_aggregations.py::TestBooleanAggregation::test_all",
"tests/test_aggregations.py::TestBooleanAggregation::test_any",
"tests/test_aggregations.py::TestDateTimeAggregation::test_max",
"tests/test_aggregations.py::TestDateTimeAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_deciles",
"tests/test_aggregations.py::TestNumberAggregation::test_iqr",
"tests/test_aggregations.py::TestNumberAggregation::test_mad",
"tests/test_aggregations.py::TestNumberAggregation::test_max",
"tests/test_aggregations.py::TestNumberAggregation::test_max_precision",
"tests/test_aggregations.py::TestNumberAggregation::test_mean",
"tests/test_aggregations.py::TestNumberAggregation::test_mean_with_nulls",
"tests/test_aggregations.py::TestNumberAggregation::test_median",
"tests/test_aggregations.py::TestNumberAggregation::test_min",
"tests/test_aggregations.py::TestNumberAggregation::test_mode",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles",
"tests/test_aggregations.py::TestNumberAggregation::test_percentiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_population_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_population_variance",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles",
"tests/test_aggregations.py::TestNumberAggregation::test_quartiles_locate",
"tests/test_aggregations.py::TestNumberAggregation::test_quintiles",
"tests/test_aggregations.py::TestNumberAggregation::test_stdev",
"tests/test_aggregations.py::TestNumberAggregation::test_sum",
"tests/test_aggregations.py::TestNumberAggregation::test_variance",
"tests/test_aggregations.py::TestTextAggregation::test_max_length",
"tests/test_aggregations.py::TestTextAggregation::test_max_length_invalid"
]
| []
| MIT License | 840 | [
"agate/aggregations/first.py",
"agate/aggregations/__init__.py"
]
| [
"agate/aggregations/first.py",
"agate/aggregations/__init__.py"
]
|
|
napjon__krisk-61 | cbf39a0b5f2876a7cf1ce45361be3ba79aba4dc7 | 2016-10-31 09:52:29 | a676433768a62b61f5861c68c127e40970914764 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/61?src=pr) is 87.72% (diff: 40.62%)
> Merging [#61](https://codecov.io/gh/napjon/krisk/pull/61?src=pr) into [master](https://codecov.io/gh/napjon/krisk/branch/master?src=pr) will decrease coverage by **4.16%**
```diff
@@ master #61 diff @@
==========================================
Files 8 8
Lines 370 391 +21
Methods 0 0
Messages 0 0
Branches 55 60 +5
==========================================
+ Hits 340 343 +3
- Misses 19 37 +18
Partials 11 11
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [75dfe7d...550c497](https://codecov.io/gh/napjon/krisk/compare/75dfe7d414d23c488802ec242c913845253527bf...550c49756af45f8d46d670216dda7fd6ff8d3fb1?src=pr) | diff --git a/krisk/chart/core.py b/krisk/chart/core.py
index e62d98f..f4ae35a 100644
--- a/krisk/chart/core.py
+++ b/krisk/chart/core.py
@@ -419,6 +419,7 @@ class Chart(object):
def _set_label_axes(self, xy, **kwargs):
"""Set label axes name and other customization"""
assert xy in ['x','y']
+
self.option[xy + 'Axis'].update(**kwargs)
return self
@@ -468,6 +469,8 @@ class Chart(object):
------
Chart object
"""
+ if self._kwargs_chart_.get('type', None) == 'bar_line':
+ raise NotImplementedError
label_kwargs = dict(name=name,
nameLocation=axis_position,
nameGap=axis_gap,
diff --git a/krisk/plot/api.py b/krisk/plot/api.py
index dd4437d..d37dc89 100644
--- a/krisk/plot/api.py
+++ b/krisk/plot/api.py
@@ -21,32 +21,31 @@ def bar(df,
x: string
columns to be used as category axis
y: string, default None
- if None, use count of category value. otherwise aggregate based on y columns
- category: string, default None
+ if None, use count of category. otherwise aggregate based on y columns
+ c: string, default None
another grouping columns inside x-axis
how: string, default None
- to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
- reduced operations.
+ to be passed to pd.group_by(x).aggregate(how). Can be mean,median,
+ or any reduced operations.
stacked: Boolean, default False.
Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the
- last category. if 'all' and stacked, annotate all category
+ annotate: string, {'all', True, None} default None
+ if True, annotate value on top of the plot element. If stacked is
+ also True, annotate the last category. if 'all' and stacked,
+ annotate all category
full: boolean, default False.
If true, set to full area stacked chart. Only work if stacked is True.
trendline: boolean, default False.
- If true, add line that connected the bars. Only work if not category, category but stacked,
- or not full.
- sort_on: {'index', 'values', int, 'count', 'mean', 'std', 'min', '25%', '50%', '75%', 'max'},
- default 'index'.
- Add sort mode. Only work when c is None.
+ If true, add line that connected the bars. Only work if not category,
+ category but stacked, or not full.
+ sort_on: {'index', 'values', int, function}, default 'index'.
If index, sort index on lexicographical order. use as s.sort_index()
if values, sort based on values. Use as s.sort_values()
- If string, deviation from value provided by pd.Series.describe()
+ If function, use it as aggregate (e.g. grouped.agg('mean' or np.mean))
if integer, treat as value and deviate from that value
sort_c_on: string, default None.
- specify a category as basis sort value if c is specified. Must be specified when use
- sort_on other than default value.
+ specify a category as basis sort value if c is specified. Must be
+ specified when use sort_on other than default value.
ascending: boolean, default True
sort ascending vs. descending
@@ -54,8 +53,9 @@ def bar(df,
-------
Chart Object
"""
- return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
- trendline=trendline, sort_on=sort_on, sort_c_on=sort_c_on, ascending=ascending,
+ return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,
+ full=full, trendline=trendline,
+ sort_on=sort_on, sort_c_on=sort_c_on, ascending=ascending,
annotate='top' if annotate == True else annotate)
@@ -80,31 +80,32 @@ def line(df,
x: string
columns to be used as category axis
y: string, default None
- if None, use count of category value. otherwise aggregate based on y columns
+ if None, use count of category. otherwise aggregate based on y columns
c: string, default None
category column inside x-axis
how: string, default None
- to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
- reduced operations.
- stacked: Boolean, default False.
+ to be passed to pd.group_by(x).aggregate(how). Can be mean,median,
+ or any reduced operations.
+ stacked: boolean, default False.
Whether to stacked category on top of the other categories.
+ area: boolean, default False.
+ Whether to fill the area with line colors.
annotate: string, {'all',True} default None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ if True, annotate value on top of the plot element. If stacked is
+ also True, annotate the last category. if 'all' and stacked,
+ annotate all category
full: boolean, default False.
If true, set to full area stacked chart. Only work if stacked is True.
smooth: boolean, default False.
If true, smooth the line.
- sort_on: {'index', 'values', int, 'count', 'mean', 'std', 'min', '25%', '50%', '75%', 'max'},
- default 'index'.
- Add sort mode. Only work when c is None.
+ sort_on: {'index', 'values', int, function}, default 'index'.
If index, sort index on lexicographical order. use as s.sort_index()
if values, sort based on values. Use as s.sort_values()
- If string, deviation from value provided by pd.Series.describe()
+ If function, use it as aggregate (e.g. grouped.agg('mean' or np.mean))
if integer, treat as value and deviate from that value
sort_c_on: string, default None.
- specify a category as basis sort value if c is specified. Must be specified when use
- sort_on other than default value.
+ specify a category as basis sort value if c is specified. Must be
+ specified when use sort_on other than default value.
ascending: boolean, default True
sort ascending vs. descending
@@ -112,9 +113,57 @@ def line(df,
-------
Chart Object
"""
- return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,full=full,
- smooth=smooth, sort_on=sort_on, sort_c_on=sort_c_on, ascending=ascending,
- annotate='top' if annotate == True else annotate)
+ return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,
+ area=area,full=full, smooth=smooth,
+ sort_on=sort_on, sort_c_on=sort_c_on, ascending=ascending,
+ annotate='top' if annotate is True else annotate)
+
+
+def bar_line(df, x, ybar, yline, bar_aggfunc='mean', line_aggfunc='mean',
+ sort_on='index', ascending=True, is_distinct=False,
+ hide_split_line=True, style_tooltip=True):
+ """
+ Parameters
+ ----------
+ df: pd.DataFrame
+ data to be used for the chart
+ x: string
+ column to be used as category axis
+ ybar: string
+ column to be used as bar values
+ yline:
+ column to be used as line values
+ bar_aggfunc: string (mapping function) or function, default 'mean'
+ Function to use for aggregating groups on bar values
+ line_aggfunc: string (mapping function) or function, default 'mean'
+ Function to use for aggregating groups on line values
+ sort_on: {'index', 'ybar', 'yline'}, default 'index'
+ sorting x-axis. If index, sort on x. if either `ybar` or `yline`,
+ sort based on values
+ ascending: boolean, default True
+ sort ascending vs. descending
+ is_distinct: boolean, default False
+ Don't use aggregation on this data. Will use drop_duplicates instead.
+ Ignore `bar_aggfunc`, `line_aggfunc`, `sort_on`, `ascending` parameters.
+ sort_on deliberately disabled in is_distinct mode to allow already
+ sorted distinct data.
+ hide_split_line: boolean, default True
+ Whether to hide the split line of both y-axis.
+ style_tooltip: boolean, default True
+ Whether to offer help to style tooltip. If True, execute
+ `chart.set_tooltip_style(trigger='axis',axis_pointer='shadow')`
+
+ Returns
+ -------
+ Chart Object
+ """
+ return make_chart(df, x=x, ybar=ybar, yline=yline,
+ bar_aggfunc=bar_aggfunc, line_aggfunc=line_aggfunc,
+ is_distinct=is_distinct,
+ sort_on=sort_on, ascending=ascending,
+ hide_split_line=hide_split_line,
+ style_tooltip=style_tooltip,
+ c=None, type='bar_line')
def hist(df,
@@ -140,9 +189,10 @@ def hist(df,
Whether normalize the histogram
stacked: Boolean, default False.
Whether to stacked category on top of the other categories.
- annotate: string, {'all',True} default None
- if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ annotate: string, {'all',True, None} default None
+ if True, annotate value on top of the plot element. If stacked is also
+ True, annotate the last category. if 'all' and stacked, annotate all
+ category
density: boolean, default False.
Whether to add density to the plot
@@ -150,9 +200,9 @@ def hist(df,
-------
Chart Object
"""
- return make_chart(df,type='hist',x=x,c=c,bins=bins,normed=normed,stacked=stacked,
- density=density,
- annotate='top' if annotate == True else annotate)
+ return make_chart(df,type='hist',x=x,c=c,bins=bins,normed=normed,
+ stacked=stacked, density=density,
+ annotate='top' if annotate is True else annotate)
def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
@@ -167,7 +217,8 @@ def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
Used as sizing value of the scatter points
c: string, default None
column used as grouping color category
- saturation
+ saturate: string, default None
+ column to use for saturation
size_px: tuple, default (10,70)
boundary size, lower and upper limit in pixel for min-max scatter points
@@ -176,4 +227,5 @@ def scatter(df, x, y, s=None, c=None, saturate=None, size_px=(10, 70)):
-------
Chart Object
"""
- return make_chart(df,type='scatter',x=x,y=y,s=s,c=c,saturate=saturate,size_px=size_px)
+ return make_chart(df,type='scatter',x=x,y=y,s=s,c=c,
+ saturate=saturate,size_px=size_px)
diff --git a/krisk/plot/bar_line.py b/krisk/plot/make_bar_line.py
similarity index 66%
rename from krisk/plot/bar_line.py
rename to krisk/plot/make_bar_line.py
index 931a83e..05f7201 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/make_bar_line.py
@@ -12,7 +12,7 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
chart_type = kwargs['type']
if chart_type in ['bar', 'line']:
- data = get_bar_line_data(df, x, c, **kwargs)
+ data = get_bar_or_line_data(df, x, c, **kwargs)
chart.option['xAxis']['data'] = data.index.values.tolist()
elif chart_type == 'hist':
@@ -20,6 +20,11 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
data, bins = get_hist_data(df, x, c, **kwargs)
chart.option['xAxis']['data'] = bins
+ elif chart_type == 'bar_line':
+ data = set_barline(df, x, chart, **kwargs)
+ chart.option['xAxis']['data'] = data.index.values.tolist()
+ return
+
if c:
# append data for every category
for cat in data.columns:
@@ -50,20 +55,21 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
if kwargs['annotate'] == 'top':
series[-1]['label'] = d_annotate
- # TODO: make annotate receive all kinds supported in echarts.
+ # TODO: make annotate receive all kinds supported in echarts.
# Special Bar Condition: Trendline
if kwargs['type'] == 'bar' and kwargs['trendline']:
- trendline = {'name':'trendline', 'type': 'line',
+ trendline = {'name': 'trendline', 'type': 'line',
'lineStyle': {'normal': {'color': '#000'}}}
if c and kwargs['stacked']:
- trendline['data'] = [0] * len(series[-1]['data'])
+ trendline['data'] = [0] * len(series[-1]['data'])
trendline['stack'] = c
elif c is None:
trendline['data'] = series[0]['data']
else:
- raise AssertionError('Trendline must either stacked category, or not category')
+ raise AssertionError('Trendline must either stacked category,'
+ ' or not category')
series.append(trendline)
@@ -81,8 +87,8 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
'lineStyle': {'normal': {'color': '#000'}}}
chart.option['xAxis']['boundaryGap'] = False
- # The density have to be closed at zero. So all of xAxis and series must be updated
- # To incorporate the changes
+ # The density have to be closed at zero. So all of xAxis and series
+ # must be updated to incorporate the changes
chart.option['xAxis']['data'] = [0] + chart.option['xAxis']['data'] + [0]
for s in series:
@@ -93,12 +99,13 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
elif c is None:
density['data'] = [0] + round_list(data) + [0]
else:
- raise AssertionError('Density must either stacked category, or not category')
+ raise AssertionError('Density must either stacked category, '
+ 'or not category')
series.append(density)
-def get_bar_line_data(df, x, c, y, **kwargs):
+def get_bar_or_line_data(df, x, c, y, **kwargs):
"""Get Bar and Line manipulated data"""
if c and y:
@@ -115,7 +122,7 @@ def get_bar_line_data(df, x, c, y, **kwargs):
else:
data = df[x].value_counts()
- #Specify sort_on and order method
+ # Specify sort_on and order method
sort_on = kwargs['sort_on']
descr_keys = pd.Series([0]).describe().keys().tolist()
@@ -126,11 +133,13 @@ def get_bar_line_data(df, x, c, y, **kwargs):
data.sort_index(inplace=True, ascending=kwargs['ascending'])
else:
if sort_on != 'values':
- val_deviation = data.describe().loc[sort_on] if isinstance(sort_on, str) else sort_on
+ val_deviation = sort_on(data) if callable(sort_on) else sort_on
data = data - val_deviation
if c:
assert kwargs['sort_c_on'] is not None
- data.sort_values(kwargs['sort_c_on'], inplace=True, ascending=kwargs['ascending'])
+ (data.sort_values(kwargs['sort_c_on'],
+ inplace=True,
+ ascending=kwargs['ascending']))
else:
data.sort_values(inplace=True, ascending=kwargs['ascending'])
@@ -157,3 +166,42 @@ def get_hist_data(df, x, c, **kwargs):
data = pd.Series(y_val)
return data, bins
+
+
+def set_barline(df, x, chart, **kwargs):
+ """Set Bar-Line charts"""
+
+ ybar = kwargs['ybar']
+ yline = kwargs['yline']
+
+ if kwargs['is_distinct'] is True:
+ data = df[[x, ybar, yline]].drop_duplicates(subset=[x]).copy()
+ data.index = data.pop(x)
+ else:
+ data = (df
+ .groupby(x)
+ .agg({ybar: kwargs['bar_aggfunc'],
+ yline: kwargs['line_aggfunc']}))
+
+ assert kwargs['sort_on'] in ['index', 'ybar', 'yline']
+ if kwargs['sort_on'] == 'index':
+ data.sort_index(ascending=kwargs['ascending'], inplace=True)
+ else:
+ data.sort_values(kwargs[kwargs['sort_on']],
+ ascending=kwargs['ascending'], inplace=True)
+
+ def get_series(col, type): return dict(name=col, type=type,
+ data=round_list(data[col]))
+ chart.option['series'] = [
+ get_series(ybar, 'bar'),
+ dict(yAxisIndex=1, **get_series(yline, 'line'))
+ ]
+
+ if kwargs['hide_split_line'] is True:
+ def get_yaxis(col): return {'name': col, 'splitLine': {'show': False}}
+ chart.option['yAxis'] = [get_yaxis(ybar), get_yaxis(yline)]
+
+ if kwargs['style_tooltip'] is True:
+ chart.set_tooltip_style(axis_pointer='shadow', trigger='axis')
+
+ return data
diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index 68e3c41..c11b91d 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -33,20 +33,20 @@ def insert_series_data(data, x, chart_type, chart, cat=None):
def make_chart(df, **kwargs):
- from krisk.plot.bar_line import set_bar_line_chart
- from krisk.plot.scatter_geo import set_scatter_chart
+ from krisk.plot.make_bar_line import set_bar_line_chart
+ from krisk.plot.make_scatter_geo import set_scatter_chart
chart = Chart(**kwargs)
chart._kwargs_chart_['data_columns'] = df.columns
-
chart.set_xlabel(kwargs['x'])
+
if kwargs.get('y', None):
chart.set_ylabel(kwargs['y'])
if kwargs['type'] == 'line':
chart.set_tooltip_style(trigger='axis',axis_pointer='shadow')
- if kwargs['type'] in ['bar', 'line', 'hist']:
+ if kwargs['type'] in ['bar', 'line', 'hist', 'bar_line']:
set_bar_line_chart(chart, df, **kwargs)
elif kwargs['type'] == 'scatter':
| Add bar_line chart
Add a chart that combines bar and line series, plotting different columns against the same x-axis. For example:
``` Python
kk.bar_line(df, x, y_bar, y_line, c=None, .....)
```
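For reference, the test patch further down exercises the new API like this; the toy frame below is purely illustrative, standing in for the `gapminder` fixture used in the tests, so treat it as a usage sketch rather than final documentation:

``` python
import pandas as pd
import krisk.plot as kk

# Toy frame standing in for the gapminder fixture used in the tests.
df = pd.DataFrame({'continent': ['Africa', 'Asia', 'Europe'],
                   'lifeExp': [59.0, 37.5, 68.4],
                   'gdpPercap': [4426.0, 802.7, 3255.4]})

# 'continent' is the shared x-axis; 'lifeExp' becomes the bar series and
# 'gdpPercap' the line series on a secondary y-axis (yAxisIndex=1).
chart = kk.bar_line(df, 'continent', 'lifeExp', 'gdpPercap')

# is_distinct=True keeps the first row per x value instead of aggregating.
chart_distinct = kk.bar_line(df, 'continent', 'lifeExp', 'gdpPercap',
                             is_distinct=True)
```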
| napjon/krisk | diff --git a/krisk/plot/scatter_geo.py b/krisk/plot/make_scatter_geo.py
similarity index 100%
rename from krisk/plot/scatter_geo.py
rename to krisk/plot/make_scatter_geo.py
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index e3861b5..29ba507 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -1,6 +1,7 @@
import json
import pytest
import krisk.plot as kk
+import numpy as np
DATA_DIR = 'krisk/tests/data'
read_option_tests = lambda f: json.load(open(DATA_DIR + '/' + f, 'r'))
@@ -11,6 +12,7 @@ def assert_barline_data(plot, true_option, test_legend=True):
if test_legend:
assert plot.option['legend']['data'] == true_option['legend']['data']
+
def assert_scatter_data(plot, true_option):
assert plot.option['series'][0]['data'] == true_option['series'][0]['data']
assert plot.option['xAxis'] == true_option['xAxis']
@@ -49,12 +51,14 @@ def test_bar(gapminder):
p4 = kk.bar(gapminder,'continent',y='gdpPercap',how='mean')
opt4 = {'legend': {'data': []},
- 'series': [{'data': [4426.026, 8955.554, 802.675, 3255.367, 19980.596],
+ 'series': [{'data': [4426.026, 8955.554, 802.675,
+ 3255.367, 19980.596],
'name': 'continent',
'type': 'bar'}],
'title': {'text': ''},
'tooltip': {'axisPointer': {'type': ''}},
- 'xAxis': {'data': ['Africa', 'Americas', 'Asia', 'Europe', 'Oceania']},
+ 'xAxis': {'data': ['Africa', 'Americas', 'Asia',
+ 'Europe', 'Oceania']},
'yAxis': {}}
assert_barline_data(p4, opt4, test_legend=False)
@@ -70,7 +74,8 @@ def test_trendline(gapminder):
assert p1.option['series'][-1]['type'] == 'line'
assert p1.option['series'][-1]['lineStyle'] == {'normal': {'color': '#000'}}
- p2 = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,c='continent',stacked=True)
+ p2 = kk.bar(gapminder,'year',how='mean',y='pop',trendline=True,
+ c='continent',stacked=True)
opt2 = read_option_tests('bar_year_pop_mean_continent_trendline.json')
assert_barline_data(p2, opt2)
assert p2.option['series'][-1]['data'] == opt2['series'][-1]['data']
@@ -97,12 +102,16 @@ def test_line(gapminder):
def test_smooth_line(gapminder):
- p = kk.line(gapminder[gapminder.year == 1952],'continent',y='pop',how='mean',smooth=True)
+ p = kk.line(gapminder[gapminder.year == 1952],'continent',y='pop',
+ how='mean',smooth=True)
assert p.option['series'][0]['smooth'] == True
+
def test_full_bar_line(gapminder):
- bar = kk.bar(gapminder,'year',c='continent',y='pop',how='mean',stacked=True,full=True,annotate='all')
- line = kk.line(gapminder,'year',c='continent',y='pop',how='mean',stacked=True,full=True,annotate='all')
+ bar = kk.bar(gapminder,'year',c='continent',y='pop',how='mean',
+ stacked=True,full=True,annotate='all')
+ line = kk.line(gapminder,'year',c='continent',y='pop',how='mean',
+ stacked=True,full=True,annotate='all')
for i in range(len(bar.option['series'])):
bar.option['series'][i].pop('type')
@@ -116,11 +125,15 @@ def test_full_bar_line(gapminder):
assert_barline_data(bar, true_option)
assert_barline_data(line, true_option)
+
def test_sort_bar_line(gapminder):
- p = kk.line(gapminder,'year', y='pop', how='mean',c='continent', sort_on='mean', sort_c_on='Americas')
+ p = kk.line(gapminder,'year', y='pop', how='mean',c='continent',
+ sort_on= np.mean ,sort_c_on='Americas')
- assert p.option['xAxis']['data'] == [1952, 1957, 1962, 1967, 1972, 1977, 1982, 1987, 1992, 1997, 2002, 2007]
- assert p.option['legend']['data'] == ['Africa', 'Americas', 'Asia', 'Europe', 'Oceania']
+ assert p.option['xAxis']['data'] == [1952, 1957, 1962, 1967, 1972, 1977,
+ 1982, 1987, 1992, 1997, 2002, 2007]
+ assert p.option['legend']['data'] == ['Africa', 'Americas', 'Asia',
+ 'Europe', 'Oceania']
assert p.option['series'][0] == {'data': [-10595881.167,
-9604550.167,
-8874458.167,
@@ -157,15 +170,18 @@ def test_density(gapminder):
p1 = kk.hist(gapminder,'lifeExp',density=True)
assert p1.option['series'][0]['data'] == [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6]
- assert p1.option['series'][-1] == {'data': [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6, 0],
+ assert p1.option['series'][-1] == {'data': [0, 4, 2, 7, 2, 2,
+ 3, 5, 13, 16, 6, 0],
'lineStyle': {'normal': {'color': '#000'}},
'name': 'density',
'smooth': True,
'type': 'line'}
assert p1.option['xAxis']['boundaryGap'] == False
- assert p1.option['xAxis']['data'] == [0, 28, 34, 39, 44, 49, 55, 60, 65, 70, 75, 81, 0]
+ assert p1.option['xAxis']['data'] == [0, 28, 34, 39, 44, 49, 55,
+ 60, 65, 70, 75, 81, 0]
- p2 = kk.hist(gapminder,'lifeExp',bins=10,c='continent',stacked=True,density=True)
+ p2 = kk.hist(gapminder,'lifeExp',bins=10,c='continent',
+ stacked=True,density=True)
opt2 = read_option_tests('hist_lifeExp_b10_continent_density.json')
assert_barline_data(p2, opt2)
@@ -175,7 +191,6 @@ def test_density(gapminder):
pass
-
def test_scatter(gapminder):
# Simple Scatter
p1 = kk.scatter(gapminder[gapminder.year == 1952],'pop','lifeExp')
@@ -204,6 +219,33 @@ def test_scatter(gapminder):
# Scatter
- p3 = kk.scatter(gapminder[gapminder.year == 1952], 'lifeExp', 'gdpPercap', s='pop')
+ p3 = kk.scatter(gapminder[gapminder.year == 1952],
+ 'lifeExp', 'gdpPercap', s='pop')
opt3 = read_option_tests('scatter_single.json')
assert_scatter_data(p3, opt3)
+
+
+def test_bar_line(gapminder):
+
+ p1 = kk.bar_line(gapminder, 'continent', 'lifeExp', 'gdpPercap')
+ assert p1.option['series'][0] == {'data': [59.03, 69.06, 37.479,
+ 68.433, 74.663],
+ 'name': 'lifeExp',
+ 'type': 'bar'}
+ assert p1.option['series'][-1] == {'data': [4426.026, 8955.554, 802.675,
+ 3255.367, 19980.596],
+ 'name': 'gdpPercap',
+ 'type': 'line',
+ 'yAxisIndex': 1}
+ assert p1.option['xAxis']['data'] == ['Africa', 'Americas', 'Asia',
+ 'Europe', 'Oceania']
+
+ p2 = kk.bar_line(gapminder, 'continent', 'lifeExp', 'gdpPercap',
+ is_distinct=True)
+ assert p2.option['series'][0]['data'] == [43.077, 62.485, 28.801,
+ 55.23, 69.12]
+ assert p2.option['series'][-1]['data'] == [2449.008, 5911.315, 779.445,
+ 1601.056, 10039.596]
+ assert p2.option['xAxis']['data'] == ['Africa', 'Americas', 'Asia',
+ 'Europe', 'Oceania']
+
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 4
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@cbf39a0b5f2876a7cf1ce45361be3ba79aba4dc7#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_trendline",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_smooth_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_sort_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_density",
"krisk/tests/test_plot.py::test_scatter",
"krisk/tests/test_plot.py::test_bar_line"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 841 | [
"krisk/plot/make_chart.py",
"krisk/plot/bar_line.py",
"krisk/chart/core.py",
"krisk/plot/api.py"
]
| [
"krisk/plot/make_bar_line.py",
"krisk/plot/make_chart.py",
"krisk/chart/core.py",
"krisk/plot/api.py"
]
|
Azure__azure-cli-1197 | 81372d0c3f60828a26c18c61169bb6e8eb47f995 | 2016-11-01 00:35:23 | 1576ec67f5029db062579da230902a559acbb9fe | tjprescott: Also, one thing I've tried to do is when I "touch" a module I do a quick look through help to make sure there is acceptable help text for all commands and groups since we have a lot of gaps and help reflected from sloppy SDK docstrings. In this case it looks like it's just `az account list-locations` missing a short-summary, but it would be nice to add (save us another issue down the road). | diff --git a/src/azure-cli-core/azure/cli/core/_profile.py b/src/azure-cli-core/azure/cli/core/_profile.py
index 01de246c9..c45f90b50 100644
--- a/src/azure-cli-core/azure/cli/core/_profile.py
+++ b/src/azure-cli-core/azure/cli/core/_profile.py
@@ -167,17 +167,16 @@ class Profile(object):
self._cache_subscriptions_to_local_storage(subscriptions)
- def set_active_subscription(self, subscription_id_or_name):
+ def set_active_subscription(self, subscription): #take id or name
subscriptions = self.load_cached_subscriptions()
- subscription_id_or_name = subscription_id_or_name.lower()
+ subscription = subscription.lower()
result = [x for x in subscriptions
- if subscription_id_or_name == x[_SUBSCRIPTION_ID].lower() or
- subscription_id_or_name == x[_SUBSCRIPTION_NAME].lower()]
+ if subscription in [x[_SUBSCRIPTION_ID].lower(), x[_SUBSCRIPTION_NAME].lower()]]
if len(result) != 1:
raise CLIError('The subscription of "{}" does not exist or has more than'
- ' one match.'.format(subscription_id_or_name))
+ ' one match.'.format(subscription))
for s in subscriptions:
s[_IS_DEFAULT_SUBSCRIPTION] = False
@@ -218,14 +217,14 @@ class Profile(object):
return active_account[_USER_ENTITY][_USER_NAME]
- def get_subscription(self, subscription_id=None):
+ def get_subscription(self, subscription=None):#take id or name
subscriptions = self.load_cached_subscriptions()
if not subscriptions:
raise CLIError("Please run 'az login' to setup account.")
result = [x for x in subscriptions if (
- subscription_id is None and x.get(_IS_DEFAULT_SUBSCRIPTION)) or
- (subscription_id == x.get(_SUBSCRIPTION_ID))]
+ not subscription and x.get(_IS_DEFAULT_SUBSCRIPTION) or
+ subscription and subscription.lower() in [x[_SUBSCRIPTION_ID].lower(), x[_SUBSCRIPTION_NAME].lower()])] #pylint: disable=line-too-long
if len(result) != 1:
raise CLIError("Please run 'az account set' to select active account.")
return result[0]
diff --git a/src/azure-cli-core/azure/cli/core/application.py b/src/azure-cli-core/azure/cli/core/application.py
index 9315940ac..6039ba7c6 100644
--- a/src/azure-cli-core/azure/cli/core/application.py
+++ b/src/azure-cli-core/azure/cli/core/application.py
@@ -168,7 +168,6 @@ class Application(object):
@staticmethod
def _register_builtin_arguments(**kwargs):
global_group = kwargs['global_group']
- global_group.add_argument('--subscription', dest='_subscription_id', help=argparse.SUPPRESS)
global_group.add_argument('--output', '-o', dest='_output_format',
choices=['json', 'tsv', 'list', 'table', 'jsonc'],
default=az_config.get('core', 'output', fallback='json'),
diff --git a/src/command_modules/azure-cli-container/azure/cli/command_modules/container/_params.py b/src/command_modules/azure-cli-container/azure/cli/command_modules/container/_params.py
index 9969227cf..99392b7ea 100644
--- a/src/command_modules/azure-cli-container/azure/cli/command_modules/container/_params.py
+++ b/src/command_modules/azure-cli-container/azure/cli/command_modules/container/_params.py
@@ -4,12 +4,19 @@
#---------------------------------------------------------------------------------------------
from azure.cli.core.commands import CliArgumentType, register_cli_argument
-from azure.cli.core.commands.parameters import (
- name_type,
- resource_group_name_type)
# pylint: disable=line-too-long
+target_name = CliArgumentType(
+ options_list=('--target-name', '-n'),
+ help='Name of the Azure Container Service cluster to deploy containers to.'
+)
+
+target_resource_group = CliArgumentType(
+ options_list=('--target-resource-group', '-g'),
+ help='Name of the Azure Container Service cluster\'s resource group.'
+)
+
registry_name = CliArgumentType(
options_list=('--registry-name', '-r'),
help='Azure container registry name. A new Azure container registry is created if omitted or does not exist.'
@@ -45,8 +52,8 @@ vsts_project_name = CliArgumentType(
help='VSTS project name to create the build and release definitions. A new VSTS project is created if omitted or does not exist.'
)
-register_cli_argument('container release create', 'name', name_type)
-register_cli_argument('container release create', 'resource_group_name', resource_group_name_type)
+register_cli_argument('container release create', 'target_name', target_name)
+register_cli_argument('container release create', 'target_resource_group', target_resource_group)
register_cli_argument('container release create', 'registry_name', registry_name)
register_cli_argument('container release create', 'registry_resource_id', registry_resource_id)
register_cli_argument('container release create', 'remote_url', remote_url)
@@ -55,8 +62,8 @@ register_cli_argument('container release create', 'remote_access_token', remote_
register_cli_argument('container release create', 'vsts_account_name', vsts_account_name)
register_cli_argument('container release create', 'vsts_project_name', vsts_project_name)
-register_cli_argument('container build create', 'name', name_type)
-register_cli_argument('container build create', 'resource_group_name', resource_group_name_type)
+register_cli_argument('container build create', 'target_name', target_name)
+register_cli_argument('container build create', 'target_resource_group', target_resource_group)
register_cli_argument('container build create', 'registry_name', registry_name)
register_cli_argument('container build create', 'registry_resource_id', registry_resource_id)
register_cli_argument('container build create', 'remote_url', remote_url)
@@ -65,5 +72,5 @@ register_cli_argument('container build create', 'remote_access_token', remote_ac
register_cli_argument('container build create', 'vsts_account_name', vsts_account_name)
register_cli_argument('container build create', 'vsts_project_name', vsts_project_name)
-register_cli_argument('container release list', 'name', name_type)
-register_cli_argument('container release list', 'resource_group_name', resource_group_name_type)
+register_cli_argument('container release list', 'target_name', target_name)
+register_cli_argument('container release list', 'target_resource_group', target_resource_group)
diff --git a/src/command_modules/azure-cli-container/azure/cli/command_modules/container/custom.py b/src/command_modules/azure-cli-container/azure/cli/command_modules/container/custom.py
index d2e04d50a..aafbefde9 100644
--- a/src/command_modules/azure-cli-container/azure/cli/command_modules/container/custom.py
+++ b/src/command_modules/azure-cli-container/azure/cli/command_modules/container/custom.py
@@ -17,15 +17,13 @@ from azure.cli.core._profile import Profile
from azure.cli.core._util import CLIError
logger = _logging.get_az_logger(__name__)
-BASE_URL = "https://management.azure.com"
-RESOURCE_BASE_URL = "/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}"
-CONTAINER_SERVICE_BASE_URL = RESOURCE_BASE_URL + "/providers/Microsoft.ContainerService"
-CONTAINER_SERVICE_RESOURCE_URL = (CONTAINER_SERVICE_BASE_URL +
- "/containerServices/{container_service_name}")
-VISUAL_STUDIO_BASE_URL = RESOURCE_BASE_URL + "/providers/microsoft.visualstudio"
-VISUAL_STUDIO_ACCOUNT_URL = VISUAL_STUDIO_BASE_URL + "/account/{vsts_account_name}"
-VISUAL_STUDIO_PROJECT_URL = VISUAL_STUDIO_ACCOUNT_URL + "/project/{vsts_project_name}"
-RP_URL = BASE_URL + CONTAINER_SERVICE_RESOURCE_URL + "/providers/Microsoft.Mindaro"
+BASE_URL = "https://westus.mindaro.microsoft.io"
+SUBSCRIPTION_URL = "/subscriptions/{subscription_id}"
+RESOURCE_BASE_URL = SUBSCRIPTION_URL + "/resourceGroups/{resource_group_name}"
+CONTAINER_SERVICE_PROVIDER = "/providers/Microsoft.ContainerService"
+CONTAINER_SERVICE_RESOURCE_URL = (RESOURCE_BASE_URL + CONTAINER_SERVICE_PROVIDER + "/containerServices/{container_service_name}")
+
+SERVICE_URL = BASE_URL + SUBSCRIPTION_URL
API_VERSION = "2016-11-01-preview"
DOCKERFILE_FILE = 'Dockerfile'
@@ -33,9 +31,14 @@ DOCKER_COMPOSE_FILE = 'docker-compose.yml'
DOCKER_COMPOSE_TEST_FILE = 'docker-compose.test.yml'
DOCKER_COMPOSE_EXPECTED_VERSION = '2'
+DOCKERFILE_FILE = 'Dockerfile'
+DOCKER_COMPOSE_FILE = 'docker-compose.yml'
+DOCKER_COMPOSE_TEST_FILE = 'docker-compose.test.yml'
+DOCKER_COMPOSE_EXPECTED_VERSION = '2'
+
def add_release(
- name,
- resource_group_name,
+ target_name,
+ target_resource_group,
remote_url=None,
remote_branch=None,
remote_access_token=None,
@@ -46,10 +49,10 @@ def add_release(
"""
Creates a build definition that automates building and pushing Docker images to an Azure container registry, and creates a release definition that automates deploying container images from a container registry to an Azure container service. Source repository must define a docker-compose.yml file.
- :param name: Name of the target Azure container service instance to deploy containers to.
- :type name: String
- :param resource_group_name: Name of Azure container service's resource group.
- :type resource_group_name: String
+ :param target_name: Name of the target Azure container service instance to deploy containers to.
+ :type target_name: String
+ :param target_resource_group: Name of Azure container service's resource group.
+ :type target_resource_group: String
:param remote_url: Remote url of the GitHub or VSTS source repository that will be built and deployed. If omitted, a source repository will be searched for in the current working directory.
:type remote_url: String
:param remote_branch: Remote branch of the GitHub or VSTS source repository that will be built and deployed. If omitted refs/heads/master will be selected.
@@ -73,8 +76,8 @@ def add_release(
# Call the RP
return _call_rp_configure_cicd(
- name,
- resource_group_name,
+ target_name,
+ target_resource_group,
vsts_account_name,
vsts_project_name,
registry_name,
@@ -112,8 +115,8 @@ def _get_repo_type(remote_url):
return None
def _call_rp_configure_cicd(
- name,
- resource_group_name,
+ target_name,
+ target_resource_group,
vsts_account_name,
vsts_project_name,
registry_name,
@@ -125,10 +128,10 @@ def _call_rp_configure_cicd(
"""
Calls the RP to build and deploy the service(s) in the cluster.
- :param name: Name of the target Azure container service instance to deploy containers to.
- :type name: String
- :param resource_group_name: Name of Azure container service's resource group.
- :type resource_group_name: String
+ :param target_name: Name of the target Azure container service instance to deploy containers to.
+ :type target_name: String
+ :param target_resource_group: Name of Azure container service's resource group.
+ :type target_resource_group: String
:param remote_url: Remote url of the GitHub or VSTS source repository that will be built and deployed. If omitted, a source repository will be searched for in the current working directory.
:type remote_url: String
:param remote_branch: Remote branch of the GitHub or VSTS source repository that will be built and deployed. If omitted refs/heads/master will be selected.
@@ -150,11 +153,11 @@ def _call_rp_configure_cicd(
cred, subscription_id, _ = profile.get_login_credentials()
o_auth_token = cred.signed_session().headers['Authorization']
-
+ container_service_resource_id = CONTAINER_SERVICE_RESOURCE_URL.format(subscription_id=subscription_id, resource_group_name=target_resource_group, container_service_name=target_name)
data = {
+ 'acsResourceId': container_service_resource_id,
'vstsAccountName': vsts_account_name,
'vstsProjectName': vsts_project_name,
- 'token': o_auth_token,
'registryName': registry_name,
'registryResourceId': registry_resource_id,
'remoteToken': remote_access_token,
@@ -163,10 +166,8 @@ def _call_rp_configure_cicd(
'createRelease' : create_release
}
- configure_ci_cd_url = RP_URL.format(
- subscription_id=subscription_id,
- resource_group_name=resource_group_name,
- container_service_name=name) + '/configureCI?api-version=' + API_VERSION
+ configure_ci_cd_url = SERVICE_URL.format(
+ subscription_id=subscription_id) + '/configureCI?api-version=' + API_VERSION
headers = {'Content-type': 'application/json', 'Authorization': o_auth_token}
req = requests.post(configure_ci_cd_url, data=json.dumps(data), headers=headers, timeout=600)
@@ -179,27 +180,26 @@ def _call_rp_configure_cicd(
json_request = req.json()
return json_request
-def list_releases(name, resource_group_name):
+def list_releases(target_name, target_resource_group):
"""
Lists all the release definitions that are deployed to a given Azure container service.
- :param name: Name of the target Azure container service instance.
- :type name: String
- :param resource_group_name: Name of Azure container service's resource group.
- :type resource_group_name: String
+ :param target_name: Name of the target Azure container service instance.
+ :type target_name: String
+ :param target_resource_group: Name of Azure container service's resource group.
+ :type target_resource_group: String
"""
profile = Profile()
cred, subscription_id, _ = profile.get_login_credentials()
o_auth_token = cred.signed_session().headers['Authorization']
+ container_service_resource_id = CONTAINER_SERVICE_RESOURCE_URL.format(subscription_id=subscription_id, resource_group_name=target_resource_group, container_service_name=target_name)
data = {
- 'token': o_auth_token
+ 'acsResourceId': container_service_resource_id
}
- list_releases_url = RP_URL.format(
- subscription_id=subscription_id,
- resource_group_name=resource_group_name,
- container_service_name=name) + '/configureCI/1/listReleases?api-version=' + API_VERSION
+ list_releases_url = SERVICE_URL.format(
+ subscription_id=subscription_id) + '/listReleases?api-version=' + API_VERSION
headers = {'Content-type': 'application/json', 'Authorization': o_auth_token}
req = requests.post(list_releases_url, data=json.dumps(data), headers=headers, timeout=600)
@@ -308,8 +308,8 @@ def _check_registry_information(registry_name, registry_resource_id):
raise CLIError("Please provide only one of registry-name and registry-resource-id, not both.")
def add_ci(
- name,
- resource_group_name,
+ target_name,
+ target_resource_group,
remote_url=None,
remote_branch=None,
remote_access_token=None,
@@ -345,8 +345,8 @@ def add_ci(
# Call the RP
return _call_rp_configure_cicd(
- name,
- resource_group_name,
+ target_name,
+ target_resource_group,
vsts_account_name,
vsts_project_name,
registry_name,
diff --git a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_help.py b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_help.py
index 3752e4c92..c5eca10cb 100644
--- a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_help.py
+++ b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_help.py
@@ -22,4 +22,16 @@ helps['login'] = """
helps['account'] = """
type: group
short-summary: Manages stored and default subscriptions
-"""
\ No newline at end of file
+"""
+
+helps['account list-locations'] = """
+ type: command
+ short-summary: list supported regions of the current subscription
+"""
+
+helps['account show'] = """
+ type: command
+ short-summary: show detail of the current subscription
+"""
+
+
diff --git a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_params.py b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_params.py
index b6ab41532..f6d727a03 100644
--- a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_params.py
+++ b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_params.py
@@ -23,5 +23,5 @@ register_cli_argument('login', 'tenant', options_list=('--tenant', '-t'), help='
register_cli_argument('logout', 'username', help='account user, if missing, logout the current active account')
-register_cli_argument('account', 'subscription_name_or_id', options_list=('--name', '-n'), help='Name or ID of subscription.', completer=get_subscription_id_list)
-register_cli_argument('account list', 'list_all', options_list=('--all',), help='List all subscriptions', action='store_true')
+register_cli_argument('account', 'subscription', help='Name or ID of subscription.', completer=get_subscription_id_list)
+register_cli_argument('account list', 'list_all', options_list=('--all',), help='List all subscriptions across all cloud environments', action='store_true')
diff --git a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/commands.py b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/commands.py
index 402b3f313..b10919682 100644
--- a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/commands.py
+++ b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/commands.py
@@ -11,6 +11,7 @@ from .custom import (login,
logout,
list_locations,
list_subscriptions,
+ show_subscription,
set_active_subscription,
account_clear)
@@ -18,7 +19,9 @@ cli_command('login', login)
cli_command('logout', logout)
cli_command('account list', list_subscriptions)
+cli_command('account show', show_subscription)
cli_command('account set', set_active_subscription)
cli_command('account clear', account_clear)
cli_command('account list-locations', list_locations)
+
diff --git a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/custom.py b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/custom.py
index 7a6912731..338049b18 100644
--- a/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/custom.py
+++ b/src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/custom.py
@@ -27,12 +27,16 @@ def list_subscriptions(list_all=False): # pylint: disable=redefined-builtin
sub['cloudName'] = sub.pop('environmentName', None)
return [sub for sub in subscriptions if list_all or sub['cloudName'] == CLOUD.name]
-def set_active_subscription(subscription_name_or_id):
+def show_subscription(subscription=None):
+ profile = Profile()
+ return profile.get_subscription(subscription)
+
+def set_active_subscription(subscription):
'''Set the current subscription'''
if not id:
raise CLIError('Please provide subscription id or unique name.')
profile = Profile()
- profile.set_active_subscription(subscription_name_or_id)
+ profile.set_active_subscription(subscription)
def account_clear():
'''Clear all stored subscriptions. To clear individual, use \'logout\''''
| How do I view the current account? `az account show` is not valid...
`azure account show` lets me see my active subscription... what's the equivalent in `azure-cli`?
IMO, `az account show` should just work
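The patch above adds exactly this: an `account show` command backed by `Profile.get_subscription`, which resolves either a subscription name or ID (case-insensitively) and falls back to the default subscription when no argument is given. A self-contained sketch of that matching logic — the function name is illustrative, and the dict shape mirrors the cached-profile entries used in the patch:

``` python
def pick_subscription(subscriptions, subscription=None):
    # With no argument, return the subscription flagged as default;
    # otherwise match on ID or name, ignoring case.
    matches = [s for s in subscriptions
               if (not subscription and s.get('isDefault')) or
                  (subscription and subscription.lower() in
                   (s['id'].lower(), s['name'].lower()))]
    if len(matches) != 1:
        raise ValueError('subscription not found or ambiguous: %r'
                         % subscription)
    return matches[0]

subs = [{'id': '1', 'name': 'Pay-As-You-Go', 'isDefault': True},
        {'id': '2', 'name': 'MSDN', 'isDefault': False}]
assert pick_subscription(subs)['name'] == 'Pay-As-You-Go'   # default
assert pick_subscription(subs, 'msdn')['id'] == '2'         # by name, any case
```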
| Azure/azure-cli | diff --git a/src/azure-cli-core/azure/cli/core/tests/test_profile.py b/src/azure-cli-core/azure/cli/core/tests/test_profile.py
index 64a05fd98..8d5fa3b31 100644
--- a/src/azure-cli-core/azure/cli/core/tests/test_profile.py
+++ b/src/azure-cli-core/azure/cli/core/tests/test_profile.py
@@ -10,6 +10,7 @@ import mock
from azure.mgmt.resource.subscriptions.models import (SubscriptionState, Subscription,
SubscriptionPolicies, spendingLimit)
from azure.cli.core._profile import Profile, CredsCache, SubscriptionFinder, CLOUD
+from azure.cli.core._util import CLIError
class Test_Profile(unittest.TestCase):
@@ -164,6 +165,24 @@ class Test_Profile(unittest.TestCase):
self.assertFalse(subscription2['isDefault'])
self.assertTrue(subscription1['isDefault'])
+ def test_get_subscription(self):
+ storage_mock = {'subscriptions': None}
+ profile = Profile(storage_mock)
+
+ consolidated = Profile._normalize_properties(self.user1,
+ [self.subscription1],
+ False)
+ profile._set_subscriptions(consolidated)
+
+ self.assertEqual(self.display_name1, profile.get_subscription()['name'])
+ self.assertEqual(self.display_name1,
+ profile.get_subscription(subscription=self.display_name1)['name'])
+
+ sub_id = self.id1.split('/')[-1]
+ self.assertEqual(sub_id, profile.get_subscription()['id'])
+ self.assertEqual(sub_id, profile.get_subscription(subscription=sub_id)['id'])
+ self.assertRaises(CLIError, profile.get_subscription, "random_id")
+
@mock.patch('azure.cli.core._profile._load_tokens_from_file', autospec=True)
def test_get_current_account_user(self, mock_read_cred_file):
#setup
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 8
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libffi-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.2
applicationinsights==0.10.0
argcomplete==1.3.0
astroid==1.4.9
attrs==22.2.0
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_context&subdirectory=src/command_modules/azure-cli-context
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
-e git+https://github.com/Azure/azure-cli.git@81372d0c3f60828a26c18c61169bb6e8eb47f995#egg=azure_cli_webapp&subdirectory=src/command_modules/azure-cli-webapp
azure-common==1.1.4
azure-graphrbac==0.30.0rc6
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-compute==0.30.0rc6
azure-mgmt-dns==0.30.0rc6
azure-mgmt-iothub==0.1.0
azure-mgmt-keyvault==0.30.0
azure-mgmt-network==0.30.0rc6
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.0rc6
azure-mgmt-storage==0.30.0rc6
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.30.1
azure-nspkg==3.0.2
azure-storage==0.33.0
bcrypt==4.0.1
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.3.7
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycparser==2.21
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
PyNaCl==1.5.0
pyOpenSSL==16.1.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.16
vcrpy==1.7.4
wrapt==1.16.0
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.2
- applicationinsights==0.10.0
- argcomplete==1.3.0
- astroid==1.4.9
- attrs==22.2.0
- azure-common==1.1.4
- azure-graphrbac==0.30.0rc6
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-compute==0.30.0rc6
- azure-mgmt-dns==0.30.0rc6
- azure-mgmt-iothub==0.1.0
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-network==0.30.0rc6
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.0rc6
- azure-mgmt-storage==0.30.0rc6
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.30.1
- azure-nspkg==3.0.2
- azure-storage==0.33.0
- bcrypt==4.0.1
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.3.7
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycparser==2.21
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pynacl==1.5.0
- pyopenssl==16.1.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- secretstorage==3.3.3
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.7.4
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_get_subscription"
]
| []
| [
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_create_token_cache",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_credscache_add_new_sp_creds",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_credscache_new_token_added_by_adal",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_credscache_remove_creds",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_id",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_find_subscriptions_through_interactive_flow",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_get_current_account_user",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_get_login_credentials",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_get_login_credentials_for_graph_client",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_load_cached_tokens",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_logout",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_logout_all",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_normalize",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_set_active_subscription",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_update_add_two_different_subscriptions",
"src/azure-cli-core/azure/cli/core/tests/test_profile.py::Test_Profile::test_update_with_same_subscription_added_twice"
]
| []
| MIT License | 842 | [
"src/azure-cli-core/azure/cli/core/application.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/commands.py",
"src/azure-cli-core/azure/cli/core/_profile.py",
"src/command_modules/azure-cli-container/azure/cli/command_modules/container/_params.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_help.py",
"src/command_modules/azure-cli-container/azure/cli/command_modules/container/custom.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/custom.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_params.py"
]
| [
"src/azure-cli-core/azure/cli/core/application.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/commands.py",
"src/azure-cli-core/azure/cli/core/_profile.py",
"src/command_modules/azure-cli-container/azure/cli/command_modules/container/_params.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_help.py",
"src/command_modules/azure-cli-container/azure/cli/command_modules/container/custom.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/custom.py",
"src/command_modules/azure-cli-profile/azure/cli/command_modules/profile/_params.py"
]
|
tornadoweb__tornado-1873 | 9bdc3176741114ffeb1048bded5d77d9e4337bbd | 2016-11-01 20:32:31 | ecd8968c5135b810cd607b5902dda2cd32122b39 | diff --git a/tornado/iostream.py b/tornado/iostream.py
index bcf44414..fc419dcf 100644
--- a/tornado/iostream.py
+++ b/tornado/iostream.py
@@ -82,6 +82,8 @@ _ERRNO_INPROGRESS = (errno.EINPROGRESS,)
if hasattr(errno, "WSAEINPROGRESS"):
_ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) # type: ignore
+_WINDOWS = sys.platform.startswith('win')
+
class StreamClosedError(IOError):
"""Exception raised by `IOStream` methods when the stream is closed.
@@ -158,11 +160,14 @@ class BaseIOStream(object):
self.max_buffer_size // 2)
self.max_write_buffer_size = max_write_buffer_size
self.error = None
- self._read_buffer = collections.deque()
- self._write_buffer = collections.deque()
+ self._read_buffer = bytearray()
+ self._read_buffer_pos = 0
self._read_buffer_size = 0
+ self._write_buffer = bytearray()
+ self._write_buffer_pos = 0
self._write_buffer_size = 0
self._write_buffer_frozen = False
+ self._pending_writes_while_frozen = []
self._read_delimiter = None
self._read_regex = None
self._read_max_bytes = None
@@ -373,21 +378,16 @@ class BaseIOStream(object):
.. versionchanged:: 4.0
Now returns a `.Future` if no callback is given.
"""
- assert isinstance(data, bytes)
self._check_closed()
- # We use bool(_write_buffer) as a proxy for write_buffer_size>0,
- # so never put empty strings in the buffer.
if data:
if (self.max_write_buffer_size is not None and
self._write_buffer_size + len(data) > self.max_write_buffer_size):
raise StreamBufferFullError("Reached maximum write buffer size")
- # Break up large contiguous strings before inserting them in the
- # write buffer, so we don't have to recopy the entire thing
- # as we slice off pieces to send to the socket.
- WRITE_BUFFER_CHUNK_SIZE = 128 * 1024
- for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE):
- self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE])
- self._write_buffer_size += len(data)
+ if self._write_buffer_frozen:
+ self._pending_writes_while_frozen.append(data)
+ else:
+ self._write_buffer += data
+ self._write_buffer_size += len(data)
if callback is not None:
self._write_callback = stack_context.wrap(callback)
future = None
@@ -396,7 +396,7 @@ class BaseIOStream(object):
future.add_done_callback(lambda f: f.exception())
if not self._connecting:
self._handle_write()
- if self._write_buffer:
+ if self._write_buffer_size:
self._add_io_state(self.io_loop.WRITE)
self._maybe_add_error_listener()
return future
@@ -466,6 +466,7 @@ class BaseIOStream(object):
# if the IOStream object is kept alive by a reference cycle.
# TODO: Clear the read buffer too; it currently breaks some tests.
self._write_buffer = None
+ self._write_buffer_size = 0
def reading(self):
"""Returns true if we are currently reading from the stream."""
@@ -473,7 +474,7 @@ class BaseIOStream(object):
def writing(self):
"""Returns true if we are currently writing to the stream."""
- return bool(self._write_buffer)
+ return self._write_buffer_size > 0
def closed(self):
"""Returns true if the stream has been closed."""
@@ -743,7 +744,7 @@ class BaseIOStream(object):
break
if chunk is None:
return 0
- self._read_buffer.append(chunk)
+ self._read_buffer += chunk
self._read_buffer_size += len(chunk)
if self._read_buffer_size > self.max_buffer_size:
gen_log.error("Reached maximum read buffer size")
@@ -791,30 +792,25 @@ class BaseIOStream(object):
# since large merges are relatively expensive and get undone in
# _consume().
if self._read_buffer:
- while True:
- loc = self._read_buffer[0].find(self._read_delimiter)
- if loc != -1:
- delimiter_len = len(self._read_delimiter)
- self._check_max_bytes(self._read_delimiter,
- loc + delimiter_len)
- return loc + delimiter_len
- if len(self._read_buffer) == 1:
- break
- _double_prefix(self._read_buffer)
+ loc = self._read_buffer.find(self._read_delimiter,
+ self._read_buffer_pos)
+ if loc != -1:
+ loc -= self._read_buffer_pos
+ delimiter_len = len(self._read_delimiter)
+ self._check_max_bytes(self._read_delimiter,
+ loc + delimiter_len)
+ return loc + delimiter_len
self._check_max_bytes(self._read_delimiter,
- len(self._read_buffer[0]))
+ self._read_buffer_size)
elif self._read_regex is not None:
if self._read_buffer:
- while True:
- m = self._read_regex.search(self._read_buffer[0])
- if m is not None:
- self._check_max_bytes(self._read_regex, m.end())
- return m.end()
- if len(self._read_buffer) == 1:
- break
- _double_prefix(self._read_buffer)
- self._check_max_bytes(self._read_regex,
- len(self._read_buffer[0]))
+ m = self._read_regex.search(self._read_buffer,
+ self._read_buffer_pos)
+ if m is not None:
+ loc = m.end() - self._read_buffer_pos
+ self._check_max_bytes(self._read_regex, loc)
+ return loc
+ self._check_max_bytes(self._read_regex, self._read_buffer_size)
return None
def _check_max_bytes(self, delimiter, size):
@@ -824,35 +820,55 @@ class BaseIOStream(object):
"delimiter %r not found within %d bytes" % (
delimiter, self._read_max_bytes))
+ def _freeze_write_buffer(self, size):
+ self._write_buffer_frozen = size
+
+ def _unfreeze_write_buffer(self):
+ self._write_buffer_frozen = False
+ self._write_buffer += b''.join(self._pending_writes_while_frozen)
+ self._write_buffer_size += sum(map(len, self._pending_writes_while_frozen))
+ self._pending_writes_while_frozen[:] = []
+
+ def _got_empty_write(self, size):
+ """
+ Called when a non-blocking write() failed writing anything.
+ Can be overridden in subclasses.
+ """
+
def _handle_write(self):
- while self._write_buffer:
+ while self._write_buffer_size:
+ assert self._write_buffer_size >= 0
try:
- if not self._write_buffer_frozen:
+ start = self._write_buffer_pos
+ if self._write_buffer_frozen:
+ size = self._write_buffer_frozen
+ elif _WINDOWS:
# On windows, socket.send blows up if given a
# write buffer that's too large, instead of just
# returning the number of bytes it was able to
# process. Therefore we must not call socket.send
# with more than 128KB at a time.
- _merge_prefix(self._write_buffer, 128 * 1024)
- num_bytes = self.write_to_fd(self._write_buffer[0])
+ size = 128 * 1024
+ else:
+ size = self._write_buffer_size
+ num_bytes = self.write_to_fd(
+ memoryview(self._write_buffer)[start:start + size])
if num_bytes == 0:
- # With OpenSSL, if we couldn't write the entire buffer,
- # the very same string object must be used on the
- # next call to send. Therefore we suppress
- # merging the write buffer after an incomplete send.
- # A cleaner solution would be to set
- # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is
- # not yet accessible from python
- # (http://bugs.python.org/issue8240)
- self._write_buffer_frozen = True
+ self._got_empty_write(size)
break
- self._write_buffer_frozen = False
- _merge_prefix(self._write_buffer, num_bytes)
- self._write_buffer.popleft()
+ self._write_buffer_pos += num_bytes
self._write_buffer_size -= num_bytes
+ # Amortized O(1) shrink
+ # (this heuristic is implemented natively in Python 3.4+
+ # but is replicated here for Python 2)
+ if self._write_buffer_pos > self._write_buffer_size:
+ del self._write_buffer[:self._write_buffer_pos]
+ self._write_buffer_pos = 0
+ if self._write_buffer_frozen:
+ self._unfreeze_write_buffer()
except (socket.error, IOError, OSError) as e:
if e.args[0] in _ERRNO_WOULDBLOCK:
- self._write_buffer_frozen = True
+ self._got_empty_write(size)
break
else:
if not self._is_connreset(e):
@@ -863,7 +879,7 @@ class BaseIOStream(object):
self.fileno(), e)
self.close(exc_info=True)
return
- if not self._write_buffer:
+ if not self._write_buffer_size:
if self._write_callback:
callback = self._write_callback
self._write_callback = None
@@ -874,11 +890,23 @@ class BaseIOStream(object):
future.set_result(None)
def _consume(self, loc):
+ # Consume loc bytes from the read buffer and return them
if loc == 0:
return b""
- _merge_prefix(self._read_buffer, loc)
+ assert loc <= self._read_buffer_size
+ # Slice the bytearray buffer into bytes, without intermediate copying
+ b = (memoryview(self._read_buffer)
+ [self._read_buffer_pos:self._read_buffer_pos + loc]
+ ).tobytes()
+ self._read_buffer_pos += loc
self._read_buffer_size -= loc
- return self._read_buffer.popleft()
+ # Amortized O(1) shrink
+ # (this heuristic is implemented natively in Python 3.4+
+ # but is replicated here for Python 2)
+ if self._read_buffer_pos > self._read_buffer_size:
+ del self._read_buffer[:self._read_buffer_pos]
+ self._read_buffer_pos = 0
+ return b
def _check_closed(self):
if self.closed():
@@ -1251,6 +1279,17 @@ class SSLIOStream(IOStream):
def writing(self):
return self._handshake_writing or super(SSLIOStream, self).writing()
+ def _got_empty_write(self, size):
+ # With OpenSSL, if we couldn't write the entire buffer,
+ # the very same string object must be used on the
+ # next call to send. Therefore we suppress
+ # merging the write buffer after an incomplete send.
+ # A cleaner solution would be to set
+ # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is
+ # not yet accessible from python
+ # (http://bugs.python.org/issue8240)
+ self._freeze_write_buffer(size)
+
def _do_ssl_handshake(self):
# Based on code from test_ssl.py in the python stdlib
try:
@@ -1498,53 +1537,6 @@ class PipeIOStream(BaseIOStream):
return chunk
-def _double_prefix(deque):
- """Grow by doubling, but don't split the second chunk just because the
- first one is small.
- """
- new_len = max(len(deque[0]) * 2,
- (len(deque[0]) + len(deque[1])))
- _merge_prefix(deque, new_len)
-
-
-def _merge_prefix(deque, size):
- """Replace the first entries in a deque of strings with a single
- string of up to size bytes.
-
- >>> d = collections.deque(['abc', 'de', 'fghi', 'j'])
- >>> _merge_prefix(d, 5); print(d)
- deque(['abcde', 'fghi', 'j'])
-
- Strings will be split as necessary to reach the desired size.
- >>> _merge_prefix(d, 7); print(d)
- deque(['abcdefg', 'hi', 'j'])
-
- >>> _merge_prefix(d, 3); print(d)
- deque(['abc', 'defg', 'hi', 'j'])
-
- >>> _merge_prefix(d, 100); print(d)
- deque(['abcdefghij'])
- """
- if len(deque) == 1 and len(deque[0]) <= size:
- return
- prefix = []
- remaining = size
- while deque and remaining > 0:
- chunk = deque.popleft()
- if len(chunk) > remaining:
- deque.appendleft(chunk[remaining:])
- chunk = chunk[:remaining]
- prefix.append(chunk)
- remaining -= len(chunk)
- # This data structure normally just contains byte strings, but
- # the unittest gets messy if it doesn't use the default str() type,
- # so do the merge based on the type of data that's actually present.
- if prefix:
- deque.appendleft(type(prefix[0])().join(prefix))
- if not deque:
- deque.appendleft(b"")
-
-
def doctests():
import doctest
return doctest.DocTestSuite()
| Suboptimal performance sending large messages
I notice that when I send large messages over a TCP `IOStream` I get considerably less bandwidth than my network provides (about 10% of the total). A bit of profiling shows that a large fraction of the time is spent in `str.join` and `str.find`.
Below is a tiny example that reproduces this on localhost.
### Server
``` python
from tornado.tcpserver import TCPServer
from tornado.ioloop import IOLoop
from tornado import gen
import numpy as np
data = bytes(np.random.randint(0, 255, dtype='u1', size=100000000).data) # 100M
sentinel = b'--my-sentinel--'
class MyServer(TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
yield stream.write(data + sentinel)
s = MyServer()
s.listen(8000)
IOLoop.current().start()
```
### Client
``` python
from tornado.tcpclient import TCPClient
from tornado.ioloop import IOLoop
from tornado import gen
sentinel = b'--my-sentinel--'
@gen.coroutine
def f():
client = TCPClient()
stream = yield client.connect('localhost', 8000,
max_buffer_size=int(1e9))
msg = yield stream.read_until(sentinel)
print(len(msg))
if __name__ == '__main__':
IOLoop().run_sync(f)
```
### Add a bit of network delay
This adds 10 ms of delay to localhost, just to simulate a real network:
```
# tc qdisc add dev lo root netem delay 10ms
```
### Remove network delay when done
```
# tc qdisc del dev lo root netem
```
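
The report does not show how the profiles below were collected; one plausible harness (hypothetical, assuming the client above is saved as `client.py`, which matches the `client.py:7(f)` entries in the output) is to drive it under `cProfile` while the server is running:

``` python
# Hypothetical profiling harness; not shown in the original report.
import cProfile
import pstats

from tornado.ioloop import IOLoop

from client import f  # the coroutine from the client example above

# Profile one full transfer and dump the stats to a file.
cProfile.run('IOLoop().run_sync(f)', 'client.prof')

# Print the hottest functions, mirroring the tables below.
pstats.Stats('client.prof').sort_stats('tottime').print_stats(15)
```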
### Profiler output
```
ncalls tottime percall cumtime percall filename:lineno(function)
92 0.430 0.005 0.430 0.005 {method 'find' of 'str' objects}
46 0.249 0.005 0.249 0.005 {method 'join' of 'str' objects}
25 0.216 0.009 0.216 0.009 {method 'poll' of 'select.epoll' objects}
1553 0.019 0.000 0.019 0.000 {method 'recv' of '_socket.socket' objects}
46 0.008 0.000 0.259 0.006 iostream.py:1501(_double_prefix)
1553 0.002 0.000 0.022 0.000 iostream.py:721(_read_to_buffer)
22 0.002 0.000 0.715 0.032 iostream.py:585(_read_to_buffer_loop)
47 0.001 0.000 0.251 0.005 iostream.py:1510(_merge_prefix)
1553 0.001 0.000 0.020 0.000 iostream.py:1010(read_from_fd)
1 0.001 0.001 0.934 0.934 ioloop.py:746(start)
48 0.001 0.000 0.690 0.014 iostream.py:772(_find_read_pos)
6549 0.000 0.000 0.000 0.000 {len}
Ordered by: cumulative time
ncalls tottime percall cumtime percall filename:lineno(function)
1 0.000 0.000 0.957 0.957 <string>:1(<module>)
1 0.000 0.000 0.957 0.957 ioloop.py:400(run_sync)
1 0.001 0.001 0.957 0.957 ioloop.py:746(start)
29 0.000 0.000 0.757 0.026 stack_context.py:271(null_wrapper)
23 0.000 0.000 0.756 0.033 iostream.py:497(_handle_events)
22 0.000 0.000 0.755 0.034 iostream.py:645(_handle_read)
23 0.002 0.000 0.755 0.033 iostream.py:585(_read_to_buffer_loop)
49 0.001 0.000 0.727 0.015 iostream.py:772(_find_read_pos)
94 0.454 0.005 0.454 0.005 {method 'find' of 'str' objects}
47 0.008 0.000 0.272 0.006 iostream.py:1501(_double_prefix)
48 0.002 0.000 0.264 0.005 iostream.py:1510(_merge_prefix)
47 0.261 0.006 0.261 0.006 {method 'join' of 'str' objects}
26 0.198 0.008 0.198 0.008 {method 'poll' of 'select.epoll' objects}
1553 0.002 0.000 0.026 0.000 iostream.py:721(_read_to_buffer)
1553 0.001 0.000 0.023 0.000 iostream.py:1010(read_from_fd)
1553 0.022 0.000 0.022 0.000 {method 'recv' of '_socket.socket' objects}
5 0.000 0.000 0.001 0.000 ioloop.py:594(_run_callback)
3 0.000 0.000 0.001 0.000 client.py:7(f)
```
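
The patch above addresses this by dropping the deque-of-strings buffers (and the `_merge_prefix`/`_double_prefix` helpers) in favour of a single `bytearray` per direction plus an index of consumed bytes, shrinking the buffer lazily so the amortized cost per byte stays O(1). A condensed, standalone sketch of the read-buffer technique (an illustration, not the actual tornado class):

``` python
class ReadBuffer(object):
    """Flat read buffer: append at the end, consume from a moving offset."""

    def __init__(self):
        self._buf = bytearray()
        self._pos = 0  # index of the first unconsumed byte

    def feed(self, data):
        # Appending to a bytearray is amortized O(1), unlike merging a
        # deque of byte strings into one new string on every read.
        self._buf += data

    def __len__(self):
        return len(self._buf) - self._pos

    def consume(self, n):
        assert n <= len(self)
        # Slice out the result without an intermediate copy, as the
        # patch does with memoryview.
        out = memoryview(self._buf)[self._pos:self._pos + n].tobytes()
        self._pos += n
        # Amortized O(1) shrink: discard the consumed prefix only once
        # it outweighs the live data (the same heuristic as the patch).
        if self._pos > len(self):
            del self._buf[:self._pos]
            self._pos = 0
        return out
```

The write side of the patch is symmetric, with an extra "freeze" size so that after a short write an SSL socket is re-offered exactly the same bytes, as OpenSSL requires.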
| tornadoweb/tornado | diff --git a/tornado/test/iostream_test.py b/tornado/test/iostream_test.py
index 6e15136c..9680cb1c 100644
--- a/tornado/test/iostream_test.py
+++ b/tornado/test/iostream_test.py
@@ -602,6 +602,17 @@ class TestIOStreamMixin(object):
server.close()
client.close()
+ def test_write_memoryview(self):
+ server, client = self.make_iostream_pair()
+ try:
+ client.read_bytes(4, self.stop)
+ server.write(memoryview(b"hello"))
+ data = self.wait()
+ self.assertEqual(data, b"hell")
+ finally:
+ server.close()
+ client.close()
+
def test_read_bytes_partial(self):
server, client = self.make_iostream_pair()
try:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"futures",
"mock",
"monotonic",
"trollius",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
futures==2.2.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@9bdc3176741114ffeb1048bded5d77d9e4337bbd#egg=tornado
trollius==2.1.post2
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- futures==2.2.0
- mock==5.2.0
- monotonic==1.6
- six==1.17.0
- trollius==2.1.post2
prefix: /opt/conda/envs/tornado
| [
"tornado/test/iostream_test.py::TestIOStream::test_write_memoryview",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_write_memoryview",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_write_memoryview"
]
| [
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_connection_closed",
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_read_until_close",
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_read_zero_bytes",
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_write_while_connecting",
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_future_interface",
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_future_close_while_reading",
"tornado/test/iostream_test.py::TestIOStreamWebMixin::test_future_read_until_close",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_streaming_callback_with_data_in_buffer",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_write_zero_bytes",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_connection_refused",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_gaierror",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_callback_error",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_streaming_callback",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_streaming_until_close",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_streaming_until_close_future",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_future_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_close_buffered_data",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_close_with_error",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_streaming_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_large_read_until",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_close_callback_with_pending_read",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_inline_read_error",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_async_read_error_logging",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_future_close_callback",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_write_memoryview",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_bytes_partial",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_max_bytes",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_regex_max_bytes",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_regex_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_read_until_regex_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_small_reads_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_small_read_untils_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStreamMixin::test_flow_control",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_max_bytes",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_regex_max_bytes",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_regex_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_regex_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_inline_read_error",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_max_bytes",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_regex_max_bytes",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_regex_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_regex_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_streaming_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_write_zero_bytes",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_inline_read_error",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_max_bytes",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_regex_max_bytes",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_regex_max_bytes_ignores_extra",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_regex_max_bytes_inline",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_streaming_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_write_zero_bytes"
]
| [
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_connection_closed",
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_future_close_while_reading",
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_future_interface",
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_future_read_until_close",
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_read_until_close",
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_read_zero_bytes",
"tornado/test/iostream_test.py::TestIOStreamWebHTTP::test_write_while_connecting",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_connection_closed",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_future_close_while_reading",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_future_interface",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_future_read_until_close",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_read_until_close",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_read_zero_bytes",
"tornado/test/iostream_test.py::TestIOStreamWebHTTPS::test_write_while_connecting",
"tornado/test/iostream_test.py::TestIOStream::test_async_read_error_logging",
"tornado/test/iostream_test.py::TestIOStream::test_close_buffered_data",
"tornado/test/iostream_test.py::TestIOStream::test_close_callback_with_pending_read",
"tornado/test/iostream_test.py::TestIOStream::test_connection_refused",
"tornado/test/iostream_test.py::TestIOStream::test_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStream::test_flow_control",
"tornado/test/iostream_test.py::TestIOStream::test_future_close_callback",
"tornado/test/iostream_test.py::TestIOStream::test_future_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStream::test_gaierror",
"tornado/test/iostream_test.py::TestIOStream::test_inline_read_error",
"tornado/test/iostream_test.py::TestIOStream::test_large_read_until",
"tornado/test/iostream_test.py::TestIOStream::test_read_bytes_partial",
"tornado/test/iostream_test.py::TestIOStream::test_read_callback_error",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStream::test_read_until_close_with_error",
"tornado/test/iostream_test.py::TestIOStream::test_small_read_untils_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStream::test_small_reads_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStream::test_streaming_callback",
"tornado/test/iostream_test.py::TestIOStream::test_streaming_callback_with_data_in_buffer",
"tornado/test/iostream_test.py::TestIOStream::test_streaming_read_until_close_after_close",
"tornado/test/iostream_test.py::TestIOStream::test_streaming_until_close",
"tornado/test/iostream_test.py::TestIOStream::test_streaming_until_close_future",
"tornado/test/iostream_test.py::TestIOStream::test_write_zero_bytes",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_async_read_error_logging",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_close_buffered_data",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_close_callback_with_pending_read",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_connection_refused",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_flow_control",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_future_close_callback",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_future_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_gaierror",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_large_read_until",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_bytes_partial",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_callback_error",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_read_until_close_with_error",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_small_read_untils_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_small_reads_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_streaming_callback",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_streaming_callback_with_data_in_buffer",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_streaming_until_close",
"tornado/test/iostream_test.py::TestIOStreamSSL::test_streaming_until_close_future",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_async_read_error_logging",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_close_buffered_data",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_close_callback_with_pending_read",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_connection_refused",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_flow_control",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_future_close_callback",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_future_delayed_close_callback",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_gaierror",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_large_read_until",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_bytes_partial",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_callback_error",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_read_until_close_with_error",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_small_read_untils_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_small_reads_from_large_buffer",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_streaming_callback",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_streaming_callback_with_data_in_buffer",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_streaming_until_close",
"tornado/test/iostream_test.py::TestIOStreamSSLContext::test_streaming_until_close_future",
"tornado/test/iostream_test.py::TestIOStreamStartTLS::test_check_hostname",
"tornado/test/iostream_test.py::TestIOStreamStartTLS::test_handshake_fail",
"tornado/test/iostream_test.py::TestIOStreamStartTLS::test_start_tls_smtp",
"tornado/test/iostream_test.py::WaitForHandshakeTest::test_wait_for_handshake_already_connected",
"tornado/test/iostream_test.py::WaitForHandshakeTest::test_wait_for_handshake_already_waiting_error",
"tornado/test/iostream_test.py::WaitForHandshakeTest::test_wait_for_handshake_callback",
"tornado/test/iostream_test.py::WaitForHandshakeTest::test_wait_for_handshake_future",
"tornado/test/iostream_test.py::TestPipeIOStream::test_pipe_iostream",
"tornado/test/iostream_test.py::TestPipeIOStream::test_pipe_iostream_big_write"
]
| []
| Apache License 2.0 | 843 | [
"tornado/iostream.py"
]
| [
"tornado/iostream.py"
]
|