instance_id
stringlengths 10
57
| base_commit
stringlengths 40
40
| created_at
stringdate 2014-04-30 14:58:36
2025-04-30 20:14:11
| environment_setup_commit
stringlengths 40
40
| hints_text
stringlengths 0
273k
| patch
stringlengths 251
7.06M
| problem_statement
stringlengths 11
52.5k
| repo
stringlengths 7
53
| test_patch
stringlengths 231
997k
| meta
dict | version
stringclasses 851
values | install_config
dict | requirements
stringlengths 93
34.2k
⌀ | environment
stringlengths 760
20.5k
⌀ | FAIL_TO_PASS
listlengths 1
9.39k
| FAIL_TO_FAIL
listlengths 0
2.69k
| PASS_TO_PASS
listlengths 0
7.87k
| PASS_TO_FAIL
listlengths 0
192
| license_name
stringclasses 55
values | __index_level_0__
int64 0
21.4k
| before_filepaths
listlengths 1
105
| after_filepaths
listlengths 1
105
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
keredson__peewee-5 | 116c8e37da50b0ccfbf21310a61d978beb91df9c | 2016-07-15 21:57:39 | 86cb70e5cf9695add1e7cde1aa60d461290d71db | diff --git a/peewee.py b/peewee.py
index bdc1506..796265c 100644
--- a/peewee.py
+++ b/peewee.py
@@ -2251,6 +2251,8 @@ if _DictQueryResultWrapper is None:
class ModelQueryResultWrapper(QueryResultWrapper):
def initialize(self, description):
self.column_map, model_set = self.generate_column_map()
+ self._col_set = set(col for col in self.column_meta
+ if isinstance(col, Field))
self.join_list = self.generate_join_list(model_set)
def generate_column_map(self):
@@ -2293,7 +2295,19 @@ class ModelQueryResultWrapper(QueryResultWrapper):
for join in joins[current]:
metadata = join.metadata
if metadata.dest in models or metadata.dest_model in models:
- join_list.append(metadata)
+ if metadata.foreign_key is not None:
+ fk_present = metadata.foreign_key in self._col_set
+ pk_present = metadata.primary_key in self._col_set
+ check = metadata.foreign_key.null and (fk_present or
+ pk_present)
+ else:
+ check = fk_present = pk_present = False
+
+ join_list.append((
+ metadata,
+ check,
+ fk_present,
+ pk_present))
stack.append(join.dest)
return join_list
@@ -2324,13 +2338,23 @@ class ModelQueryResultWrapper(QueryResultWrapper):
def follow_joins(self, collected):
prepared = [collected[self.model]]
- for metadata in self.join_list:
+ for (metadata, check_null, fk_present, pk_present) in self.join_list:
inst = collected[metadata.src]
try:
joined_inst = collected[metadata.dest]
except KeyError:
joined_inst = collected[metadata.dest_model]
+ has_fk = True
+ if check_null:
+ if fk_present:
+ has_fk = inst._data.get(metadata.foreign_key.name)
+ elif pk_present:
+ has_fk = joined_inst._data.get(metadata.primary_key.name)
+
+ if not has_fk:
+ continue
+
# Can we populate a value on the joined instance using the current?
mpk = metadata.primary_key is not None
can_populate_joined_pk = (
@@ -2384,7 +2408,7 @@ class AggregateQueryResultWrapper(ModelQueryResultWrapper):
self.source_to_dest = {}
self.dest_to_source = {}
- for metadata in self.join_list:
+ for (metadata, _, _, _) in self.join_list:
if metadata.is_backref:
att_name = metadata.foreign_key.related_name
else:
| null FKs return non-null objects with all-null attributes
clone of https://github.com/coleifer/peewee/issues/1012:
when doing a left join across a nullable FK, peewee 2.8.1 populates an empty object where the null is, rather than putting a `None` in the attribute. example:
```python
import logging, os, sys
import peewee as pw
logger = logging.getLogger('peewee')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
db_fn = 'example.db'
if os.path.exists(db_fn): os.remove(db_fn)
db = pw.SqliteDatabase(db_fn)
class Person(pw.Model):
name = pw.CharField()
class Meta:
database = db
class Pet(pw.Model):
name = pw.CharField()
owner = pw.ForeignKeyField(db_column='owner_id', rel_model=Person, to_field='id', null=True)
class Meta:
database = db
db.connect()
db.create_tables([Person, Pet])
Pet.create(name='Chance', owner=None)
chance = Pet.select(Pet, Person).join(Person, join_type=pw.JOIN.LEFT_OUTER, on=Pet.owner).get()
print 'chance', chance
print 'chance.owner_id', chance.owner_id
print 'chance.owner', chance.owner
print 'chance.owner.id', chance.owner.id
```
output:
```
$ python test_join_null_fk.py
('CREATE TABLE "person" ("id" INTEGER NOT NULL PRIMARY KEY, "name" VARCHAR(255) NOT NULL)', [])
('CREATE TABLE "pet" ("id" INTEGER NOT NULL PRIMARY KEY, "name" VARCHAR(255) NOT NULL, "owner_id" INTEGER, FOREIGN KEY ("owner_id") REFERENCES "person" ("id"))', [])
('CREATE INDEX "pet_owner_id" ON "pet" ("owner_id")', [])
('INSERT INTO "pet" ("name", "owner_id") VALUES (?, ?)', [u'Chance', None])
('SELECT "t1"."id", "t1"."name", "t1"."owner_id", "t2"."id", "t2"."name" FROM "pet" AS t1 LEFT OUTER JOIN "person" AS t2 ON ("t1"."owner_id" = "t2"."id") LIMIT 1 OFFSET 0', [])
chance <__main__.Pet object at 0x7f9b3c249950>
chance.owner_id None
chance.owner <__main__.Person object at 0x7f9b3c249b90>
chance.owner.id None
```
is this the intended behavior? consensus here is this is a bug and that:
1. `chance.owner` should be `None`, not an empty `Person` object with all null attributes
2. `chance.owner.id` should throw `AttributeError: 'NoneType' object has no attribute 'id'`
we couldn't find any documentation about what this should do one way or the other, nor any old issues that address this.
| keredson/peewee | diff --git a/playhouse/tests/test_models.py b/playhouse/tests/test_models.py
index 90ec728..e23e99d 100644
--- a/playhouse/tests/test_models.py
+++ b/playhouse/tests/test_models.py
@@ -2058,3 +2058,76 @@ class TestDeleteNullableForeignKeys(ModelTestCase):
self.assertEqual(nf2.delete_instance(), 1)
self.assertEqual(nf3.delete_instance(), 1)
self.assertEqual(nf4.delete_instance(), 1)
+
+
+class TestJoinNullableForeignKey(ModelTestCase):
+ requires = [Parent, Orphan, Child]
+
+ def setUp(self):
+ super(TestJoinNullableForeignKey, self).setUp()
+
+ p1 = Parent.create(data='p1')
+ p2 = Parent.create(data='p2')
+ for i in range(1, 3):
+ Child.create(parent=p1, data='child%s-p1' % i)
+ Child.create(parent=p2, data='child%s-p2' % i)
+ Orphan.create(parent=p1, data='orphan%s-p1' % i)
+
+ Orphan.create(data='orphan1-noparent')
+ Orphan.create(data='orphan2-noparent')
+
+ def test_no_empty_instances(self):
+ with self.assertQueryCount(1):
+ query = (Orphan
+ .select(Orphan, Parent)
+ .join(Parent, JOIN.LEFT_OUTER)
+ .order_by(Orphan.id))
+ res = [(orphan.data, orphan.parent is None) for orphan in query]
+
+ self.assertEqual(res, [
+ ('orphan1-p1', False),
+ ('orphan2-p1', False),
+ ('orphan1-noparent', True),
+ ('orphan2-noparent', True),
+ ])
+
+ def test_unselected_fk_pk(self):
+ with self.assertQueryCount(1):
+ query = (Orphan
+ .select(Orphan.data, Parent.data)
+ .join(Parent, JOIN.LEFT_OUTER)
+ .order_by(Orphan.id))
+ res = [(orphan.data, orphan.parent is None) for orphan in query]
+
+ self.assertEqual(res, [
+ ('orphan1-p1', False),
+ ('orphan2-p1', False),
+ ('orphan1-noparent', False),
+ ('orphan2-noparent', False),
+ ])
+
+ def test_non_null_fk_unselected_fk(self):
+ with self.assertQueryCount(1):
+ query = (Child
+ .select(Child.data, Parent.data)
+ .join(Parent, JOIN.LEFT_OUTER)
+ .order_by(Child.id))
+ res = [(child.data, child.parent is None) for child in query]
+
+ self.assertEqual(res, [
+ ('child1-p1', False),
+ ('child1-p2', False),
+ ('child2-p1', False),
+ ('child2-p2', False),
+ ])
+
+ res = [child.parent.data for child in query]
+ self.assertEqual(res, ['p1', 'p2', 'p1', 'p2'])
+
+ res = [(child._data['parent'], child.parent.id) for child in query]
+ self.assertEqual(res, [
+ (None, None),
+ (None, None),
+ (None, None),
+ (None, None),
+ ])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 2.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
-e git+https://github.com/keredson/peewee.git@116c8e37da50b0ccfbf21310a61d978beb91df9c#egg=peewee
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: peewee
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/peewee
| [
"playhouse/tests/test_models.py::TestJoinNullableForeignKey::test_no_empty_instances"
]
| [
"playhouse/tests/test_models.py::TestModelAPIs::test_create_or_get",
"playhouse/tests/test_models.py::TestModelAPIs::test_first",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_cache_invalidated",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_caching",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_fk"
]
| [
"playhouse/tests/test_models.py::TestQueryingModels::test_delete",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert_from",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert_many",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert_many_fallback",
"playhouse/tests/test_models.py::TestQueryingModels::test_limits_offsets",
"playhouse/tests/test_models.py::TestQueryingModels::test_model_iter",
"playhouse/tests/test_models.py::TestQueryingModels::test_raw",
"playhouse/tests/test_models.py::TestQueryingModels::test_raw_fn",
"playhouse/tests/test_models.py::TestQueryingModels::test_scalar",
"playhouse/tests/test_models.py::TestQueryingModels::test_select",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_all",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_get",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_subquery",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_with_bind_to",
"playhouse/tests/test_models.py::TestQueryingModels::test_update",
"playhouse/tests/test_models.py::TestQueryingModels::test_update_subquery",
"playhouse/tests/test_models.py::TestInsertEmptyModel::test_insert_empty",
"playhouse/tests/test_models.py::TestInsertEmptyModel::test_no_pk",
"playhouse/tests/test_models.py::TestModelAPIs::test_callable_related_name",
"playhouse/tests/test_models.py::TestModelAPIs::test_category_select_related_alias",
"playhouse/tests/test_models.py::TestModelAPIs::test_count_transaction",
"playhouse/tests/test_models.py::TestModelAPIs::test_counting",
"playhouse/tests/test_models.py::TestModelAPIs::test_creation",
"playhouse/tests/test_models.py::TestModelAPIs::test_deleting",
"playhouse/tests/test_models.py::TestModelAPIs::test_dirty_from_query",
"playhouse/tests/test_models.py::TestModelAPIs::test_exists",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_exceptions",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_ints",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_object_id",
"playhouse/tests/test_models.py::TestModelAPIs::test_get_exception",
"playhouse/tests/test_models.py::TestModelAPIs::test_get_or_create",
"playhouse/tests/test_models.py::TestModelAPIs::test_get_or_create_extended",
"playhouse/tests/test_models.py::TestModelAPIs::test_meta_get_field_index",
"playhouse/tests/test_models.py::TestModelAPIs::test_meta_rel_for_model",
"playhouse/tests/test_models.py::TestModelAPIs::test_meta_remove_field",
"playhouse/tests/test_models.py::TestModelAPIs::test_modify_model_cause_it_dirty",
"playhouse/tests/test_models.py::TestModelAPIs::test_on_conflict",
"playhouse/tests/test_models.py::TestModelAPIs::test_on_conflict_many",
"playhouse/tests/test_models.py::TestModelAPIs::test_ordering",
"playhouse/tests/test_models.py::TestModelAPIs::test_peek",
"playhouse/tests/test_models.py::TestModelAPIs::test_reading",
"playhouse/tests/test_models.py::TestModelAPIs::test_related_id",
"playhouse/tests/test_models.py::TestModelAPIs::test_related_name",
"playhouse/tests/test_models.py::TestModelAPIs::test_related_name_collision",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_dirty_auto",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_only",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_only_dirty_fields",
"playhouse/tests/test_models.py::TestModelAPIs::test_saving",
"playhouse/tests/test_models.py::TestModelAPIs::test_saving_via_create_gh111",
"playhouse/tests/test_models.py::TestModelAPIs::test_unicode",
"playhouse/tests/test_models.py::TestModelAPIs::test_unicode_issue202",
"playhouse/tests/test_models.py::TestModelAPIs::test_zero_id",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_aggregate_datetime",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_aggregate_int",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_annotate_datetime",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_annotate_int",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_from_multi_table",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_join_on_query",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_subselect",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_subselect_with_column",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_subselect_with_join",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_delete",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_delete_child_queries",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_delete_parent_sql",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_non_pk_fk",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_update",
"playhouse/tests/test_models.py::TestTruncate::test_truncate",
"playhouse/tests/test_models.py::TestManyToMany::test_m2m",
"playhouse/tests/test_models.py::TestManyToMany::test_many_to_many_prefetch",
"playhouse/tests/test_models.py::TestCustomModelOptionsBase::test_custom_model_options_base",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_custom_options",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_db_table",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_option_inheritance",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_order_by_inheritance",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_table_name_function",
"playhouse/tests/test_models.py::TestModelInheritance::test_model_inheritance_attrs",
"playhouse/tests/test_models.py::TestModelInheritance::test_model_inheritance_flow",
"playhouse/tests/test_models.py::TestAliasBehavior::test_alias_with_coerce",
"playhouse/tests/test_models.py::TestModelHash::test_hash",
"playhouse/tests/test_models.py::TestDeleteNullableForeignKeys::test_delete",
"playhouse/tests/test_models.py::TestJoinNullableForeignKey::test_non_null_fk_unselected_fk",
"playhouse/tests/test_models.py::TestJoinNullableForeignKey::test_unselected_fk_pk"
]
| []
| MIT License | 636 | [
"peewee.py"
]
| [
"peewee.py"
]
|
|
cwacek__python-jsonschema-objects-46 | 35cdc2e1e6e1e3978fe2474243f67e4142c93c3d | 2016-07-16 17:35:57 | 03be1567ef25edc27fe36675444bd14da93b0f15 | diff --git a/python_jsonschema_objects/classbuilder.py b/python_jsonschema_objects/classbuilder.py
index 740f2a6..c01fdcc 100644
--- a/python_jsonschema_objects/classbuilder.py
+++ b/python_jsonschema_objects/classbuilder.py
@@ -355,6 +355,9 @@ class LiteralValue(object):
str(self._value)
)
+ def __str__(self):
+ return str(self._value)
+
def validate(self):
info = self.propinfo('__literal__')
@@ -364,16 +367,14 @@ class LiteralValue(object):
if validator is not None:
validator(paramval, self._value, info)
+ def __eq__(self, other):
+ return self._value == other
+
+ def __hash__(self):
+ return hash(self._value)
- def __cmp__(self, other):
- if isinstance(other, six.integer_types):
- return cmp(int(self), other)
- elif isinstance(other, six.string_types):
- return cmp(str(self), other)
- elif isinstance(other, float):
- return cmp(float(self), other)
- else:
- return cmp(id(self), id(other))
+ def __lt__(self, other):
+ return self._value < other
def __int__(self):
return int(self._value)
diff --git a/tox.ini b/tox.ini
index ae66538..be7e5cb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
-envlist = py27, py34
+envlist = py27, py35
[testenv]
;install_command = pip install {opts} {packages}
| Creation of array anonymous objects fails
It seems [classbuilder.py:485](https://github.com/cwacek/python-jsonschema-objects/tree/2a6f5af53e69a99a01d867285dd2f138a26549bd/python_jsonschema_objects/classbuilder.py#L485) is passing the whole array `detail['items']` to `construct` when it should be passing `elem` from the enumeration. | cwacek/python-jsonschema-objects | diff --git a/test/test_regression_8.py b/test/test_regression_8.py
new file mode 100644
index 0000000..42147c3
--- /dev/null
+++ b/test/test_regression_8.py
@@ -0,0 +1,51 @@
+import pytest
+
+import python_jsonschema_objects as pjo
+
+
[email protected]
+def test_instance():
+ schema = {
+ 'title': 'Example',
+ 'properties': {
+ 'stringProp': {'type': 'string'},
+ 'arrayProp': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'string',
+ }
+ }
+ }
+ }
+
+ builder = pjo.ObjectBuilder(schema)
+ ns = builder.build_classes()
+ instance = ns.Example(
+ stringProp='This seems fine',
+ arrayProp=['these', 'are', 'problematic']
+ )
+ return instance
+
+
+def test_string_properties_compare_to_strings(test_instance):
+ test = test_instance.stringProp == "This seems fine"
+ assert test
+
+
+def test_arrays_of_strings_compare_to_strings(test_instance):
+ test = test_instance.arrayProp == ['these', 'are', 'problematic']
+ assert test
+
+
+def test_array_elements_compare_to_types(test_instance):
+ elem = test_instance.arrayProp[0]
+ test = elem == 'these'
+ assert test
+
+def test_repr_shows_property_values(test_instance):
+ expected = "<example/arrayProp_<anonymous_field> these>"
+ assert repr(test_instance.arrayProp[0]) == expected
+
+def test_str_shows_just_strings(test_instance):
+ test = str(test_instance.arrayProp[0])
+ assert test == 'these'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
execnet==1.9.0
importlib-metadata==4.8.3
inflection==0.2.0
iniconfig==1.1.1
jsonschema==2.3.0
Markdown==2.4
packaging==21.3
pandocfilters==1.2
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
-e git+https://github.com/cwacek/python-jsonschema-objects.git@35cdc2e1e6e1e3978fe2474243f67e4142c93c3d#egg=python_jsonschema_objects
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-jsonschema-objects
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- inflection==0.2.0
- iniconfig==1.1.1
- jsonschema==2.3.0
- markdown==2.4
- packaging==21.3
- pandocfilters==1.2
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-jsonschema-objects
| [
"test/test_regression_8.py::test_arrays_of_strings_compare_to_strings",
"test/test_regression_8.py::test_array_elements_compare_to_types"
]
| []
| [
"test/test_regression_8.py::test_string_properties_compare_to_strings",
"test/test_regression_8.py::test_repr_shows_property_values",
"test/test_regression_8.py::test_str_shows_just_strings"
]
| []
| MIT License | 637 | [
"tox.ini",
"python_jsonschema_objects/classbuilder.py"
]
| [
"tox.ini",
"python_jsonschema_objects/classbuilder.py"
]
|
|
cwacek__python-jsonschema-objects-47 | ad1ce37fe82cdfa75a8193566c7c7cdcae07b09c | 2016-07-16 18:15:46 | 03be1567ef25edc27fe36675444bd14da93b0f15 | diff --git a/python_jsonschema_objects/classbuilder.py b/python_jsonschema_objects/classbuilder.py
index c01fdcc..3a5ca41 100644
--- a/python_jsonschema_objects/classbuilder.py
+++ b/python_jsonschema_objects/classbuilder.py
@@ -145,7 +145,7 @@ class ProtocolBase(collections.MutableMapping):
else: # We got nothing
raise validators.ValidationError(
"Unable to instantiate any valid types: \n"
- "\n".join("{0}: {1}".format(k, e) for k, e in validation_errors)
+ "".join("{0}: {1}\n".format(k, e) for k, e in validation_errors)
)
return obj
@@ -314,7 +314,7 @@ class TypeProxy(object):
else: # We got nothing
raise validators.ValidationError(
"Unable to instantiate any valid types: \n"
- "\n".join("{0}: {1}".format(k, e) for k, e in validation_errors)
+ "".join("{0}: {1}\n".format(k, e) for k, e in validation_errors)
)
@@ -474,6 +474,18 @@ class ClassBuilder(object):
**clsdata_copy)
return self.resolved[uri]
+ elif isinstance(clsdata.get('type'), list):
+ types = []
+ for i, item_detail in enumerate(clsdata['type']):
+ subdata = {k: v for k, v in six.iteritems(clsdata) if k != 'type'}
+ subdata['type'] = item_detail
+ types.append(self._build_literal(
+ uri + "_%s" % i,
+ subdata))
+
+ self.resolved[uri] = TypeProxy(types)
+ return self.resolved[uri]
+
elif (clsdata.get('type', None) == 'object' or
clsdata.get('properties', None) is not None or
clsdata.get('additionalProperties', False)):
@@ -770,6 +782,10 @@ def make_property(prop, info, desc=""):
val = info['type'](**util.coerce_for_expansion(val))
val.validate()
+
+ elif isinstance(info['type'], TypeProxy):
+ val = info['type'](val)
+
elif info['type'] is None:
# This is the null value
if val is not None:
| Properties with multiple types are not parsed correctly
http://json-schema.org/latest/json-schema-validation.html#anchor79
Example property
"claimed_by": {
"id": "claimed",
"type": ["string", "null"],
"description": "Robots Only. The human agent that has claimed this robot.",
"required": false
},
Traceback (most recent call last):
File "/home/idanforth/fetch/src/sandbox/fetchcore/test/unit/test_scheduler.py", line 58, in setUp
agent_ns = agent_builder.build_classes()
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/__init__.py", line 83, in build_classes
builder.construct(nm, self.schema)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 288, in construct
ret = self._construct(uri, *args, **kw)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 362, in _construct
parent)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 498, in _build_object
typ = self.construct(uri, detail)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 288, in construct
ret = self._construct(uri, *args, **kw)
File "/usr/local/lib/python2.7/dist-packages/python_jsonschema_objects/classbuilder.py", line 380, in _construct
"no type and no reference".format(clsdata))
NotImplementedError: Unable to parse schema object '{'raw_name': u'claimed_by', u'required': False, u'type': [u'string'
, u'null'], u'id': u'claimed', u'description': u'Robots Only. The human agent that has claimed this robot.'}' with no t
ype and no reference
| cwacek/python-jsonschema-objects | diff --git a/test/test_regression_17.py b/test/test_regression_17.py
new file mode 100644
index 0000000..c13e942
--- /dev/null
+++ b/test/test_regression_17.py
@@ -0,0 +1,33 @@
+import pytest
+
+import python_jsonschema_objects as pjo
+
+
[email protected]
+def test_class():
+ schema = {
+ 'title': 'Example',
+ 'properties': {
+ "claimed_by": {
+ "id": "claimed",
+ "type": ["string", "integer", "null"],
+ "description": "Robots Only. The human agent that has claimed this robot.",
+ "required": False
+ },
+ }
+ }
+
+ builder = pjo.ObjectBuilder(schema)
+ ns = builder.build_classes()
+ return ns
+
+
[email protected]('value', [
+ "Hi", 4, None])
+def test_properties_can_have_multiple_types(test_class, value):
+ test_class.Example(claimed_by=value)
+
[email protected]('value', [2.4])
+def test_multiply_typed_properties_still_validate(test_class, value):
+ with pytest.raises(pjo.ValidationError):
+ test_class.Example(claimed_by=value)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
execnet==1.9.0
importlib-metadata==4.8.3
inflection==0.2.0
iniconfig==1.1.1
jsonschema==2.3.0
Markdown==2.4
packaging==21.3
pandocfilters==1.2
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
-e git+https://github.com/cwacek/python-jsonschema-objects.git@ad1ce37fe82cdfa75a8193566c7c7cdcae07b09c#egg=python_jsonschema_objects
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-jsonschema-objects
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- inflection==0.2.0
- iniconfig==1.1.1
- jsonschema==2.3.0
- markdown==2.4
- packaging==21.3
- pandocfilters==1.2
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-jsonschema-objects
| [
"test/test_regression_17.py::test_properties_can_have_multiple_types[Hi]",
"test/test_regression_17.py::test_properties_can_have_multiple_types[4]",
"test/test_regression_17.py::test_properties_can_have_multiple_types[None]",
"test/test_regression_17.py::test_multiply_typed_properties_still_validate[2.4]"
]
| []
| []
| []
| MIT License | 638 | [
"python_jsonschema_objects/classbuilder.py"
]
| [
"python_jsonschema_objects/classbuilder.py"
]
|
|
dpkp__kafka-python-762 | 3666b66a21776d620f68d2f7ff2fed1bc18b94e5 | 2016-07-16 20:07:03 | 709ee3b59aff8ab205f0e09c33f4ec8391664228 | diff --git a/kafka/client.py b/kafka/client.py
index 891ae03..8a34cc4 100644
--- a/kafka/client.py
+++ b/kafka/client.py
@@ -137,7 +137,7 @@ class SimpleClient(object):
kafka.errors.check_error(resp)
# Otherwise return the BrokerMetadata
- return BrokerMetadata(resp.nodeId, resp.host, resp.port)
+ return BrokerMetadata(resp.nodeId, resp.host, resp.port, None)
def _next_id(self):
"""Generate a new correlation id"""
@@ -525,7 +525,7 @@ class SimpleClient(object):
log.debug('Updating broker metadata: %s', resp.brokers)
log.debug('Updating topic metadata: %s', [topic for _, topic, _ in resp.topics])
- self.brokers = dict([(nodeId, BrokerMetadata(nodeId, host, port))
+ self.brokers = dict([(nodeId, BrokerMetadata(nodeId, host, port, None))
for nodeId, host, port in resp.brokers])
for error, topic, partitions in resp.topics:
@@ -577,7 +577,7 @@ class SimpleClient(object):
# (not sure how this could happen. server could be in bad state)
else:
self.topics_to_brokers[topic_part] = BrokerMetadata(
- leader, None, None
+ leader, None, None, None
)
def send_metadata_request(self, payloads=[], fail_on_error=True,
diff --git a/kafka/client_async.py b/kafka/client_async.py
index 6fa9434..e064d51 100644
--- a/kafka/client_async.py
+++ b/kafka/client_async.py
@@ -178,7 +178,11 @@ class KafkaClient(object):
time.sleep(next_at - now)
self._last_bootstrap = time.time()
- metadata_request = MetadataRequest[0]([])
+ if self.config['api_version'] is None or self.config['api_version'] < (0, 10):
+ metadata_request = MetadataRequest[0]([])
+ else:
+ metadata_request = MetadataRequest[1](None)
+
for host, port, afi in hosts:
log.debug("Attempting to bootstrap via node at %s:%s", host, port)
cb = functools.partial(self._conn_state_change, 'bootstrap')
@@ -643,10 +647,17 @@ class KafkaClient(object):
topics = list(self._topics)
if self.cluster.need_all_topic_metadata:
- topics = []
+ if self.config['api_version'] < (0, 10):
+ topics = []
+ else:
+ topics = None
if self._can_send_request(node_id):
- request = MetadataRequest[0](topics)
+ if self.config['api_version'] < (0, 10):
+ api_version = 0
+ else:
+ api_version = 1
+ request = MetadataRequest[api_version](topics)
log.debug("Sending metadata request %s to node %s", request, node_id)
future = self.send(node_id, request)
future.add_callback(self.cluster.update_metadata)
diff --git a/kafka/cluster.py b/kafka/cluster.py
index 9aabec1..694e115 100644
--- a/kafka/cluster.py
+++ b/kafka/cluster.py
@@ -34,6 +34,8 @@ class ClusterMetadata(object):
self._lock = threading.Lock()
self.need_all_topic_metadata = False
self.unauthorized_topics = set()
+ self.internal_topics = set()
+ self.controller = None
self.config = copy.copy(self.DEFAULT_CONFIG)
for key in self.config:
@@ -150,13 +152,23 @@ class ClusterMetadata(object):
self._future = Future()
return self._future
- def topics(self):
+ def topics(self, exclude_internal_topics=True):
"""Get set of known topics.
+ Arguments:
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to
+ True the only way to receive records from an internal topic is
+ subscribing to it. Default True
+
Returns:
set: {topic (str), ...}
"""
- return set(self._partitions.keys())
+ topics = set(self._partitions.keys())
+ if exclude_internal_topics:
+ return topics - self.internal_topics
+ else:
+ return topics
def failed_update(self, exception):
"""Update cluster state given a failed MetadataRequest."""
@@ -180,23 +192,41 @@ class ClusterMetadata(object):
# In the common case where we ask for a single topic and get back an
# error, we should fail the future
if len(metadata.topics) == 1 and metadata.topics[0][0] != 0:
- error_code, topic, _ = metadata.topics[0]
+ error_code, topic = metadata.topics[0][:2]
error = Errors.for_code(error_code)(topic)
return self.failed_update(error)
if not metadata.brokers:
log.warning("No broker metadata found in MetadataResponse")
- for node_id, host, port in metadata.brokers:
+ for broker in metadata.brokers:
+ if metadata.API_VERSION == 0:
+ node_id, host, port = broker
+ rack = None
+ else:
+ node_id, host, port, rack = broker
self._brokers.update({
- node_id: BrokerMetadata(node_id, host, port)
+ node_id: BrokerMetadata(node_id, host, port, rack)
})
+ if metadata.API_VERSION == 0:
+ self.controller = None
+ else:
+ self.controller = self._brokers.get(metadata.controller_id)
+
_new_partitions = {}
_new_broker_partitions = collections.defaultdict(set)
_new_unauthorized_topics = set()
+ _new_internal_topics = set()
- for error_code, topic, partitions in metadata.topics:
+ for topic_data in metadata.topics:
+ if metadata.API_VERSION == 0:
+ error_code, topic, partitions = topic_data
+ is_internal = False
+ else:
+ error_code, topic, is_internal, partitions = topic_data
+ if is_internal:
+ _new_internal_topics.add(topic)
error_type = Errors.for_code(error_code)
if error_type is Errors.NoError:
_new_partitions[topic] = {}
@@ -226,6 +256,7 @@ class ClusterMetadata(object):
self._partitions = _new_partitions
self._broker_partitions = _new_broker_partitions
self.unauthorized_topics = _new_unauthorized_topics
+ self.internal_topics = _new_internal_topics
f = None
if self._future:
f = self._future
@@ -272,7 +303,8 @@ class ClusterMetadata(object):
coordinator = BrokerMetadata(
response.coordinator_id,
response.host,
- response.port)
+ response.port,
+ None)
# Assume that group coordinators are just brokers
# (this is true now, but could diverge in future)
@@ -281,12 +313,14 @@ class ClusterMetadata(object):
# If this happens, either brokers have moved without
# changing IDs, or our assumption above is wrong
- elif coordinator != self._brokers[node_id]:
- log.error("GroupCoordinator metadata conflicts with existing"
- " broker metadata. Coordinator: %s, Broker: %s",
- coordinator, self._brokers[node_id])
- self._groups[group] = node_id
- return False
+ else:
+ node = self._brokers[node_id]
+ if coordinator.host != node.host or coordinator.port != node.port:
+ log.error("GroupCoordinator metadata conflicts with existing"
+ " broker metadata. Coordinator: %s, Broker: %s",
+ coordinator, node)
+ self._groups[group] = node_id
+ return False
log.info("Group coordinator for %s is %s", group, coordinator)
self._groups[group] = node_id
diff --git a/kafka/conn.py b/kafka/conn.py
index 38829c6..6028867 100644
--- a/kafka/conn.py
+++ b/kafka/conn.py
@@ -547,6 +547,7 @@ class BrokerConnection(object):
Returns: version tuple, i.e. (0, 10), (0, 9), (0, 8, 2), ...
"""
+
# Monkeypatch the connection request timeout
# Generally this timeout should not get triggered
# but in case it does, we want it to be reasonably short
@@ -574,11 +575,11 @@ class BrokerConnection(object):
log.addFilter(log_filter)
test_cases = [
- ((0, 10), ApiVersionRequest[0]()),
- ((0, 9), ListGroupsRequest[0]()),
- ((0, 8, 2), GroupCoordinatorRequest[0]('kafka-python-default-group')),
- ((0, 8, 1), OffsetFetchRequest[0]('kafka-python-default-group', [])),
- ((0, 8, 0), MetadataRequest[0]([])),
+ ('0.10', ApiVersionRequest[0]()),
+ ('0.9', ListGroupsRequest[0]()),
+ ('0.8.2', GroupCoordinatorRequest[0]('kafka-python-default-group')),
+ ('0.8.1', OffsetFetchRequest[0]('kafka-python-default-group', [])),
+ ('0.8.0', MetadataRequest[0]([])),
]
def connect():
@@ -614,9 +615,9 @@ class BrokerConnection(object):
self._sock.setblocking(False)
if f.succeeded():
- log.info('Broker version identifed as %s', '.'.join(map(str, version)))
- log.info('Set configuration api_version=%s to skip auto'
- ' check_version requests on startup', version)
+ log.info('Broker version identifed as %s', version)
+ log.info("Set configuration api_version='%s' to skip auto"
+ " check_version requests on startup", version)
break
# Only enable strict checking to verify that we understand failure
@@ -633,7 +634,7 @@ class BrokerConnection(object):
# requests (bug...). In this case we expect to see a correlation
# id mismatch
elif (isinstance(f.exception, Errors.CorrelationIdError) and
- version == (0, 10)):
+ version == '0.10'):
pass
elif six.PY2:
assert isinstance(f.exception.args[0], socket.error)
@@ -647,7 +648,7 @@ class BrokerConnection(object):
log.removeFilter(log_filter)
self.config['request_timeout_ms'] = stashed_request_timeout_ms
- return version
+ return tuple(map(int, version.split('.')))
def __repr__(self):
return "<BrokerConnection host=%s/%s port=%d>" % (self.hostname, self.host,
diff --git a/kafka/protocol/metadata.py b/kafka/protocol/metadata.py
index 8063dda..2711abb 100644
--- a/kafka/protocol/metadata.py
+++ b/kafka/protocol/metadata.py
@@ -1,5 +1,5 @@
from .struct import Struct
-from .types import Array, Int16, Int32, Schema, String
+from .types import Array, Boolean, Int16, Int32, Schema, String
class MetadataResponse_v0(Struct):
@@ -22,14 +22,46 @@ class MetadataResponse_v0(Struct):
)
+class MetadataResponse_v1(Struct):
+ API_KEY = 3
+ API_VERSION = 1
+ SCHEMA = Schema(
+ ('brokers', Array(
+ ('node_id', Int32),
+ ('host', String('utf-8')),
+ ('port', Int32),
+ ('rack', String('utf-8')))),
+ ('controller_id', Int32),
+ ('topics', Array(
+ ('error_code', Int16),
+ ('topic', String('utf-8')),
+ ('is_internal', Boolean),
+ ('partitions', Array(
+ ('error_code', Int16),
+ ('partition', Int32),
+ ('leader', Int32),
+ ('replicas', Array(Int32)),
+ ('isr', Array(Int32))))))
+ )
+
+
class MetadataRequest_v0(Struct):
API_KEY = 3
API_VERSION = 0
RESPONSE_TYPE = MetadataResponse_v0
SCHEMA = Schema(
- ('topics', Array(String('utf-8')))
+ ('topics', Array(String('utf-8'))) # Empty Array (len 0) for all topics
+ )
+
+
+class MetadataRequest_v1(Struct):
+ API_KEY = 3
+ API_VERSION = 1
+ RESPONSE_TYPE = MetadataResponse_v1
+ SCHEMA = Schema(
+ ('topics', Array(String('utf-8'))) # Null Array (len -1) for all topics
)
-MetadataRequest = [MetadataRequest_v0]
-MetadataResponse = [MetadataResponse_v0]
+MetadataRequest = [MetadataRequest_v0, MetadataRequest_v1]
+MetadataResponse = [MetadataResponse_v0, MetadataResponse_v1]
diff --git a/kafka/protocol/types.py b/kafka/protocol/types.py
index 18aaca1..da10326 100644
--- a/kafka/protocol/types.py
+++ b/kafka/protocol/types.py
@@ -99,6 +99,16 @@ class Bytes(AbstractType):
return value
+class Boolean(AbstractType):
+ @classmethod
+ def encode(cls, value):
+ return _pack('>?', value)
+
+ @classmethod
+ def decode(cls, data):
+ return _unpack('>?', data.read(1))
+
+
class Schema(AbstractType):
def __init__(self, *fields):
if fields:
@@ -145,6 +155,8 @@ class Array(AbstractType):
raise ValueError('Array instantiated with no array_of type')
def encode(self, items):
+ if items is None:
+ return Int32.encode(-1)
return b''.join(
[Int32.encode(len(items))] +
[self.array_of.encode(item) for item in items]
@@ -152,7 +164,11 @@ class Array(AbstractType):
def decode(self, data):
length = Int32.decode(data)
+ if length == -1:
+ return None
return [self.array_of.decode(data) for _ in range(length)]
def repr(self, list_of_items):
+ if list_of_items is None:
+ return 'NULL'
return '[' + ', '.join([self.array_of.repr(item) for item in list_of_items]) + ']'
diff --git a/kafka/structs.py b/kafka/structs.py
index 5902930..3188516 100644
--- a/kafka/structs.py
+++ b/kafka/structs.py
@@ -58,7 +58,7 @@ TopicPartition = namedtuple("TopicPartition",
["topic", "partition"])
BrokerMetadata = namedtuple("BrokerMetadata",
- ["nodeId", "host", "port"])
+ ["nodeId", "host", "port", "rack"])
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
| KAFKA-3306: MetadataRequest v1
Related to KIP-4 | dpkp/kafka-python | diff --git a/test/test_client.py b/test/test_client.py
index 660af61..79ac8be 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -1,6 +1,7 @@
import socket
from mock import ANY, MagicMock, patch
+from operator import itemgetter
import six
from . import unittest
@@ -117,9 +118,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_1', [
@@ -137,7 +139,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 2, 0, [0, 1], [0, 1])
])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
# client loads metadata at init
client = SimpleClient(hosts=['broker_1:4567'])
@@ -167,9 +169,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_still_creating', []),
@@ -179,7 +182,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -197,9 +200,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_still_creating', []),
@@ -209,7 +213,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -230,14 +234,15 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_no_partitions', [])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -249,7 +254,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 0, 0, [0, 1], [0, 1])
])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
# calling _get_leader_for_partition (from any broker aware request)
# will try loading metadata again for the same topic
@@ -267,15 +272,16 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_no_partitions', []),
(UNKNOWN_TOPIC_OR_PARTITION, 'topic_unknown', []),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -294,9 +300,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_noleader', [
@@ -304,7 +311,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
self.assertDictEqual(
@@ -330,7 +337,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 1, 1, [1, 0], [1, 0])
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
self.assertEqual(brokers[0], client._get_leader_for_partition('topic_noleader', 0))
self.assertEqual(brokers[1], client._get_leader_for_partition('topic_noleader', 1))
@@ -340,9 +347,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_noleader', [
@@ -350,7 +358,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -368,14 +376,15 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(UNKNOWN_TOPIC_OR_PARTITION, 'topic_doesnt_exist', []),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
diff --git a/test/test_client_async.py b/test/test_client_async.py
index dfe11ea..aa91704 100644
--- a/test/test_client_async.py
+++ b/test/test_client_async.py
@@ -53,8 +53,8 @@ def test_bootstrap_success(conn):
conn.connect.assert_called_with()
conn.send.assert_called_once_with(MetadataRequest[0]([]))
assert cli._bootstrap_fails == 0
- assert cli.cluster.brokers() == set([BrokerMetadata(0, 'foo', 12),
- BrokerMetadata(1, 'bar', 34)])
+ assert cli.cluster.brokers() == set([BrokerMetadata(0, 'foo', 12, None),
+ BrokerMetadata(1, 'bar', 34, None)])
def test_bootstrap_failure(conn):
conn.state = ConnectionStates.DISCONNECTED
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 7
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/dpkp/kafka-python.git@3666b66a21776d620f68d2f7ff2fed1bc18b94e5#egg=kafka_python
lz4tools==1.3.1.2
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
tomli==1.2.3
typing_extensions==4.1.1
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- cramjam==2.5.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- lz4tools==1.3.1.2
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- tomli==1.2.3
- typing-extensions==4.1.1
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_client.py::TestSimpleClient::test_ensure_topic_exists",
"test/test_client.py::TestSimpleClient::test_get_leader_exceptions_when_noleader",
"test/test_client.py::TestSimpleClient::test_get_leader_for_partitions_reloads_metadata",
"test/test_client.py::TestSimpleClient::test_get_leader_for_unassigned_partitions",
"test/test_client.py::TestSimpleClient::test_has_metadata_for_topic",
"test/test_client.py::TestSimpleClient::test_load_metadata",
"test/test_client.py::TestSimpleClient::test_send_produce_request_raises_when_noleader",
"test/test_client.py::TestSimpleClient::test_send_produce_request_raises_when_topic_unknown",
"test/test_client_async.py::test_bootstrap_success"
]
| []
| [
"test/test_client.py::TestSimpleClient::test_correlation_rollover",
"test/test_client.py::TestSimpleClient::test_init_with_csv",
"test/test_client.py::TestSimpleClient::test_init_with_list",
"test/test_client.py::TestSimpleClient::test_init_with_unicode_csv",
"test/test_client.py::TestSimpleClient::test_send_broker_unaware_request",
"test/test_client.py::TestSimpleClient::test_send_broker_unaware_request_fail",
"test/test_client_async.py::test_bootstrap_servers[None-expected_hosts0]",
"test/test_client_async.py::test_bootstrap_servers[foobar:1234-expected_hosts1]",
"test/test_client_async.py::test_bootstrap_servers[fizzbuzz-expected_hosts2]",
"test/test_client_async.py::test_bootstrap_servers[foo:12,bar:34-expected_hosts3]",
"test/test_client_async.py::test_bootstrap_servers[bootstrap4-expected_hosts4]",
"test/test_client_async.py::test_bootstrap_failure",
"test/test_client_async.py::test_can_connect",
"test/test_client_async.py::test_maybe_connect",
"test/test_client_async.py::test_conn_state_change",
"test/test_client_async.py::test_ready",
"test/test_client_async.py::test_is_ready",
"test/test_client_async.py::test_close",
"test/test_client_async.py::test_is_disconnected",
"test/test_client_async.py::test_send",
"test/test_client_async.py::test_poll",
"test/test_client_async.py::test__poll",
"test/test_client_async.py::test_in_flight_request_count",
"test/test_client_async.py::test_least_loaded_node",
"test/test_client_async.py::test_set_topics",
"test/test_client_async.py::test_maybe_refresh_metadata_ttl",
"test/test_client_async.py::test_maybe_refresh_metadata_backoff",
"test/test_client_async.py::test_maybe_refresh_metadata_in_progress",
"test/test_client_async.py::test_maybe_refresh_metadata_update",
"test/test_client_async.py::test_maybe_refresh_metadata_failure",
"test/test_client_async.py::test_schedule",
"test/test_client_async.py::test_unschedule"
]
| []
| Apache License 2.0 | 639 | [
"kafka/cluster.py",
"kafka/client_async.py",
"kafka/protocol/types.py",
"kafka/client.py",
"kafka/structs.py",
"kafka/protocol/metadata.py",
"kafka/conn.py"
]
| [
"kafka/cluster.py",
"kafka/client_async.py",
"kafka/protocol/types.py",
"kafka/client.py",
"kafka/structs.py",
"kafka/protocol/metadata.py",
"kafka/conn.py"
]
|
|
dpkp__kafka-python-763 | 3666b66a21776d620f68d2f7ff2fed1bc18b94e5 | 2016-07-17 03:45:22 | 709ee3b59aff8ab205f0e09c33f4ec8391664228 | diff --git a/benchmarks/consumer_performance.py b/benchmarks/consumer_performance.py
new file mode 100755
index 0000000..3e879ae
--- /dev/null
+++ b/benchmarks/consumer_performance.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+# Adapted from https://github.com/mrafayaleem/kafka-jython
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import logging
+import pprint
+import sys
+import threading
+import traceback
+
+from kafka import KafkaConsumer, KafkaProducer
+from test.fixtures import KafkaFixture, ZookeeperFixture
+
+logging.basicConfig(level=logging.ERROR)
+
+
+def start_brokers(n):
+ print('Starting {0} {1}-node cluster...'.format(KafkaFixture.kafka_version, n))
+ print('-> 1 Zookeeper')
+ zk = ZookeeperFixture.instance()
+ print('---> {0}:{1}'.format(zk.host, zk.port))
+ print()
+
+ partitions = min(n, 3)
+ replicas = min(n, 3)
+ print('-> {0} Brokers [{1} partitions / {2} replicas]'.format(n, partitions, replicas))
+ brokers = [
+ KafkaFixture.instance(i, zk.host, zk.port, zk_chroot='',
+ partitions=partitions, replicas=replicas)
+ for i in range(n)
+ ]
+ for broker in brokers:
+ print('---> {0}:{1}'.format(broker.host, broker.port))
+ print()
+ return brokers
+
+
+class ConsumerPerformance(object):
+
+ @staticmethod
+ def run(args):
+ try:
+ props = {}
+ for prop in args.consumer_config:
+ k, v = prop.split('=')
+ try:
+ v = int(v)
+ except ValueError:
+ pass
+ if v == 'None':
+ v = None
+ props[k] = v
+
+ if args.brokers:
+ brokers = start_brokers(args.brokers)
+ props['bootstrap_servers'] = ['{0}:{1}'.format(broker.host, broker.port)
+ for broker in brokers]
+ print('---> bootstrap_servers={0}'.format(props['bootstrap_servers']))
+ print()
+
+ print('-> Producing records')
+ record = bytes(bytearray(args.record_size))
+ producer = KafkaProducer(compression_type=args.fixture_compression,
+ **props)
+ for i in xrange(args.num_records):
+ producer.send(topic=args.topic, value=record)
+ producer.flush()
+ producer.close()
+ print('-> OK!')
+ print()
+
+ print('Initializing Consumer...')
+ props['auto_offset_reset'] = 'earliest'
+ if 'consumer_timeout_ms' not in props:
+ props['consumer_timeout_ms'] = 10000
+ props['metrics_sample_window_ms'] = args.stats_interval * 1000
+ for k, v in props.items():
+ print('---> {0}={1}'.format(k, v))
+ consumer = KafkaConsumer(args.topic, **props)
+ print('---> group_id={0}'.format(consumer.config['group_id']))
+ print('---> report stats every {0} secs'.format(args.stats_interval))
+ print('---> raw metrics? {0}'.format(args.raw_metrics))
+ timer_stop = threading.Event()
+ timer = StatsReporter(args.stats_interval, consumer,
+ event=timer_stop,
+ raw_metrics=args.raw_metrics)
+ timer.start()
+ print('-> OK!')
+ print()
+
+ records = 0
+ for msg in consumer:
+ records += 1
+ if records >= args.num_records:
+ break
+ print('Consumed {0} records'.format(records))
+
+ timer_stop.set()
+
+ except Exception:
+ exc_info = sys.exc_info()
+ traceback.print_exception(*exc_info)
+ sys.exit(1)
+
+
+class StatsReporter(threading.Thread):
+ def __init__(self, interval, consumer, event=None, raw_metrics=False):
+ super(StatsReporter, self).__init__()
+ self.interval = interval
+ self.consumer = consumer
+ self.event = event
+ self.raw_metrics = raw_metrics
+
+ def print_stats(self):
+ metrics = self.consumer.metrics()
+ if self.raw_metrics:
+ pprint.pprint(metrics)
+ else:
+ print('{records-consumed-rate} records/sec ({bytes-consumed-rate} B/sec),'
+ ' {fetch-latency-avg} latency,'
+ ' {fetch-rate} fetch/s,'
+ ' {fetch-size-avg} fetch size,'
+ ' {records-lag-max} max record lag,'
+ ' {records-per-request-avg} records/req'
+ .format(**metrics['consumer-fetch-manager-metrics']))
+
+
+ def print_final(self):
+ self.print_stats()
+
+ def run(self):
+ while self.event and not self.event.wait(self.interval):
+ self.print_stats()
+ else:
+ self.print_final()
+
+
+def get_args_parser():
+ parser = argparse.ArgumentParser(
+ description='This tool is used to verify the consumer performance.')
+
+ parser.add_argument(
+ '--topic', type=str,
+ help='Topic for consumer test',
+ default='kafka-python-benchmark-test')
+ parser.add_argument(
+ '--num-records', type=long,
+ help='number of messages to consume',
+ default=1000000)
+ parser.add_argument(
+ '--record-size', type=int,
+ help='message size in bytes',
+ default=100)
+ parser.add_argument(
+ '--consumer-config', type=str, nargs='+', default=(),
+ help='kafka consumer related configuaration properties like '
+ 'bootstrap_servers,client_id etc..')
+ parser.add_argument(
+ '--fixture-compression', type=str,
+ help='specify a compression type for use with broker fixtures / producer')
+ parser.add_argument(
+ '--brokers', type=int,
+ help='Number of kafka brokers to start',
+ default=0)
+ parser.add_argument(
+ '--stats-interval', type=int,
+ help='Interval in seconds for stats reporting to console',
+ default=5)
+ parser.add_argument(
+ '--raw-metrics', action='store_true',
+ help='Enable this flag to print full metrics dict on each interval')
+ return parser
+
+
+if __name__ == '__main__':
+ args = get_args_parser().parse_args()
+ ConsumerPerformance.run(args)
diff --git a/load_example.py b/benchmarks/load_example.py
similarity index 100%
rename from load_example.py
rename to benchmarks/load_example.py
diff --git a/benchmarks/producer_performance.py b/benchmarks/producer_performance.py
new file mode 100755
index 0000000..e958735
--- /dev/null
+++ b/benchmarks/producer_performance.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+# Adapted from https://github.com/mrafayaleem/kafka-jython
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import pprint
+import sys
+import threading
+import traceback
+
+from kafka import KafkaProducer
+from test.fixtures import KafkaFixture, ZookeeperFixture
+
+
+def start_brokers(n):
+ print('Starting {0} {1}-node cluster...'.format(KafkaFixture.kafka_version, n))
+ print('-> 1 Zookeeper')
+ zk = ZookeeperFixture.instance()
+ print('---> {0}:{1}'.format(zk.host, zk.port))
+ print()
+
+ partitions = min(n, 3)
+ replicas = min(n, 3)
+ print('-> {0} Brokers [{1} partitions / {2} replicas]'.format(n, partitions, replicas))
+ brokers = [
+ KafkaFixture.instance(i, zk.host, zk.port, zk_chroot='',
+ partitions=partitions, replicas=replicas)
+ for i in range(n)
+ ]
+ for broker in brokers:
+ print('---> {0}:{1}'.format(broker.host, broker.port))
+ print()
+ return brokers
+
+
+class ProducerPerformance(object):
+
+ @staticmethod
+ def run(args):
+ try:
+ props = {}
+ for prop in args.producer_config:
+ k, v = prop.split('=')
+ try:
+ v = int(v)
+ except ValueError:
+ pass
+ if v == 'None':
+ v = None
+ props[k] = v
+
+ if args.brokers:
+ brokers = start_brokers(args.brokers)
+ props['bootstrap_servers'] = ['{0}:{1}'.format(broker.host, broker.port)
+ for broker in brokers]
+ print("---> bootstrap_servers={0}".format(props['bootstrap_servers']))
+ print()
+ print('-> OK!')
+ print()
+
+ print('Initializing producer...')
+ record = bytes(bytearray(args.record_size))
+ props['metrics_sample_window_ms'] = args.stats_interval * 1000
+
+ producer = KafkaProducer(**props)
+ for k, v in props.items():
+ print('---> {0}={1}'.format(k, v))
+ print('---> send {0} byte records'.format(args.record_size))
+ print('---> report stats every {0} secs'.format(args.stats_interval))
+ print('---> raw metrics? {0}'.format(args.raw_metrics))
+ timer_stop = threading.Event()
+ timer = StatsReporter(args.stats_interval, producer,
+ event=timer_stop,
+ raw_metrics=args.raw_metrics)
+ timer.start()
+ print('-> OK!')
+ print()
+
+ for i in xrange(args.num_records):
+ producer.send(topic=args.topic, value=record)
+ producer.flush()
+
+ timer_stop.set()
+
+ except Exception:
+ exc_info = sys.exc_info()
+ traceback.print_exception(*exc_info)
+ sys.exit(1)
+
+
+class StatsReporter(threading.Thread):
+ def __init__(self, interval, producer, event=None, raw_metrics=False):
+ super(StatsReporter, self).__init__()
+ self.interval = interval
+ self.producer = producer
+ self.event = event
+ self.raw_metrics = raw_metrics
+
+ def print_stats(self):
+ metrics = self.producer.metrics()
+ if self.raw_metrics:
+ pprint.pprint(metrics)
+ else:
+ print('{record-send-rate} records/sec ({byte-rate} B/sec),'
+ ' {request-latency-avg} latency,'
+ ' {record-size-avg} record size,'
+ ' {batch-size-avg} batch size,'
+ ' {records-per-request-avg} records/req'
+ .format(**metrics['producer-metrics']))
+
+ def print_final(self):
+ self.print_stats()
+
+ def run(self):
+ while self.event and not self.event.wait(self.interval):
+ self.print_stats()
+ else:
+ self.print_final()
+
+
+def get_args_parser():
+ parser = argparse.ArgumentParser(
+ description='This tool is used to verify the producer performance.')
+
+ parser.add_argument(
+ '--topic', type=str,
+ help='Topic name for test',
+ default='kafka-python-benchmark-test')
+ parser.add_argument(
+ '--num-records', type=long,
+ help='number of messages to produce',
+ default=1000000)
+ parser.add_argument(
+ '--record-size', type=int,
+ help='message size in bytes',
+ default=100)
+ parser.add_argument(
+ '--producer-config', type=str, nargs='+', default=(),
+ help='kafka producer related configuaration properties like '
+ 'bootstrap_servers,client_id etc..')
+ parser.add_argument(
+ '--brokers', type=int,
+ help='Number of kafka brokers to start',
+ default=0)
+ parser.add_argument(
+ '--stats-interval', type=int,
+ help='Interval in seconds for stats reporting to console',
+ default=5)
+ parser.add_argument(
+ '--raw-metrics', action='store_true',
+ help='Enable this flag to print full metrics dict on each interval')
+ return parser
+
+
+if __name__ == '__main__':
+ args = get_args_parser().parse_args()
+ ProducerPerformance.run(args)
diff --git a/kafka/client.py b/kafka/client.py
index 891ae03..8a34cc4 100644
--- a/kafka/client.py
+++ b/kafka/client.py
@@ -137,7 +137,7 @@ class SimpleClient(object):
kafka.errors.check_error(resp)
# Otherwise return the BrokerMetadata
- return BrokerMetadata(resp.nodeId, resp.host, resp.port)
+ return BrokerMetadata(resp.nodeId, resp.host, resp.port, None)
def _next_id(self):
"""Generate a new correlation id"""
@@ -525,7 +525,7 @@ class SimpleClient(object):
log.debug('Updating broker metadata: %s', resp.brokers)
log.debug('Updating topic metadata: %s', [topic for _, topic, _ in resp.topics])
- self.brokers = dict([(nodeId, BrokerMetadata(nodeId, host, port))
+ self.brokers = dict([(nodeId, BrokerMetadata(nodeId, host, port, None))
for nodeId, host, port in resp.brokers])
for error, topic, partitions in resp.topics:
@@ -577,7 +577,7 @@ class SimpleClient(object):
# (not sure how this could happen. server could be in bad state)
else:
self.topics_to_brokers[topic_part] = BrokerMetadata(
- leader, None, None
+ leader, None, None, None
)
def send_metadata_request(self, payloads=[], fail_on_error=True,
diff --git a/kafka/client_async.py b/kafka/client_async.py
index 6fa9434..2700069 100644
--- a/kafka/client_async.py
+++ b/kafka/client_async.py
@@ -6,6 +6,7 @@ import heapq
import itertools
import logging
import random
+import threading
# selectors in stdlib as of py3.4
try:
@@ -158,6 +159,7 @@ class KafkaClient(object):
self._bootstrap_fails = 0
self._wake_r, self._wake_w = socket.socketpair()
self._wake_r.setblocking(False)
+ self._wake_lock = threading.Lock()
self._selector.register(self._wake_r, selectors.EVENT_READ)
self._closed = False
self._bootstrap(collect_hosts(self.config['bootstrap_servers']))
@@ -178,7 +180,11 @@ class KafkaClient(object):
time.sleep(next_at - now)
self._last_bootstrap = time.time()
- metadata_request = MetadataRequest[0]([])
+ if self.config['api_version'] is None or self.config['api_version'] < (0, 10):
+ metadata_request = MetadataRequest[0]([])
+ else:
+ metadata_request = MetadataRequest[1](None)
+
for host, port, afi in hosts:
log.debug("Attempting to bootstrap via node at %s:%s", host, port)
cb = functools.partial(self._conn_state_change, 'bootstrap')
@@ -643,10 +649,17 @@ class KafkaClient(object):
topics = list(self._topics)
if self.cluster.need_all_topic_metadata:
- topics = []
+ if self.config['api_version'] < (0, 10):
+ topics = []
+ else:
+ topics = None
if self._can_send_request(node_id):
- request = MetadataRequest[0](topics)
+ if self.config['api_version'] < (0, 10):
+ api_version = 0
+ else:
+ api_version = 1
+ request = MetadataRequest[api_version](topics)
log.debug("Sending metadata request %s to node %s", request, node_id)
future = self.send(node_id, request)
future.add_callback(self.cluster.update_metadata)
@@ -747,10 +760,12 @@ class KafkaClient(object):
raise Errors.NoBrokersAvailable()
def wakeup(self):
- if self._wake_w.send(b'x') != 1:
- log.warning('Unable to send to wakeup socket!')
+ with self._wake_lock:
+ if self._wake_w.send(b'x') != 1:
+ log.warning('Unable to send to wakeup socket!')
def _clear_wake_fd(self):
+ # reading from wake socket should only happen in a single thread
while True:
try:
self._wake_r.recv(1024)
diff --git a/kafka/cluster.py b/kafka/cluster.py
index 9aabec1..694e115 100644
--- a/kafka/cluster.py
+++ b/kafka/cluster.py
@@ -34,6 +34,8 @@ class ClusterMetadata(object):
self._lock = threading.Lock()
self.need_all_topic_metadata = False
self.unauthorized_topics = set()
+ self.internal_topics = set()
+ self.controller = None
self.config = copy.copy(self.DEFAULT_CONFIG)
for key in self.config:
@@ -150,13 +152,23 @@ class ClusterMetadata(object):
self._future = Future()
return self._future
- def topics(self):
+ def topics(self, exclude_internal_topics=True):
"""Get set of known topics.
+ Arguments:
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to
+ True the only way to receive records from an internal topic is
+ subscribing to it. Default True
+
Returns:
set: {topic (str), ...}
"""
- return set(self._partitions.keys())
+ topics = set(self._partitions.keys())
+ if exclude_internal_topics:
+ return topics - self.internal_topics
+ else:
+ return topics
def failed_update(self, exception):
"""Update cluster state given a failed MetadataRequest."""
@@ -180,23 +192,41 @@ class ClusterMetadata(object):
# In the common case where we ask for a single topic and get back an
# error, we should fail the future
if len(metadata.topics) == 1 and metadata.topics[0][0] != 0:
- error_code, topic, _ = metadata.topics[0]
+ error_code, topic = metadata.topics[0][:2]
error = Errors.for_code(error_code)(topic)
return self.failed_update(error)
if not metadata.brokers:
log.warning("No broker metadata found in MetadataResponse")
- for node_id, host, port in metadata.brokers:
+ for broker in metadata.brokers:
+ if metadata.API_VERSION == 0:
+ node_id, host, port = broker
+ rack = None
+ else:
+ node_id, host, port, rack = broker
self._brokers.update({
- node_id: BrokerMetadata(node_id, host, port)
+ node_id: BrokerMetadata(node_id, host, port, rack)
})
+ if metadata.API_VERSION == 0:
+ self.controller = None
+ else:
+ self.controller = self._brokers.get(metadata.controller_id)
+
_new_partitions = {}
_new_broker_partitions = collections.defaultdict(set)
_new_unauthorized_topics = set()
+ _new_internal_topics = set()
- for error_code, topic, partitions in metadata.topics:
+ for topic_data in metadata.topics:
+ if metadata.API_VERSION == 0:
+ error_code, topic, partitions = topic_data
+ is_internal = False
+ else:
+ error_code, topic, is_internal, partitions = topic_data
+ if is_internal:
+ _new_internal_topics.add(topic)
error_type = Errors.for_code(error_code)
if error_type is Errors.NoError:
_new_partitions[topic] = {}
@@ -226,6 +256,7 @@ class ClusterMetadata(object):
self._partitions = _new_partitions
self._broker_partitions = _new_broker_partitions
self.unauthorized_topics = _new_unauthorized_topics
+ self.internal_topics = _new_internal_topics
f = None
if self._future:
f = self._future
@@ -272,7 +303,8 @@ class ClusterMetadata(object):
coordinator = BrokerMetadata(
response.coordinator_id,
response.host,
- response.port)
+ response.port,
+ None)
# Assume that group coordinators are just brokers
# (this is true now, but could diverge in future)
@@ -281,12 +313,14 @@ class ClusterMetadata(object):
# If this happens, either brokers have moved without
# changing IDs, or our assumption above is wrong
- elif coordinator != self._brokers[node_id]:
- log.error("GroupCoordinator metadata conflicts with existing"
- " broker metadata. Coordinator: %s, Broker: %s",
- coordinator, self._brokers[node_id])
- self._groups[group] = node_id
- return False
+ else:
+ node = self._brokers[node_id]
+ if coordinator.host != node.host or coordinator.port != node.port:
+ log.error("GroupCoordinator metadata conflicts with existing"
+ " broker metadata. Coordinator: %s, Broker: %s",
+ coordinator, node)
+ self._groups[group] = node_id
+ return False
log.info("Group coordinator for %s is %s", group, coordinator)
self._groups[group] = node_id
diff --git a/kafka/consumer/fetcher.py b/kafka/consumer/fetcher.py
index 34ff4cb..d615848 100644
--- a/kafka/consumer/fetcher.py
+++ b/kafka/consumer/fetcher.py
@@ -729,6 +729,8 @@ class Fetcher(six.Iterator):
else:
raise error_type('Unexpected error while fetching data')
+ # Because we are currently decompressing messages lazily, the sensors here
+ # will get compressed bytes / message set stats when compression is enabled
self._sensors.bytes_fetched.record(total_bytes)
self._sensors.records_fetched.record(total_count)
if response.API_VERSION >= 1:
@@ -774,12 +776,12 @@ class FetchManagerMetrics(object):
'The maximum throttle time in ms'), Max())
def record_topic_fetch_metrics(self, topic, num_bytes, num_records):
- metric_tags = {'topic': topic.replace('.', '_')}
-
# record bytes fetched
name = '.'.join(['topic', topic, 'bytes-fetched'])
bytes_fetched = self.metrics.get_sensor(name)
if not bytes_fetched:
+ metric_tags = {'topic': topic.replace('.', '_')}
+
bytes_fetched = self.metrics.sensor(name)
bytes_fetched.add(self.metrics.metric_name('fetch-size-avg',
self.group_name,
@@ -799,6 +801,8 @@ class FetchManagerMetrics(object):
name = '.'.join(['topic', topic, 'records-fetched'])
records_fetched = self.metrics.get_sensor(name)
if not records_fetched:
+ metric_tags = {'topic': topic.replace('.', '_')}
+
records_fetched = self.metrics.sensor(name)
records_fetched.add(self.metrics.metric_name('records-per-request-avg',
self.group_name,
diff --git a/kafka/consumer/group.py b/kafka/consumer/group.py
index 8fa43bc..b2114d8 100644
--- a/kafka/consumer/group.py
+++ b/kafka/consumer/group.py
@@ -12,7 +12,7 @@ from kafka.consumer.subscription_state import SubscriptionState
from kafka.coordinator.consumer import ConsumerCoordinator
from kafka.coordinator.assignors.range import RangePartitionAssignor
from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor
-from kafka.metrics import DictReporter, MetricConfig, Metrics
+from kafka.metrics import MetricConfig, Metrics
from kafka.protocol.offset import OffsetResetStrategy
from kafka.structs import TopicPartition
from kafka.version import __version__
@@ -171,12 +171,12 @@ class KafkaConsumer(six.Iterator):
in classes that will be notified of new metric creation. Default: []
metrics_num_samples (int): The number of samples maintained to compute
metrics. Default: 2
- metrics_sample_window_ms (int): The number of samples maintained to
- compute metrics. Default: 30000
+ metrics_sample_window_ms (int): The maximum age in milliseconds of
+ samples used to compute metrics. Default: 30000
Note:
Configuration parameters are described in more detail at
- https://kafka.apache.org/090/configuration.html#newconsumerconfigs
+ https://kafka.apache.org/0100/configuration.html#newconsumerconfigs
"""
DEFAULT_CONFIG = {
'bootstrap_servers': 'localhost',
@@ -241,7 +241,6 @@ class KafkaConsumer(six.Iterator):
time_window_ms=self.config['metrics_sample_window_ms'],
tags=metrics_tags)
reporters = [reporter() for reporter in self.config['metric_reporters']]
- reporters.append(DictReporter('kafka.consumer'))
self._metrics = Metrics(metric_config, reporters)
metric_group_prefix = 'consumer'
# TODO _metrics likely needs to be passed to KafkaClient, etc.
@@ -760,6 +759,21 @@ class KafkaConsumer(six.Iterator):
self._client.set_topics([])
log.debug("Unsubscribed all topics or patterns and assigned partitions")
+ def metrics(self, raw=False):
+ """Warning: this is an unstable interface.
+ It may change in future releases without warning"""
+ if raw:
+ return self._metrics.metrics
+
+ metrics = {}
+ for k, v in self._metrics.metrics.items():
+ if k.group not in metrics:
+ metrics[k.group] = {}
+ if k.name not in metrics[k.group]:
+ metrics[k.group][k.name] = {}
+ metrics[k.group][k.name] = v.value()
+ return metrics
+
def _use_consumer_group(self):
"""Return True iff this consumer can/should join a broker-coordinated group."""
if self.config['api_version'] < (0, 9):
diff --git a/kafka/metrics/stats/sensor.py b/kafka/metrics/stats/sensor.py
index b0bf4db..72bacfc 100644
--- a/kafka/metrics/stats/sensor.py
+++ b/kafka/metrics/stats/sensor.py
@@ -55,15 +55,15 @@ class Sensor(object):
Record a value at a known time.
Arguments:
value (double): The value we are recording
- time_ms (int): The current POSIX time in milliseconds
+ time_ms (int): A POSIX timestamp in milliseconds.
+ Default: The time when record() is evaluated (now)
Raises:
QuotaViolationException: if recording this value moves a
metric beyond its configured maximum or minimum bound
"""
- now = time.time() * 1000
if time_ms is None:
- time_ms = now
+ time_ms = time.time() * 1000
self._last_record_time = time_ms
with self._lock: # XXX high volume, might be performance issue
# increment all the stats
diff --git a/kafka/producer/buffer.py b/kafka/producer/buffer.py
index 5fcb35f..de5f0e7 100644
--- a/kafka/producer/buffer.py
+++ b/kafka/producer/buffer.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+from __future__ import absolute_import, division
import collections
import io
@@ -55,6 +55,8 @@ class MessageSetBuffer(object):
self._batch_size = batch_size
self._closed = False
self._messages = 0
+ self._bytes_written = 4 # Int32 header is 4 bytes
+ self._final_size = None
def append(self, offset, message):
"""Apend a Message to the MessageSet.
@@ -62,6 +64,8 @@ class MessageSetBuffer(object):
Arguments:
offset (int): offset of the message
message (Message or bytes): message struct or encoded bytes
+
+ Returns: bytes written
"""
if isinstance(message, Message):
encoded = message.encode()
@@ -70,6 +74,8 @@ class MessageSetBuffer(object):
msg = Int64.encode(offset) + Int32.encode(len(encoded)) + encoded
self._buffer.write(msg)
self._messages += 1
+ self._bytes_written += len(msg)
+ return len(msg)
def has_room_for(self, key, value):
if self._closed:
@@ -107,16 +113,20 @@ class MessageSetBuffer(object):
self._buffer.write(Int32.encode(len(encoded)))
self._buffer.write(encoded)
- # Update the message set size, and return ready for full read()
- size = self._buffer.tell() - 4
+ # Update the message set size (less the 4 byte header),
+ # and return with buffer ready for full read()
+ self._final_size = self._buffer.tell()
self._buffer.seek(0)
- self._buffer.write(Int32.encode(size))
+ self._buffer.write(Int32.encode(self._final_size - 4))
self._buffer.seek(0)
self._closed = True
def size_in_bytes(self):
- return self._buffer.tell()
+ return self._final_size or self._buffer.tell()
+
+ def compression_rate(self):
+ return self.size_in_bytes() / self._bytes_written
def buffer(self):
return self._buffer
diff --git a/kafka/producer/kafka.py b/kafka/producer/kafka.py
index f5c5d19..af07154 100644
--- a/kafka/producer/kafka.py
+++ b/kafka/producer/kafka.py
@@ -9,6 +9,7 @@ import weakref
from .. import errors as Errors
from ..client_async import KafkaClient
+from ..metrics import MetricConfig, Metrics
from ..partitioner.default import DefaultPartitioner
from ..protocol.message import Message, MessageSet
from ..structs import TopicPartition
@@ -220,10 +221,17 @@ class KafkaProducer(object):
api_version_auto_timeout_ms (int): number of milliseconds to throw a
timeout exception from the constructor when checking the broker
api version. Only applies if api_version set to 'auto'
+ metric_reporters (list): A list of classes to use as metrics reporters.
+ Implementing the AbstractMetricsReporter interface allows plugging
+ in classes that will be notified of new metric creation. Default: []
+ metrics_num_samples (int): The number of samples maintained to compute
+ metrics. Default: 2
+ metrics_sample_window_ms (int): The maximum age in milliseconds of
+ samples used to compute metrics. Default: 30000
Note:
Configuration parameters are described in more detail at
- https://kafka.apache.org/090/configuration.html#producerconfigs
+ https://kafka.apache.org/0100/configuration.html#producerconfigs
"""
_DEFAULT_CONFIG = {
'bootstrap_servers': 'localhost',
@@ -255,7 +263,10 @@ class KafkaProducer(object):
'ssl_keyfile': None,
'ssl_crlfile': None,
'api_version': None,
- 'api_version_auto_timeout_ms': 2000
+ 'api_version_auto_timeout_ms': 2000,
+ 'metric_reporters': [],
+ 'metrics_num_samples': 2,
+ 'metrics_sample_window_ms': 30000,
}
def __init__(self, **configs):
@@ -285,6 +296,14 @@ class KafkaProducer(object):
log.warning('use api_version=%s (%s is deprecated)',
str(self.config['api_version']), deprecated)
+ # Configure metrics
+ metrics_tags = {'client-id': self.config['client_id']}
+ metric_config = MetricConfig(samples=self.config['metrics_num_samples'],
+ time_window_ms=self.config['metrics_sample_window_ms'],
+ tags=metrics_tags)
+ reporters = [reporter() for reporter in self.config['metric_reporters']]
+ self._metrics = Metrics(metric_config, reporters)
+
client = KafkaClient(**self.config)
# Get auto-discovered version from client if necessary
@@ -298,7 +317,8 @@ class KafkaProducer(object):
self._accumulator = RecordAccumulator(message_version=message_version, **self.config)
self._metadata = client.cluster
guarantee_message_order = bool(self.config['max_in_flight_requests_per_connection'] == 1)
- self._sender = Sender(client, self._metadata, self._accumulator,
+ self._sender = Sender(client, self._metadata,
+ self._accumulator, self._metrics,
guarantee_message_order=guarantee_message_order,
**self.config)
self._sender.daemon = True
@@ -382,6 +402,7 @@ class KafkaProducer(object):
if not invoked_from_callback:
self._sender.join()
+ self._metrics.close()
try:
self.config['key_serializer'].close()
except AttributeError:
@@ -581,3 +602,18 @@ class KafkaProducer(object):
return self.config['partitioner'](serialized_key,
all_partitions,
available)
+
+ def metrics(self, raw=False):
+ """Warning: this is an unstable interface.
+ It may change in future releases without warning"""
+ if raw:
+ return self._metrics.metrics
+
+ metrics = {}
+ for k, v in self._metrics.metrics.items():
+ if k.group not in metrics:
+ metrics[k.group] = {}
+ if k.name not in metrics[k.group]:
+ metrics[k.group][k.name] = {}
+ metrics[k.group][k.name] = v.value()
+ return metrics
diff --git a/kafka/producer/record_accumulator.py b/kafka/producer/record_accumulator.py
index 566bf6f..7ea579a 100644
--- a/kafka/producer/record_accumulator.py
+++ b/kafka/producer/record_accumulator.py
@@ -38,7 +38,7 @@ class AtomicInteger(object):
class RecordBatch(object):
def __init__(self, tp, records, message_version=0):
self.record_count = 0
- #self.max_record_size = 0 # for metrics only
+ self.max_record_size = 0
now = time.time()
self.created = now
self.drained = None
@@ -56,8 +56,8 @@ class RecordBatch(object):
return None
msg = Message(value, key=key, magic=self.message_version)
- self.records.append(self.record_count, msg)
- # self.max_record_size = max(self.max_record_size, Record.record_size(key, value)) # for metrics only
+ record_size = self.records.append(self.record_count, msg)
+ self.max_record_size = max(self.max_record_size, record_size)
self.last_append = time.time()
future = FutureRecordMetadata(self.produce_future, self.record_count,
timestamp_ms)
diff --git a/kafka/producer/sender.py b/kafka/producer/sender.py
index 958e165..c1d0905 100644
--- a/kafka/producer/sender.py
+++ b/kafka/producer/sender.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+from __future__ import absolute_import, division
import collections
import copy
@@ -8,9 +8,11 @@ import threading
import six
from .. import errors as Errors
+from ..metrics.measurable import AnonMeasurable
+from ..metrics.stats import Avg, Count, Max, Rate
+from ..protocol.produce import ProduceRequest
from ..structs import TopicPartition
from ..version import __version__
-from ..protocol.produce import ProduceRequest
log = logging.getLogger(__name__)
@@ -31,7 +33,7 @@ class Sender(threading.Thread):
'api_version': (0, 8, 0),
}
- def __init__(self, client, metadata, accumulator, **configs):
+ def __init__(self, client, metadata, accumulator, metrics, **configs):
super(Sender, self).__init__()
self.config = copy.copy(self._DEFAULT_CONFIG)
for key in self.config:
@@ -45,6 +47,7 @@ class Sender(threading.Thread):
self._running = True
self._force_close = False
self._topics_to_add = set()
+ self._sensors = SenderMetrics(metrics, self._client, self._metadata)
def run(self):
"""The main run loop for the sender thread."""
@@ -119,7 +122,10 @@ class Sender(threading.Thread):
expired_batches = self._accumulator.abort_expired_batches(
self.config['request_timeout_ms'], self._metadata)
+ for expired_batch in expired_batches:
+ self._sensors.record_errors(expired_batch.topic_partition.topic, expired_batch.record_count)
+ self._sensors.update_produce_request_metrics(batches_by_node)
requests = self._create_produce_requests(batches_by_node)
# If we have any nodes that are ready to send + have sendable data,
# poll with 0 timeout so this can immediately loop and try sending more
@@ -223,6 +229,7 @@ class Sender(threading.Thread):
self.config['retries'] - batch.attempts - 1,
error)
self._accumulator.reenqueue(batch)
+ self._sensors.record_retries(batch.topic_partition.topic, batch.record_count)
else:
if error is Errors.TopicAuthorizationFailedError:
error = error(batch.topic_partition.topic)
@@ -230,6 +237,8 @@ class Sender(threading.Thread):
# tell the user the result of their request
batch.done(base_offset, timestamp_ms, error)
self._accumulator.deallocate(batch)
+ if error is not None:
+ self._sensors.record_errors(batch.topic_partition.topic, batch.record_count)
if getattr(error, 'invalid_metadata', False):
self._metadata.request_update()
@@ -296,3 +305,200 @@ class Sender(threading.Thread):
def wakeup(self):
"""Wake up the selector associated with this send thread."""
self._client.wakeup()
+
+
+class SenderMetrics(object):
+
+ def __init__(self, metrics, client, metadata):
+ self.metrics = metrics
+ self._client = client
+ self._metadata = metadata
+
+ sensor_name = 'batch-size'
+ self.batch_size_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('batch-size-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average number of bytes sent per partition per-request.')
+ self.add_metric('batch-size-max', Max(),
+ sensor_name=sensor_name,
+ description='The max number of bytes sent per partition per-request.')
+
+ sensor_name = 'compression-rate'
+ self.compression_rate_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('compression-rate-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average compression rate of record batches.')
+
+ sensor_name = 'queue-time'
+ self.queue_time_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('record-queue-time-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average time in ms record batches spent in the record accumulator.')
+ self.add_metric('record-queue-time-max', Max(),
+ sensor_name=sensor_name,
+ description='The maximum time in ms record batches spent in the record accumulator.')
+
+ sensor_name = 'request-time'
+ self.request_time_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('request-latency-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average request latency in ms')
+ self.add_metric('request-latency-max', Max(),
+ sensor_name=sensor_name,
+ description='The maximum request latency in ms')
+
+ sensor_name = 'produce-throttle-time'
+ self.produce_throttle_time_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('produce-throttle-time-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average throttle time in ms')
+ self.add_metric('produce-throttle-time-max', Max(),
+ sensor_name=sensor_name,
+ description='The maximum throttle time in ms')
+
+ sensor_name = 'records-per-request'
+ self.records_per_request_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('record-send-rate', Rate(),
+ sensor_name=sensor_name,
+ description='The average number of records sent per second.')
+ self.add_metric('records-per-request-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average number of records per request.')
+
+ sensor_name = 'bytes'
+ self.byte_rate_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('byte-rate', Rate(),
+ sensor_name=sensor_name,
+ description='The average number of bytes sent per second.')
+
+ sensor_name = 'record-retries'
+ self.retry_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('record-retry-rate', Rate(),
+ sensor_name=sensor_name,
+ description='The average per-second number of retried record sends')
+
+ sensor_name = 'errors'
+ self.error_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('record-error-rate', Rate(),
+ sensor_name=sensor_name,
+ description='The average per-second number of record sends that resulted in errors')
+
+ sensor_name = 'record-size-max'
+ self.max_record_size_sensor = self.metrics.sensor(sensor_name)
+ self.add_metric('record-size-max', Max(),
+ sensor_name=sensor_name,
+ description='The maximum record size across all batches')
+ self.add_metric('record-size-avg', Avg(),
+ sensor_name=sensor_name,
+ description='The average maximum record size per batch')
+
+ self.add_metric('requests-in-flight',
+ AnonMeasurable(lambda *_: self._client.in_flight_request_count()),
+ description='The current number of in-flight requests awaiting a response.')
+
+ self.add_metric('metadata-age',
+ AnonMeasurable(lambda _, now: (now - self._metadata._last_successful_refresh_ms) / 1000),
+ description='The age in seconds of the current producer metadata being used.')
+
+ def add_metric(self, metric_name, measurable, group_name='producer-metrics',
+ description=None, tags=None,
+ sensor_name=None):
+ m = self.metrics
+ metric = m.metric_name(metric_name, group_name, description, tags)
+ if sensor_name:
+ sensor = m.sensor(sensor_name)
+ sensor.add(metric, measurable)
+ else:
+ m.add_metric(metric, measurable)
+
+ def maybe_register_topic_metrics(self, topic):
+
+ def sensor_name(name):
+ return 'topic.{0}.{1}'.format(topic, name)
+
+ # if one sensor of the metrics has been registered for the topic,
+ # then all other sensors should have been registered; and vice versa
+ if not self.metrics.get_sensor(sensor_name('records-per-batch')):
+
+ self.add_metric('record-send-rate', Rate(),
+ sensor_name=sensor_name('records-per-batch'),
+ group_name='producer-topic-metrics.' + topic,
+ description= 'Records sent per second for topic ' + topic)
+
+ self.add_metric('byte-rate', Rate(),
+ sensor_name=sensor_name('bytes'),
+ group_name='producer-topic-metrics.' + topic,
+ description='Bytes per second for topic ' + topic)
+
+ self.add_metric('compression-rate', Avg(),
+ sensor_name=sensor_name('compression-rate'),
+ group_name='producer-topic-metrics.' + topic,
+ description='Average Compression ratio for topic ' + topic)
+
+ self.add_metric('record-retry-rate', Rate(),
+ sensor_name=sensor_name('record-retries'),
+ group_name='producer-topic-metrics.' + topic,
+ description='Record retries per second for topic ' + topic)
+
+ self.add_metric('record-error-rate', Rate(),
+ sensor_name=sensor_name('record-errors'),
+ group_name='producer-topic-metrics.' + topic,
+ description='Record errors per second for topic ' + topic)
+
+ def update_produce_request_metrics(self, batches_map):
+ for node_batch in batches_map.values():
+ records = 0
+ total_bytes = 0
+ for batch in node_batch:
+ # register all per-topic metrics at once
+ topic = batch.topic_partition.topic
+ self.maybe_register_topic_metrics(topic)
+
+ # per-topic record send rate
+ topic_records_count = self.metrics.get_sensor(
+ 'topic.' + topic + '.records-per-batch')
+ topic_records_count.record(batch.record_count)
+
+ # per-topic bytes send rate
+ topic_byte_rate = self.metrics.get_sensor(
+ 'topic.' + topic + '.bytes')
+ topic_byte_rate.record(batch.records.size_in_bytes())
+
+ # per-topic compression rate
+ topic_compression_rate = self.metrics.get_sensor(
+ 'topic.' + topic + '.compression-rate')
+ topic_compression_rate.record(batch.records.compression_rate())
+
+ # global metrics
+ self.batch_size_sensor.record(batch.records.size_in_bytes())
+ if batch.drained:
+ self.queue_time_sensor.record(batch.drained - batch.created)
+ self.compression_rate_sensor.record(batch.records.compression_rate())
+ self.max_record_size_sensor.record(batch.max_record_size)
+ records += batch.record_count
+ total_bytes += batch.records.size_in_bytes()
+
+ self.records_per_request_sensor.record(records)
+ self.byte_rate_sensor.record(total_bytes)
+
+ def record_retries(self, topic, count):
+ self.retry_sensor.record(count)
+ sensor = self.metrics.get_sensor('topic.' + topic + '.record-retries')
+ if sensor:
+ sensor.record(count)
+
+ def record_errors(self, topic, count):
+ self.error_sensor.record(count)
+ sensor = self.metrics.get_sensor('topic.' + topic + '.record-errors')
+ if sensor:
+ sensor.record(count)
+
+ def record_latency(self, latency, node=None):
+ self.request_time_sensor.record(latency)
+ if node:
+ sensor = self.metrics.get_sensor('node-' + node + '.latency')
+ if sensor:
+ sensor.record(latency)
+
+ def record_throttle_time(self, throttle_time_ms, node=None):
+ self.produce_throttle_time_sensor.record(throttle_time_ms)
diff --git a/kafka/protocol/metadata.py b/kafka/protocol/metadata.py
index 8063dda..2711abb 100644
--- a/kafka/protocol/metadata.py
+++ b/kafka/protocol/metadata.py
@@ -1,5 +1,5 @@
from .struct import Struct
-from .types import Array, Int16, Int32, Schema, String
+from .types import Array, Boolean, Int16, Int32, Schema, String
class MetadataResponse_v0(Struct):
@@ -22,14 +22,46 @@ class MetadataResponse_v0(Struct):
)
+class MetadataResponse_v1(Struct):
+ API_KEY = 3
+ API_VERSION = 1
+ SCHEMA = Schema(
+ ('brokers', Array(
+ ('node_id', Int32),
+ ('host', String('utf-8')),
+ ('port', Int32),
+ ('rack', String('utf-8')))),
+ ('controller_id', Int32),
+ ('topics', Array(
+ ('error_code', Int16),
+ ('topic', String('utf-8')),
+ ('is_internal', Boolean),
+ ('partitions', Array(
+ ('error_code', Int16),
+ ('partition', Int32),
+ ('leader', Int32),
+ ('replicas', Array(Int32)),
+ ('isr', Array(Int32))))))
+ )
+
+
class MetadataRequest_v0(Struct):
API_KEY = 3
API_VERSION = 0
RESPONSE_TYPE = MetadataResponse_v0
SCHEMA = Schema(
- ('topics', Array(String('utf-8')))
+ ('topics', Array(String('utf-8'))) # Empty Array (len 0) for all topics
+ )
+
+
+class MetadataRequest_v1(Struct):
+ API_KEY = 3
+ API_VERSION = 1
+ RESPONSE_TYPE = MetadataResponse_v1
+ SCHEMA = Schema(
+ ('topics', Array(String('utf-8'))) # Null Array (len -1) for all topics
)
-MetadataRequest = [MetadataRequest_v0]
-MetadataResponse = [MetadataResponse_v0]
+MetadataRequest = [MetadataRequest_v0, MetadataRequest_v1]
+MetadataResponse = [MetadataResponse_v0, MetadataResponse_v1]
diff --git a/kafka/protocol/types.py b/kafka/protocol/types.py
index 18aaca1..da10326 100644
--- a/kafka/protocol/types.py
+++ b/kafka/protocol/types.py
@@ -99,6 +99,16 @@ class Bytes(AbstractType):
return value
+class Boolean(AbstractType):
+ @classmethod
+ def encode(cls, value):
+ return _pack('>?', value)
+
+ @classmethod
+ def decode(cls, data):
+ return _unpack('>?', data.read(1))
+
+
class Schema(AbstractType):
def __init__(self, *fields):
if fields:
@@ -145,6 +155,8 @@ class Array(AbstractType):
raise ValueError('Array instantiated with no array_of type')
def encode(self, items):
+ if items is None:
+ return Int32.encode(-1)
return b''.join(
[Int32.encode(len(items))] +
[self.array_of.encode(item) for item in items]
@@ -152,7 +164,11 @@ class Array(AbstractType):
def decode(self, data):
length = Int32.decode(data)
+ if length == -1:
+ return None
return [self.array_of.decode(data) for _ in range(length)]
def repr(self, list_of_items):
+ if list_of_items is None:
+ return 'NULL'
return '[' + ', '.join([self.array_of.repr(item) for item in list_of_items]) + ']'
diff --git a/kafka/structs.py b/kafka/structs.py
index 5902930..3188516 100644
--- a/kafka/structs.py
+++ b/kafka/structs.py
@@ -58,7 +58,7 @@ TopicPartition = namedtuple("TopicPartition",
["topic", "partition"])
BrokerMetadata = namedtuple("BrokerMetadata",
- ["nodeId", "host", "port"])
+ ["nodeId", "host", "port", "rack"])
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
| getting "This socket is already used by another greenlet"
Hi,
I know you are not testing kafka-python with gevent; nevertheless, it seems to work quite well with gevent.
We are currently experiencing only one issue with it. From time to time we get the following exception:
```python
File "brain/series/writer.py", line 555, in submit
kafka_async_producer.send('seriesupdates', su)
File "kafka/producer/kafka.py", line 414, in send
self._sender.wakeup()
File "kafka/producer/sender.py", line 293, in wakeup
self._client.wakeup()
File "kafka/client_async.py", line 686, in wakeup
if self._wake_w.send(b'x') != 1:
File "gevent/_socket2.py", line 325, in send
self._wait(self._write_event)
File "gevent/_socket2.py", line 173, in _wait
raise _socketcommon.ConcurrentObjectUseError('This socket is already used by another greenlet: %r' % (watcher.callback, ))
```
Could you please look into this? Perhaps adding a thread lock could solve it?
Thanks! | dpkp/kafka-python | diff --git a/test/test_client.py b/test/test_client.py
index 660af61..79ac8be 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -1,6 +1,7 @@
import socket
from mock import ANY, MagicMock, patch
+from operator import itemgetter
import six
from . import unittest
@@ -117,9 +118,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_1', [
@@ -137,7 +139,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 2, 0, [0, 1], [0, 1])
])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
# client loads metadata at init
client = SimpleClient(hosts=['broker_1:4567'])
@@ -167,9 +169,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_still_creating', []),
@@ -179,7 +182,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -197,9 +200,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_still_creating', []),
@@ -209,7 +213,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -230,14 +234,15 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_no_partitions', [])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -249,7 +254,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 0, 0, [0, 1], [0, 1])
])
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
# calling _get_leader_for_partition (from any broker aware request)
# will try loading metadata again for the same topic
@@ -267,15 +272,16 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_LEADER, 'topic_no_partitions', []),
(UNKNOWN_TOPIC_OR_PARTITION, 'topic_unknown', []),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -294,9 +300,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_noleader', [
@@ -304,7 +311,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
self.assertDictEqual(
@@ -330,7 +337,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_ERROR, 1, 1, [1, 0], [1, 0])
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
self.assertEqual(brokers[0], client._get_leader_for_partition('topic_noleader', 0))
self.assertEqual(brokers[1], client._get_leader_for_partition('topic_noleader', 1))
@@ -340,9 +347,10 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(NO_ERROR, 'topic_noleader', [
@@ -350,7 +358,7 @@ class TestSimpleClient(unittest.TestCase):
(NO_LEADER, 1, -1, [], []),
]),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
@@ -368,14 +376,15 @@ class TestSimpleClient(unittest.TestCase):
mock_conn(conn)
brokers = [
- BrokerMetadata(0, 'broker_1', 4567),
- BrokerMetadata(1, 'broker_2', 5678)
+ BrokerMetadata(0, 'broker_1', 4567, None),
+ BrokerMetadata(1, 'broker_2', 5678, None)
]
+ resp0_brokers = list(map(itemgetter(0, 1, 2), brokers))
topics = [
(UNKNOWN_TOPIC_OR_PARTITION, 'topic_doesnt_exist', []),
]
- protocol.decode_metadata_response.return_value = MetadataResponse[0](brokers, topics)
+ protocol.decode_metadata_response.return_value = MetadataResponse[0](resp0_brokers, topics)
client = SimpleClient(hosts=['broker_1:4567'])
diff --git a/test/test_client_async.py b/test/test_client_async.py
index dfe11ea..aa91704 100644
--- a/test/test_client_async.py
+++ b/test/test_client_async.py
@@ -53,8 +53,8 @@ def test_bootstrap_success(conn):
conn.connect.assert_called_with()
conn.send.assert_called_once_with(MetadataRequest[0]([]))
assert cli._bootstrap_fails == 0
- assert cli.cluster.brokers() == set([BrokerMetadata(0, 'foo', 12),
- BrokerMetadata(1, 'bar', 34)])
+ assert cli.cluster.brokers() == set([BrokerMetadata(0, 'foo', 12, None),
+ BrokerMetadata(1, 'bar', 34, None)])
def test_bootstrap_failure(conn):
conn.state = ConnectionStates.DISCONNECTED
diff --git a/test/test_sender.py b/test/test_sender.py
index 44105e2..cf911e1 100644
--- a/test/test_sender.py
+++ b/test/test_sender.py
@@ -7,12 +7,13 @@ import pytest
from kafka.client_async import KafkaClient
from kafka.cluster import ClusterMetadata
-from kafka.producer.buffer import MessageSetBuffer
-from kafka.producer.sender import Sender
-from kafka.producer.record_accumulator import RecordAccumulator, RecordBatch
import kafka.errors as Errors
from kafka.future import Future
+from kafka.metrics import Metrics
+from kafka.producer.buffer import MessageSetBuffer
from kafka.protocol.produce import ProduceRequest
+from kafka.producer.record_accumulator import RecordAccumulator, RecordBatch
+from kafka.producer.sender import Sender
from kafka.structs import TopicPartition, OffsetAndMetadata
@@ -29,8 +30,13 @@ def accumulator():
@pytest.fixture
-def sender(client, accumulator):
- return Sender(client, client.cluster, accumulator)
+def metrics():
+ return Metrics()
+
+
[email protected]
+def sender(client, accumulator, metrics):
+ return Sender(client, client.cluster, accumulator, metrics)
@pytest.mark.parametrize(("api_version", "produce_version"), [
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 13
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-pylint",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==2.11.7
attrs==22.2.0
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
dill==0.3.4
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==5.10.1
-e git+https://github.com/dpkp/kafka-python.git@3666b66a21776d620f68d2f7ff2fed1bc18b94e5#egg=kafka_python
lazy-object-proxy==1.7.1
lz4tools==1.3.1.2
mccabe==0.7.0
mock==5.2.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-pylint==0.18.0
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
wrapt==1.16.0
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==2.11.7
- attrs==22.2.0
- coverage==6.2
- cramjam==2.5.0
- dill==0.3.4
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==5.10.1
- lazy-object-proxy==1.7.1
- lz4tools==1.3.1.2
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-pylint==0.18.0
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- wrapt==1.16.0
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_client.py::TestSimpleClient::test_ensure_topic_exists",
"test/test_client.py::TestSimpleClient::test_get_leader_exceptions_when_noleader",
"test/test_client.py::TestSimpleClient::test_get_leader_for_partitions_reloads_metadata",
"test/test_client.py::TestSimpleClient::test_get_leader_for_unassigned_partitions",
"test/test_client.py::TestSimpleClient::test_has_metadata_for_topic",
"test/test_client.py::TestSimpleClient::test_load_metadata",
"test/test_client.py::TestSimpleClient::test_send_produce_request_raises_when_noleader",
"test/test_client.py::TestSimpleClient::test_send_produce_request_raises_when_topic_unknown",
"test/test_client_async.py::test_bootstrap_success",
"test/test_sender.py::test_produce_request[api_version0-2]",
"test/test_sender.py::test_produce_request[api_version1-1]",
"test/test_sender.py::test_produce_request[api_version2-0]"
]
| []
| [
"test/test_client.py::TestSimpleClient::test_correlation_rollover",
"test/test_client.py::TestSimpleClient::test_init_with_csv",
"test/test_client.py::TestSimpleClient::test_init_with_list",
"test/test_client.py::TestSimpleClient::test_init_with_unicode_csv",
"test/test_client.py::TestSimpleClient::test_send_broker_unaware_request",
"test/test_client.py::TestSimpleClient::test_send_broker_unaware_request_fail",
"test/test_client_async.py::test_bootstrap_servers[None-expected_hosts0]",
"test/test_client_async.py::test_bootstrap_servers[foobar:1234-expected_hosts1]",
"test/test_client_async.py::test_bootstrap_servers[fizzbuzz-expected_hosts2]",
"test/test_client_async.py::test_bootstrap_servers[foo:12,bar:34-expected_hosts3]",
"test/test_client_async.py::test_bootstrap_servers[bootstrap4-expected_hosts4]",
"test/test_client_async.py::test_bootstrap_failure",
"test/test_client_async.py::test_can_connect",
"test/test_client_async.py::test_maybe_connect",
"test/test_client_async.py::test_conn_state_change",
"test/test_client_async.py::test_ready",
"test/test_client_async.py::test_is_ready",
"test/test_client_async.py::test_close",
"test/test_client_async.py::test_is_disconnected",
"test/test_client_async.py::test_send",
"test/test_client_async.py::test_poll",
"test/test_client_async.py::test__poll",
"test/test_client_async.py::test_in_flight_request_count",
"test/test_client_async.py::test_least_loaded_node",
"test/test_client_async.py::test_set_topics",
"test/test_client_async.py::test_maybe_refresh_metadata_ttl",
"test/test_client_async.py::test_maybe_refresh_metadata_backoff",
"test/test_client_async.py::test_maybe_refresh_metadata_in_progress",
"test/test_client_async.py::test_maybe_refresh_metadata_update",
"test/test_client_async.py::test_maybe_refresh_metadata_failure",
"test/test_client_async.py::test_schedule",
"test/test_client_async.py::test_unschedule"
]
| []
| Apache License 2.0 | 640 | [
"kafka/cluster.py",
"kafka/consumer/group.py",
"kafka/producer/buffer.py",
"kafka/metrics/stats/sensor.py",
"kafka/client_async.py",
"kafka/producer/kafka.py",
"kafka/protocol/types.py",
"load_example.py",
"kafka/client.py",
"benchmarks/consumer_performance.py",
"kafka/consumer/fetcher.py",
"kafka/producer/record_accumulator.py",
"kafka/producer/sender.py",
"benchmarks/producer_performance.py",
"kafka/structs.py",
"kafka/protocol/metadata.py"
]
| [
"kafka/cluster.py",
"kafka/consumer/group.py",
"kafka/producer/buffer.py",
"kafka/metrics/stats/sensor.py",
"kafka/client_async.py",
"kafka/producer/kafka.py",
"kafka/protocol/types.py",
"kafka/consumer/fetcher.py",
"kafka/client.py",
"benchmarks/consumer_performance.py",
"kafka/producer/record_accumulator.py",
"benchmarks/load_example.py",
"kafka/producer/sender.py",
"benchmarks/producer_performance.py",
"kafka/structs.py",
"kafka/protocol/metadata.py"
]
|
|
dpkp__kafka-python-766 | 506d023978e7273bd323c0750e3f77af259d257b | 2016-07-17 06:19:25 | 709ee3b59aff8ab205f0e09c33f4ec8391664228 | diff --git a/kafka/consumer/group.py b/kafka/consumer/group.py
index 9ebfe02..db0022d 100644
--- a/kafka/consumer/group.py
+++ b/kafka/consumer/group.py
@@ -176,6 +176,10 @@ class KafkaConsumer(six.Iterator):
selector (selectors.BaseSelector): Provide a specific selector
implementation to use for I/O multiplexing.
Default: selectors.DefaultSelector
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to True
+ the only way to receive records from an internal topic is
+ subscribing to it. Requires 0.10+ Default: True
Note:
Configuration parameters are described in more detail at
@@ -222,6 +226,7 @@ class KafkaConsumer(six.Iterator):
'metrics_num_samples': 2,
'metrics_sample_window_ms': 30000,
'selector': selectors.DefaultSelector,
+ 'exclude_internal_topics': True,
}
def __init__(self, *topics, **configs):
diff --git a/kafka/coordinator/consumer.py b/kafka/coordinator/consumer.py
index 083a36a..a18329c 100644
--- a/kafka/coordinator/consumer.py
+++ b/kafka/coordinator/consumer.py
@@ -36,6 +36,7 @@ class ConsumerCoordinator(BaseCoordinator):
'heartbeat_interval_ms': 3000,
'retry_backoff_ms': 100,
'api_version': (0, 9),
+ 'exclude_internal_topics': True,
}
def __init__(self, client, subscription, metrics, metric_group_prefix,
@@ -70,6 +71,10 @@ class ConsumerCoordinator(BaseCoordinator):
using Kafka's group managementment facilities. Default: 30000
retry_backoff_ms (int): Milliseconds to backoff when retrying on
errors. Default: 100.
+ exclude_internal_topics (bool): Whether records from internal topics
+ (such as offsets) should be exposed to the consumer. If set to
+ True the only way to receive records from an internal topic is
+ subscribing to it. Requires 0.10+. Default: True
"""
super(ConsumerCoordinator, self).__init__(client, **configs)
self.config = copy.copy(self.DEFAULT_CONFIG)
@@ -81,7 +86,8 @@ class ConsumerCoordinator(BaseCoordinator):
assert self.config['assignors'], 'Coordinator requires assignors'
self._subscription = subscription
- self._partitions_per_topic = {}
+ self._metadata_snapshot = {}
+ self._assignment_snapshot = None
self._cluster = client.cluster
self._cluster.request_update()
self._cluster.add_listener(WeakMethod(self._handle_metadata_update))
@@ -131,13 +137,12 @@ class ConsumerCoordinator(BaseCoordinator):
def _handle_metadata_update(self, cluster):
# if we encounter any unauthorized topics, raise an exception
- # TODO
- #if self._cluster.unauthorized_topics:
- # raise TopicAuthorizationError(self._cluster.unauthorized_topics)
+ if cluster.unauthorized_topics:
+ raise Errors.TopicAuthorizationFailedError(cluster.unauthorized_topics)
if self._subscription.subscribed_pattern:
topics = []
- for topic in cluster.topics():
+ for topic in cluster.topics(self.config['exclude_internal_topics']):
if self._subscription.subscribed_pattern.match(topic):
topics.append(topic)
@@ -146,7 +151,7 @@ class ConsumerCoordinator(BaseCoordinator):
# check if there are any changes to the metadata which should trigger
# a rebalance
- if self._subscription_metadata_changed():
+ if self._subscription_metadata_changed(cluster):
if (self.config['api_version'] >= (0, 9)
and self.config['group_id'] is not None):
@@ -159,20 +164,20 @@ class ConsumerCoordinator(BaseCoordinator):
self._subscription.assign_from_subscribed([
TopicPartition(topic, partition)
for topic in self._subscription.subscription
- for partition in self._partitions_per_topic[topic]
+ for partition in self._metadata_snapshot[topic]
])
- def _subscription_metadata_changed(self):
+ def _subscription_metadata_changed(self, cluster):
if not self._subscription.partitions_auto_assigned():
return False
- old_partitions_per_topic = self._partitions_per_topic
- self._partitions_per_topic = {}
+ metadata_snapshot = {}
for topic in self._subscription.group_subscription():
- partitions = self._cluster.partitions_for_topic(topic) or []
- self._partitions_per_topic[topic] = set(partitions)
+ partitions = cluster.partitions_for_topic(topic) or []
+ metadata_snapshot[topic] = set(partitions)
- if self._partitions_per_topic != old_partitions_per_topic:
+ if self._metadata_snapshot != metadata_snapshot:
+ self._metadata_snapshot = metadata_snapshot
return True
return False
@@ -184,8 +189,15 @@ class ConsumerCoordinator(BaseCoordinator):
def _on_join_complete(self, generation, member_id, protocol,
member_assignment_bytes):
+ # if we were the assignor, then we need to make sure that there have
+ # been no metadata updates since the rebalance begin. Otherwise, we
+ # won't rebalance again until the next metadata change
+ if self._assignment_snapshot and self._assignment_snapshot != self._metadata_snapshot:
+ self._subscription.mark_for_reassignment()
+ return
+
assignor = self._lookup_assignor(protocol)
- assert assignor, 'invalid assignment protocol: %s' % protocol
+ assert assignor, 'Coordinator selected invalid assignment protocol: %s' % protocol
assignment = ConsumerProtocol.ASSIGNMENT.decode(member_assignment_bytes)
@@ -235,6 +247,11 @@ class ConsumerCoordinator(BaseCoordinator):
self._subscription.group_subscribe(all_subscribed_topics)
self._client.set_topics(self._subscription.group_subscription())
+ # keep track of the metadata used for assignment so that we can check
+ # after rebalance completion whether anything has changed
+ self._cluster.request_update()
+ self._assignment_snapshot = self._metadata_snapshot
+
log.debug("Performing assignment for group %s using strategy %s"
" with subscriptions %s", self.group_id, assignor.name,
member_metadata)
@@ -264,6 +281,7 @@ class ConsumerCoordinator(BaseCoordinator):
" for group %s failed on_partitions_revoked",
self._subscription.listener, self.group_id)
+ self._assignment_snapshot = None
self._subscription.mark_for_reassignment()
def need_rejoin(self):
| KAFKA-3117: handle metadata updates during consumer rebalance | dpkp/kafka-python | diff --git a/test/test_coordinator.py b/test/test_coordinator.py
index 3435292..280fa70 100644
--- a/test/test_coordinator.py
+++ b/test/test_coordinator.py
@@ -85,7 +85,7 @@ def test_pattern_subscription(coordinator, api_version):
coordinator.config['api_version'] = api_version
coordinator._subscription.subscribe(pattern='foo')
assert coordinator._subscription.subscription == set([])
- assert coordinator._subscription_metadata_changed() is False
+ assert coordinator._subscription_metadata_changed({}) is False
assert coordinator._subscription.needs_partition_assignment is False
cluster = coordinator._client.cluster
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
cramjam==2.5.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/dpkp/kafka-python.git@506d023978e7273bd323c0750e3f77af259d257b#egg=kafka_python
lz4tools==1.3.1.2
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-sugar==0.9.6
python-snappy==0.7.3
six==1.17.0
termcolor==1.1.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
xxhash==3.2.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- cramjam==2.5.0
- lz4tools==1.3.1.2
- mock==5.2.0
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- six==1.17.0
- termcolor==1.1.0
- tomli==1.2.3
- xxhash==3.2.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_coordinator.py::test_pattern_subscription[api_version0]",
"test/test_coordinator.py::test_pattern_subscription[api_version1]",
"test/test_coordinator.py::test_pattern_subscription[api_version2]",
"test/test_coordinator.py::test_pattern_subscription[api_version3]"
]
| []
| [
"test/test_coordinator.py::test_init",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version0]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version1]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version2]",
"test/test_coordinator.py::test_autocommit_enable_api_version[api_version3]",
"test/test_coordinator.py::test_protocol_type",
"test/test_coordinator.py::test_group_protocols",
"test/test_coordinator.py::test_lookup_assignor",
"test/test_coordinator.py::test_join_complete",
"test/test_coordinator.py::test_subscription_listener",
"test/test_coordinator.py::test_subscription_listener_failure",
"test/test_coordinator.py::test_perform_assignment",
"test/test_coordinator.py::test_on_join_prepare",
"test/test_coordinator.py::test_need_rejoin",
"test/test_coordinator.py::test_refresh_committed_offsets_if_needed",
"test/test_coordinator.py::test_fetch_committed_offsets",
"test/test_coordinator.py::test_close",
"test/test_coordinator.py::test_commit_offsets_async",
"test/test_coordinator.py::test_commit_offsets_sync",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version0-foobar-True-None-False-False-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version1-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version2-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version3-foobar-False-None-False-False-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version4-foobar-True-error4-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version5-foobar-True-error5-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version6-foobar-True-error6-True-True-True-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version7-foobar-True-error7-True-True-False-True]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version8-foobar-True-None-True-True-False-False]",
"test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version9-None-True-None-False-False-True-False]",
"test/test_coordinator.py::test_send_offset_commit_request_fail",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version0-OffsetCommitRequest_v0]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version1-OffsetCommitRequest_v1]",
"test/test_coordinator.py::test_send_offset_commit_request_versions[api_version2-OffsetCommitRequest_v2]",
"test/test_coordinator.py::test_send_offset_commit_request_failure",
"test/test_coordinator.py::test_send_offset_commit_request_success",
"test/test_coordinator.py::test_handle_offset_commit_response[response0-GroupAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response1-OffsetMetadataTooLargeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response2-InvalidCommitOffsetSizeError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response3-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response4-GroupCoordinatorNotAvailableError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response5-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response6-RequestTimedOutError-True-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response7-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response8-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response9-CommitFailedError-False-True]",
"test/test_coordinator.py::test_handle_offset_commit_response[response10-InvalidTopicError-False-False]",
"test/test_coordinator.py::test_handle_offset_commit_response[response11-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_send_offset_fetch_request_fail",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version0-OffsetFetchRequest_v0]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version1-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version2-OffsetFetchRequest_v1]",
"test/test_coordinator.py::test_send_offset_fetch_request_failure",
"test/test_coordinator.py::test_send_offset_fetch_request_success",
"test/test_coordinator.py::test_handle_offset_fetch_response[response0-GroupLoadInProgressError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response1-NotCoordinatorForGroupError-True-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response2-UnknownMemberIdError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response3-IllegalGenerationError-False-True]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response4-TopicAuthorizationFailedError-False-False]",
"test/test_coordinator.py::test_handle_offset_fetch_response[response5-None-False-False]",
"test/test_coordinator.py::test_heartbeat"
]
| []
| Apache License 2.0 | 641 | [
"kafka/consumer/group.py",
"kafka/coordinator/consumer.py"
]
| [
"kafka/consumer/group.py",
"kafka/coordinator/consumer.py"
]
|
|
projectmesa__mesa-285 | 558b052b46c447eb1ee16da06ffb4603ec24178e | 2016-07-17 15:56:07 | 6db9efde7c659b9338fc8cf551f066cdba7031c3 | diff --git a/examples/Basic/Readme.md b/examples/Basic/Readme.md
new file mode 100644
index 00000000..d4ac4b96
--- /dev/null
+++ b/examples/Basic/Readme.md
@@ -0,0 +1,41 @@
+# Basic Grid with two agents
+
+## Summary
+
+A very basic example model to showcase the visulaization on web browser.
+
+A simple grid is dispalyed on browesr with two agents. The example does not
+have any agent motion involved. This example does not have any movenment of
+agents so as to keep it to the simplest of level possible.
+
+This model showcases following features:
+
+* A rectangular grid
+* Text Overlay on the agent's shape on CanvasGrid
+* ArrowHead shaped agent for displaying heading of the agent on CanvasGrid
+
+## Installation
+
+To install the dependencies use pip and the requirements.txt in this directory.
+e.g.
+
+```
+ $ pip install -r requirements.txt
+```
+
+## How to Run
+
+To run the model interactively, run ``run.py`` in this directory. e.g.
+
+```
+ $ python shapes_viz.py
+```
+
+Then open your browser to [http://127.0.0.1:8888/](http://127.0.0.1:8888/) and
+press Reset, then Run.
+
+## Files
+
+* ``basic/model.py: Defines the Basic model and agents.
+* ``basic/server.py``: Sets up the interactive visualization server.
+* ``run.py``: Launches a model visualization server.
diff --git a/examples/Basic/basic/model.py b/examples/Basic/basic/model.py
new file mode 100644
index 00000000..db832772
--- /dev/null
+++ b/examples/Basic/basic/model.py
@@ -0,0 +1,44 @@
+import random
+
+from mesa import Model, Agent
+from mesa.space import SingleGrid
+from mesa.time import RandomActivation
+
+
+class Walker(Agent):
+ def __init__(self, unique_id, pos, heading=(1, 0)):
+ self.unique_id = unique_id
+ self.pos = pos
+ self.heading = heading
+ self.headings = {(1, 0), (0, 1), (-1, 0), (0, -1)}
+
+
+class ShapesModel(Model):
+ def __init__(self, N, width=20, height=10):
+ self.running = True
+ self.N = N # num of agents
+ self.headings = ((1, 0), (0, 1), (-1, 0), (0, -1)) # tuples are fast
+ self.grid = SingleGrid(width, height, torus=False)
+ self.schedule = RandomActivation(self)
+ self.make_walker_agents()
+
+ def make_walker_agents(self):
+ unique_id = 0
+ while True:
+ if unique_id == self.N:
+ break
+ x = random.randrange(self.grid.width)
+ y = random.randrange(self.grid.height)
+ pos = (x, y)
+ heading = random.choice(self.headings)
+ # heading = (1, 0)
+ if self.grid.is_cell_empty(pos):
+ print("Creating agent {2} at ({0}, {1})"
+ .format(x, y, unique_id))
+ a = Walker(unique_id, pos, heading)
+ self.schedule.add(a)
+ self.grid.place_agent(a, pos)
+ unique_id += 1
+
+ def step(self):
+ self.schedule.step()
diff --git a/examples/Basic/basic/server.py b/examples/Basic/basic/server.py
new file mode 100644
index 00000000..11df9f62
--- /dev/null
+++ b/examples/Basic/basic/server.py
@@ -0,0 +1,45 @@
+import random
+
+from basic.model import Walker, ShapesModel
+from mesa.visualization.modules import CanvasGrid
+from mesa.visualization.ModularVisualization import ModularServer
+
+
+def agent_draw(agent):
+ portrayal = None
+ if agent is None:
+ # Actually this if part is unnecessary, but still keeping it for
+ # aesthetics
+ pass
+ elif isinstance(agent, Walker):
+ print("Uid: {0}, Heading: {1}".format(agent.unique_id, agent.heading))
+ portrayal = {"Shape": "arrowHead",
+ "Filled": "true",
+ "Layer": 2,
+ "Color": "green",
+ "Filled": "true",
+ "heading0": agent.heading[0],
+ "heading1": agent.heading[1],
+ "text": agent.unique_id,
+ "text_color": "white",
+ "scale": 0.8,
+ }
+ return portrayal
+
+
+def launch_basic():
+ width = 15
+ height = 10
+ num_agents = 2
+ pixel_ratio = 50
+ grid = CanvasGrid(agent_draw, width, height,
+ width*pixel_ratio, height*pixel_ratio)
+ server = ModularServer(ShapesModel, [grid], "Basic Example",
+ num_agents, width, height)
+ server.max_steps = 0
+ server.port = 8888
+ server.launch()
+
+if __name__ == "__main__":
+ random.seed(3)
+ launch_basic()
diff --git a/examples/Basic/requirements.txt b/examples/Basic/requirements.txt
new file mode 100644
index 00000000..da0b5b95
--- /dev/null
+++ b/examples/Basic/requirements.txt
@@ -0,0 +1,1 @@
+mesa
diff --git a/examples/Basic/run.py b/examples/Basic/run.py
new file mode 100644
index 00000000..23edc675
--- /dev/null
+++ b/examples/Basic/run.py
@@ -0,0 +1,3 @@
+from basic.server import launch_basic
+
+launch_basic()
diff --git a/examples/Readme.md b/examples/Readme.md
index 91399dc1..d7a8e056 100644
--- a/examples/Readme.md
+++ b/examples/Readme.md
@@ -2,7 +2,6 @@
This directory contains example models meant to test and demonstrate Mesa's features, and provide demonstrations for how to build and analyze agent-based models. For more information on each model, see its own Readme and documentation.
-
### Color Patches
A cellular automaton model where agents opinions are influenced by that of their neighbors. As the model evolves, color patches representing the prevailing opinion in a given area expand, contract, and sometimes disappear.
@@ -30,3 +29,6 @@ Completed code to go along with the [tutorial]() on making a simple model of how
### WolfSheep
Implementation of an ecological model of predation and reproduction, based on the NetLogo [Wolf Sheep Predation model](http://ccl.northwestern.edu/netlogo/models/WolfSheepPredation).
+
+### Basic
+Example of grid display and direction showing agents in the form of arrow-head shape.
diff --git a/mesa/__init__.py b/mesa/__init__.py
index 7d35c10d..d370701e 100644
--- a/mesa/__init__.py
+++ b/mesa/__init__.py
@@ -5,57 +5,14 @@ Mesa Agent-Based Modeling Framework
Core Objects: Model, and Agent.
"""
-import datetime as dt
-import random
+
+from .model import Model
+from .agent import Agent
+
+
+__all__ = ["Model", "Agent"]
__title__ = 'mesa'
__version__ = '0.7.5'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Project Mesa Team'
-
-
-class Model:
- """ Base class for models. """
- def __init__(self, seed=None):
- """ Create a new model. Overload this method with the actual code to
- start the model.
-
- Args:
- seed: seed for the random number generator
-
- Attributes:
- schedule: schedule object
- running: a bool indicating if the model should continue running
-
- """
- if seed is None:
- self.seed = dt.datetime.now()
- else:
- self.seed = seed
- random.seed(seed)
- self.running = True
- self.schedule = None
-
- def run_model(self):
- """ Run the model until the end condition is reached. Overload as
- needed.
-
- """
- while self.running:
- self.step()
-
- def step(self):
- """ A single step. Fill in here. """
- pass
-
-
-class Agent:
- """ Base class for a model agent. """
- def __init__(self, unique_id, model):
- """ Create a new agent. """
- self.unique_id = unique_id
- self.model = model
-
- def step(self, model):
- """ A single step of the agent. """
- pass
diff --git a/mesa/agent.py b/mesa/agent.py
new file mode 100644
index 00000000..caa872be
--- /dev/null
+++ b/mesa/agent.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+"""
+The agent class for Mesa framework.
+
+Core Objects: Agent
+
+"""
+
+
+class Agent:
+ """ Base class for a model agent. """
+ def __init__(self, unique_id, model):
+ """ Create a new agent. """
+ self.unique_id = unique_id
+ self.model = model
+
+ def step(self, model):
+ """ A single step of the agent. """
+ pass
diff --git a/mesa/model.py b/mesa/model.py
new file mode 100644
index 00000000..6e4d4241
--- /dev/null
+++ b/mesa/model.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+"""
+The model class for Mesa framework.
+
+Core Objects: Model
+
+"""
+import datetime as dt
+import random
+
+
+class Model:
+ """ Base class for models. """
+ def __init__(self, seed=None):
+ """ Create a new model. Overload this method with the actual code to
+ start the model.
+
+ Args:
+ seed: seed for the random number generator
+
+ Attributes:
+ schedule: schedule object
+ running: a bool indicating if the model should continue running
+
+ """
+ if seed is None:
+ self.seed = dt.datetime.now()
+ else:
+ self.seed = seed
+ random.seed(seed)
+ self.running = True
+ self.schedule = None
+
+ def run_model(self):
+ """ Run the model until the end condition is reached. Overload as
+ needed.
+
+ """
+ while self.running:
+ self.step()
+
+ def step(self):
+ """ A single step. Fill in here. """
+ pass
diff --git a/mesa/space.py b/mesa/space.py
index 4695b953..4e96a21b 100644
--- a/mesa/space.py
+++ b/mesa/space.py
@@ -41,9 +41,9 @@ def accept_tuple_argument(wrapped_function):
class Grid:
""" Base class for a square grid.
- Grid cells are indexed by [y][x], where [0][0] is assumed to be -- top-left
- and [height-1][width-1] is the bottom-right. If a grid is toroidal, the top
- and bottom, and left and right, edges wrap to each other
+ Grid cells are indexed by [x][y], where [0][0] is assumed to be the
+ bottom-left and [width-1][height-1] is the top-right. If a grid is
+ toroidal, the top and bottom, and left and right, edges wrap to each other
Properties:
width, height: The grid's width and height.
@@ -59,21 +59,24 @@ class Grid:
coord_iter: Returns coordinates as well as cell contents.
place_agent: Positions an agent on the grid, and set its pos variable.
move_agent: Moves an agent from its current position to a new position.
- iter_neighborhood: Returns an iterator over cell coordinates that are in the
- neighborhood of a certain point.
+ iter_neighborhood: Returns an iterator over cell coordinates that are
+ in the neighborhood of a certain point.
torus_adj: Converts coordinate, handles torus looping.
- out_of_bounds: Determines whether position is off the grid, returns the out of bounds coordinate.
- iter_cell_list_contents: Returns an iterator of the contents of the cells identified in cell_list.
- get_cell_list_contents: Returns a list of the contents of the cells identified in cell_list.
+ out_of_bounds: Determines whether position is off the grid, returns
+ the out of bounds coordinate.
+ iter_cell_list_contents: Returns an iterator of the contents of the
+ cells identified in cell_list.
+ get_cell_list_contents: Returns a list of the contents of the cells
+ identified in cell_list.
remove_agent: Removes an agent from the grid.
is_cell_empty: Returns a bool of the contents of a cell.
"""
- def __init__(self, height, width, torus):
+ def __init__(self, width, height, torus):
""" Create a new grid.
Args:
- height, width: The height and width of the grid
+ width, height: The width and height of the grid
torus: Boolean whether the grid wraps or not.
"""
@@ -83,11 +86,11 @@ class Grid:
self.grid = []
- for y in range(self.height):
- row = []
- for x in range(self.width):
- row.append(self.default_val())
- self.grid.append(row)
+ for x in range(self.width):
+ col = []
+ for y in range(self.height):
+ col.append(self.default_val())
+ self.grid.append(col)
@staticmethod
def default_val():
@@ -104,9 +107,9 @@ class Grid:
def coord_iter(self):
""" An iterator that returns coordinates as well as cell contents. """
- for row in range(self.height):
- for col in range(self.width):
- yield self.grid[row][col], col, row # agent, x, y
+ for row in range(self.width):
+ for col in range(self.height):
+ yield self.grid[row][col], row, col # agent, x, y
def neighbor_iter(self, pos, moore=True):
""" Iterate over position neighbors.
@@ -136,10 +139,10 @@ class Grid:
radius: radius, in cells, of neighborhood to get.
Returns:
- A list of coordinate tuples representing the neighborhood;
- With radius 1, at most 9 if
- Moore, 5 if Von Neumann
- (8 and 4 if not including the center).
+ A list of coordinate tuples representing the neighborhood. For
+ example with radius 1, it will return list with number of elements
+ equals at most 9 (8) if Moore, 5 (4) if Von Neumann (if not
+ including the center).
"""
x, y = pos
@@ -156,7 +159,7 @@ class Grid:
continue
# Skip if not a torus and new coords out of bounds.
if not self.torus and (not (0 <= dx + x < self.width) or
- not (0 <= dy + y < self.height)):
+ not (0 <= dy + y < self.height)):
continue
px = self.torus_adj(x + dx, self.width)
@@ -250,7 +253,10 @@ class Grid:
return coord
def out_of_bounds(self, pos):
- """ Determines whether position is off the grid, returns the out of bounds coordinate."""
+ """
+ Determines whether position is off the grid, returns the out of
+ bounds coordinate.
+ """
x, y = pos
return x < 0 or x >= self.width or y < 0 or y >= self.height
@@ -265,7 +271,7 @@ class Grid:
"""
return (
- self[y][x] for x, y in cell_list if not self.is_cell_empty((x, y)))
+ self[x][y] for x, y in cell_list if not self.is_cell_empty((x, y)))
@accept_tuple_argument
def get_cell_list_contents(self, cell_list):
@@ -301,32 +307,32 @@ class Grid:
def _place_agent(self, pos, agent):
""" Place the agent at the correct location. """
x, y = pos
- self.grid[y][x] = agent
+ self.grid[x][y] = agent
def _remove_agent(self, pos, agent):
""" Remove the agent from the given location. """
x, y = pos
- self.grid[y][x] = None
+ self.grid[x][y] = None
def is_cell_empty(self, pos):
""" Returns a bool of the contents of a cell. """
x, y = pos
- return True if self.grid[y][x] == self.default_val() else False
+ return True if self.grid[x][y] == self.default_val() else False
class SingleGrid(Grid):
""" Grid where each cell contains exactly at most one object. """
empties = []
- def __init__(self, height, width, torus):
+ def __init__(self, width, height, torus):
""" Create a new single-item grid.
Args:
- height, width: The height and width of the grid
+ width, height: The width and width of the grid
torus: Boolean whether the grid wraps or not.
"""
- super().__init__(height, width, torus)
+ super().__init__(width, height, torus)
# Add all cells to the empties list.
self.empties = list(itertools.product(
*(range(self.width), range(self.height))))
@@ -388,9 +394,9 @@ class SingleGrid(Grid):
class MultiGrid(Grid):
""" Grid where each cell can contain more than one object.
- Grid cells are indexed by [y][x], where [0][0] is assumed to be -- top-left
- and [height-1][width-1] is the bottom-right. If a grid is toroidal, the top
- and bottom, and left and right, edges wrap to each other.
+ Grid cells are indexed by [x][y], where [0][0] is assumed to be at
+ bottom-left and [width-1][height-1] is the top-right. If a grid is
+ toroidal, the top and bottom, and left and right, edges wrap to each other.
Each grid cell holds a set object.
@@ -412,12 +418,12 @@ class MultiGrid(Grid):
def _place_agent(self, pos, agent):
""" Place the agent at the correct location. """
x, y = pos
- self.grid[y][x].add(agent)
+ self.grid[x][y].add(agent)
def _remove_agent(self, pos, agent):
""" Remove the agent from the given location. """
x, y = pos
- self.grid[y][x].remove(agent)
+ self.grid[x][y].remove(agent)
@accept_tuple_argument
def iter_cell_list_contents(self, cell_list):
@@ -430,7 +436,7 @@ class MultiGrid(Grid):
"""
return itertools.chain.from_iterable(
- self[y][x] for x, y in cell_list if not self.is_cell_empty((x, y)))
+ self[x][y] for x, y in cell_list if not self.is_cell_empty((x, y)))
class ContinuousSpace:
@@ -471,7 +477,7 @@ class ContinuousSpace:
self.cell_width = (self.x_max - self.x_min) / grid_width
self.cell_height = (self.y_max - self.y_min) / grid_height
- self._grid = MultiGrid(grid_height, grid_width, torus)
+ self._grid = MultiGrid(grid_width, grid_height, torus)
def place_agent(self, agent, pos):
""" Place a new agent in the space.
@@ -525,7 +531,7 @@ class ContinuousSpace:
cell_radius = math.ceil(radius / scale)
cell_pos = self._point_to_cell(pos)
possible_objs = self._grid.get_neighbors(cell_pos,
- True, True, cell_radius)
+ True, True, cell_radius)
neighbors = []
# Iterate over candidates and check actual distance.
for obj in possible_objs:
diff --git a/mesa/time.py b/mesa/time.py
index ab510fae..23c354b8 100644
--- a/mesa/time.py
+++ b/mesa/time.py
@@ -142,7 +142,7 @@ class StagedActivation(BaseScheduler):
stage_time = 1
def __init__(self, model, stage_list=["step"], shuffle=False,
- shuffle_between_stages=False):
+ shuffle_between_stages=False):
""" Create an empty Staged Activation schedule.
Args:
diff --git a/mesa/visualization/ModularVisualization.md b/mesa/visualization/ModularVisualization.md
index 23f1ebe9..f0623ba6 100644
--- a/mesa/visualization/ModularVisualization.md
+++ b/mesa/visualization/ModularVisualization.md
@@ -28,15 +28,21 @@ Mesa already comes with some pre-built modules. Using the built-ins allow you to
One built-in module is **CanvasGrid**, which you can use to visualize objects located on grid cells. The CanvasGrid will cover a majority of agent-based models, particularly the simpler ones.
CanvasGrid iterates over every object in every cell of your model's grid (it assumes that your model has a grid named **grid**) and converts it into a dictionary which defines how it will be drawn. It does this via a **portrayal_method**: a function which the user defines, which takes an object as an input and outputs a dictionary with the following keys:
- "Shape": Can be either "circle" or "rect"
+ "Shape": Can be "circle", "rect" or "arrowHead"
For Circles:
"r": The radius, defined as a fraction of cell size. r=1 will fill the entire cell.
For rectangles:
"w", "h": The width and height of the rectangle, which are in fractions of cell width and height.
+ For arrowHead:
+ "scale": Proportion scaling as a fraction of cell size. shape=0.5 will fit the arrowHead into half of the cell.
+ "heading0": represents x direction unit vector.
+ "heading1": represents y direction unit vector.
"Color": The color to draw the shape in; needs to be a valid HTML color, e.g."Red" or "#AA08F8"
"Filled": either "true" or "false", and determines whether the shape is filled or not.
"Layer": Layer number of 0 or above; higher-numbered layers are drawn above lower-numbered layers.
- (Shapes also have "x" and "x" coordinates, for the x and y of the grid cell in which it is, but CanvasGrid adds those automatically).
+ "text": Text to overlay on top of the shape. Normally, agent's unique_id is used .
+ "text_color": Color of the text overlay.
+ (Shapes also have "x" and "y" coordinates, for the x and y of the grid cell in which it is, but CanvasGrid adds those automatically).
For example, suppose for a Schelling model, we want to draw all agents as circles; red ones for the majority (agent type=0), and blue ones for the minority (agent type=1). The function to do this might look like this:
@@ -150,6 +156,3 @@ Data to visualize arrive over the websocket as a list. For each index of the lis
Currently, module JavaScript files live in the *mesa/visualization/templates* directory, and the Python files live in *mesa/visualization/modules*.
When creating a new module, the Python and JavaScript code need to be written in synch: the module Python-side **render** method needs to output data in the exact same format that the JavaScript **render** function receives as an input.
-
-
-
diff --git a/mesa/visualization/modules/CanvasGridVisualization.py b/mesa/visualization/modules/CanvasGridVisualization.py
index 450c3b39..5f0a70fc 100644
--- a/mesa/visualization/modules/CanvasGridVisualization.py
+++ b/mesa/visualization/modules/CanvasGridVisualization.py
@@ -52,26 +52,26 @@ class CanvasGrid(VisualizationElement):
"""
package_includes = ["GridDraw.js", "CanvasModule.js"]
portrayal_method = None # Portrayal function
- canvas_height = 500
canvas_width = 500
+ canvas_height = 500
- def __init__(self, portrayal_method, grid_height, grid_width,
- canvas_height=500, canvas_width=500):
+ def __init__(self, portrayal_method, grid_width, grid_height,
+ canvas_width=500, canvas_height=500):
""" Instantiate a new CanvasGrid.
Args:
portrayal_method: function to convert each object on the grid to
a portrayal, as described above.
- grid_height, grid_width: Size of the grid, in cells.
+ grid_width, grid_height: Size of the grid, in cells.
canvas_height, canvas_width: Size of the canvas to draw in the
client, in pixels. (default: 500x500)
"""
self.portrayal_method = portrayal_method
- self.grid_height = grid_height
self.grid_width = grid_width
- self.canvas_height = canvas_height
+ self.grid_height = grid_height
self.canvas_width = canvas_width
+ self.canvas_height = canvas_height
new_element = ("new CanvasModule({}, {}, {}, {})"
.format(self.canvas_width, self.canvas_height,
@@ -81,8 +81,8 @@ class CanvasGrid(VisualizationElement):
def render(self, model):
grid_state = defaultdict(list)
- for y in range(model.grid.height):
- for x in range(model.grid.width):
+ for x in range(model.grid.width):
+ for y in range(model.grid.height):
cell_objects = model.grid.get_cell_list_contents([(x, y)])
for obj in cell_objects:
portrayal = self.portrayal_method(obj)
diff --git a/mesa/visualization/templates/CanvasModule.js b/mesa/visualization/templates/CanvasModule.js
index 43e537b8..4861c3e3 100644
--- a/mesa/visualization/templates/CanvasModule.js
+++ b/mesa/visualization/templates/CanvasModule.js
@@ -11,7 +11,7 @@ var CanvasModule = function(canvas_width, canvas_height, grid_width, grid_height
// Create the context and the drawing controller:
var context = canvas.getContext("2d");
- var canvasDraw = new GridVisualization(canvas_width, canvas_height, grid_height, grid_width, context);
+ var canvasDraw = new GridVisualization(canvas_width, canvas_height, grid_width, grid_height, context);
this.render = function(data) {
canvasDraw.resetCanvas();
@@ -24,4 +24,4 @@ var CanvasModule = function(canvas_width, canvas_height, grid_width, grid_height
canvasDraw.resetCanvas();
};
-};
\ No newline at end of file
+};
diff --git a/mesa/visualization/templates/GridDraw.js b/mesa/visualization/templates/GridDraw.js
index a195c6b5..3ffd5cd3 100644
--- a/mesa/visualization/templates/GridDraw.js
+++ b/mesa/visualization/templates/GridDraw.js
@@ -1,77 +1,82 @@
-/*
+/**
Mesa Canvas Grid Visualization
====================================================================
-This is JavaScript code to visualize a Mesa Grid or MultiGrid state using the
+This is JavaScript code to visualize a Mesa Grid or MultiGrid state using the
HTML5 Canvas. Here's how it works:
On the server side, the model developer will have assigned a portrayal to each
agent type. The visualization then loops through the grid, for each object adds
-a JSON object to an inner list (keyed on layer) of lists to be sent to the browser.
+a JSON object to an inner list (keyed on layer) of lists to be sent to the
+browser.
-Each JSON object to be drawn contains the following fields: Shape (currently
-only rectanges and circles are supported), x, y, Color, Filled (boolean), Layer;
-circles also get a Radius, while rectangles get x and y sizes. The latter values
-are all between [0, 1] and get scaled to the grid cell.
+Each JSON object to be drawn contains the following fields: Shape (currently
+only rectanges and circles are supported), x, y, Color, Filled (boolean),
+Layer; circles also get a Radius, while rectangles get x and y sizes. The
+latter values are all between [0, 1] and get scaled to the grid cell.
-The browser (this code, in fact) then iteratively draws them in, one layer at a time. Thus, it
-should be possible to turn different layers on and off.
+The browser (this code, in fact) then iteratively draws them in, one layer at a
+time. Thus, it should be possible to turn different layers on and off.
-Here's a sample input, for a 2x2 grid with one layer being cell colors and the other agent
-locations, represented by circles:
+Here's a sample input, for a 2x2 grid with one layer being cell colors and the
+other agent locations, represented by circles:
{"Shape": "rect", "x": 0, "y": 0, "Color": "#00aa00", "Filled": "true", "Layer": 0}
-{0:[{"Shape": "rect", "x": 0, "y": 0, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0},
- {"Shape": "rect", "x": 0, "y": 0, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0},
- {"Shape": "rect", "x": 0, "y": 0, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0},
- {"Shape": "rect", "x": 0, "y": 0, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0}
+{0:[
+ {"Shape": "rect", "x": 0, "y": 0, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0},
+ {"Shape": "rect", "x": 0, "y": 1, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0},
+ {"Shape": "rect", "x": 1, "y": 0, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0},
+ {"Shape": "rect", "x": 1, "y": 1, "w": 1, "h": 1, "Color": "#00aa00", "Filled": "true", "Layer": 0}
],
- 1: [
+ 1:[
{"Shape": "circle", "x": 0, "y": 0, "r": 0.5, "Color": "#AAAAAA", "Filled": "true", "Layer": 1, "text": 'A', "text_color": "white"},
{"Shape": "circle", "x": 1, "y": 1, "r": 0.5, "Color": "#AAAAAA", "Filled": "true", "Layer": 1, "text": 'B', "text_color": "white"}
+ {"Shape": "arrowHead", "x": 1, "y": 0, "heading0": -1, heading1: 0, "scale": 0.5, "Color": "green", "Filled": "true", "Layer": 1, "text": 'C', "text_color": "white"}
]
}
*/
-var GridVisualization = function(height, width, gridHeight, gridWidth, context) {
- var height = height;
+var GridVisualization = function(width, height, gridWidth, gridHeight, context) {
var width = width;
- var gridHeight = gridHeight;
+ var height = height;
var gridWidth = gridWidth;
+ var gridHeight = gridHeight;
var context = context;
// Find cell size:
- var cellHeight = Math.floor(height / gridHeight);
var cellWidth = Math.floor(width / gridWidth);
+ var cellHeight = Math.floor(height / gridHeight);
+ // Find max radius of the circle that can be inscribed (fit) into the
+ // cell of the grid.
var maxR = Math.min(cellHeight, cellWidth)/2 - 1;
-
- this.drawLayer = function(portrayalLayer) {
+ // Calls the appropriate shape(agent)
+ this.drawLayer = function(portrayalLayer) {
for (var i in portrayalLayer) {
var p = portrayalLayer[i];
if (p.Shape == "rect")
- this.drawRectange(p.x, p.y, p.w, p.h, p.Color, p.Filled, p.text, p.text_color);
- if (p.Shape == "circle")
+ this.drawRectangle(p.x, p.y, p.w, p.h, p.Color, p.Filled, p.text, p.text_color);
+ else if (p.Shape == "circle")
this.drawCircle(p.x, p.y, p.r, p.Color, p.Filled, p.text, p.text_color);
+ else if (p.Shape == "arrowHead")
+ this.drawArrowHead(p.x, p.y, p.heading0, p.heading1, p.scale, p.Color, p.Filled, p.text, p.text_color);
}
};
+ // DRAWING METHODS
+ // =====================================================================
-
- // DRAWING METHODS
- // =====================================================================
-
/**
- Draw a circle in the specified grid cell.
- x, y: Grid coords
- r: Radius, as a multiple of cell size
- color: Code for the fill color
- fill: Boolean for whether or not to fill the circle.
- text: Inscribed text in rectangle.
- text_color: Color of the inscribed text.
+ Draw a circle in the specified grid cell.
+ x, y: Grid coords
+ r: Radius, as a multiple of cell size
+ color: Code for the fill color
+ fill: Boolean for whether or not to fill the circle.
+ text: Inscribed text in rectangle.
+ text_color: Color of the inscribed text.
*/
this.drawCircle = function(x, y, radius, color, fill, text, text_color) {
var cx = (x + 0.5) * cellWidth;
@@ -89,7 +94,7 @@ var GridVisualization = function(height, width, gridHeight, gridWidth, context)
context.fillStyle = color;
context.fill();
}
-
+
// This part draws the text inside the Circle
if (text !== undefined) {
context.fillStyle = text_color;
@@ -101,15 +106,16 @@ var GridVisualization = function(height, width, gridHeight, gridWidth, context)
};
/**
- Draw a rectangle in the specified grid cell.
- x, y: Grid coords
- w, h: Width and height, [0, 1]
- color: Color for the rectangle
- fill: Boolean, whether to fill or not.
- text: Inscribed text in rectangle.
- text_color: Color of the inscribed text.
+ Draw a rectangle in the specified grid cell.
+ x, y: Grid coords
+ w, h: Width and height, [0, 1]
+ color: Color for the rectangle
+ fill: Boolean, whether to fill or not.
+ text: Inscribed text in rectangle.
+ text_color: Color of the inscribed text.
*/
- this.drawRectange = function(x, y, w, h, color, fill, text, text_color) {
+ this.drawRectangle = function(x, y, w, h, color, fill, text, text_color) {
+ y = gridHeight - y - 1;
context.beginPath();
var dx = w * cellWidth;
var dy = h * cellHeight;
@@ -136,8 +142,89 @@ var GridVisualization = function(height, width, gridHeight, gridWidth, context)
}
};
+ /**
+ Draw an arrow head in the specified grid cell.
+ x, y: Grid coords
+ s: Scaling of the arrowHead with respect to cell size[0, 1]
+ color: Color for the shape
+ fill: Boolean, whether to fill or not.
+ text: Inscribed text in shape.
+ text_color: Color of the inscribed text.
+ */
+ this.drawArrowHead = function(x, y, heading0, heading1, scale, color, fill, text, text_color) {
+ y = gridHeight - y -1;
+ arrowR = maxR * scale;
+ var cx = (x + 0.5) * cellWidth;
+ var cy = (y + 0.5) * cellHeight;
+ if (heading0 === 0 && heading1 === 1) {
+ p1_x = cx;
+ p1_y = cy - arrowR;
+ p2_x = cx - arrowR;
+ p2_y = cy + arrowR;
+ p3_x = cx;
+ p3_y = cy + 0.8*arrowR;
+ p4_x = cx + arrowR;
+ p4_y = cy + arrowR;
+ }
+ else if (heading0 === 1 && heading1 === 0) {
+ p1_x = cx + arrowR;
+ p1_y = cy;
+ p2_x = cx - arrowR;
+ p2_y = cy - arrowR;
+ p3_x = cx - 0.8*arrowR;
+ p3_y = cy;
+ p4_x = cx - arrowR;
+ p4_y = cy + arrowR;
+ }
+ else if (heading0 === 0 && heading1 === (-1)) {
+ p1_x = cx;
+ p1_y = cy + arrowR;
+ p2_x = cx - arrowR;
+ p2_y = cy - arrowR;
+ p3_x = cx;
+ p3_y = cy - 0.8*arrowR;
+ p4_x = cx + arrowR;
+ p4_y = cy - arrowR;
+ }
+ else if (heading0 === (-1) && heading1 === 0) {
+ p1_x = cx - arrowR;
+ p1_y = cy;
+ p2_x = cx + arrowR;
+ p2_y = cy - arrowR;
+ p3_x = cx + 0.8*arrowR;
+ p3_y = cy;
+ p4_x = cx + arrowR;
+ p4_y = cy + arrowR;
+ }
+
+ context.beginPath();
+ context.moveTo(p1_x, p1_y);
+ context.lineTo(p2_x, p2_y);
+ context.lineTo(p3_x, p3_y);
+ context.lineTo(p4_x, p4_y);
+ context.closePath();
+
+ context.strokeStyle = color;
+ context.stroke();
+
+ if (fill) {
+ context.fillStyle = color;
+ context.fill();
+ }
+
+ // This part draws the text inside the ArrowHead
+ if (text !== undefined) {
+ var cx = (x + 0.5) * cellWidth;
+ var cy = (y + 0.5) * cellHeight;
+ context.fillStyle = text_color
+ context.textAlign = 'center';
+ context.textBaseline= 'middle';
+ context.fillText(text, cx, cy);
+ }
+ };
+
/**
- Draw Grid lines in the full gird
+ Draw Grid lines in the full gird
*/
this.drawGridLines = function() {
@@ -161,7 +248,7 @@ var GridVisualization = function(height, width, gridHeight, gridWidth, context)
};
this.resetCanvas = function() {
- context.clearRect(0, 0, height, width);
+ context.clearRect(0, 0, width, height);
context.beginPath();
};
| Add new shapes especially for direction indication in CanvasGrid
Presently Rectangle and Circle shapes are only supported in CanvasGrid. Sometime, there is a requirement of showing the heading (direction s.t. N/S/E/W) of the agent as well.Some more shapes might be introduced for this very purpose. It would be nice to have support of images as well. | projectmesa/mesa | diff --git a/tests/test_batchrunner.py b/tests/test_batchrunner.py
index fb917fea..a900f6f4 100644
--- a/tests/test_batchrunner.py
+++ b/tests/test_batchrunner.py
@@ -61,7 +61,7 @@ class TestBatchRunner(unittest.TestCase):
self.params = {
'model_param': range(3),
'agent_param': [1, 8],
- }
+ }
self.iterations = 17
self.batch = BatchRunner(
MockModel,
diff --git a/tests/test_grid.py b/tests/test_grid.py
index 20f9806f..e2fd7f27 100644
--- a/tests/test_grid.py
+++ b/tests/test_grid.py
@@ -6,10 +6,19 @@ import unittest
from mesa.space import Grid, SingleGrid, MultiGrid
# Initial agent positions for testing
-# X ---- >
-TEST_GRID = [[0, 1, 0, 1, 0], # Y
- [0, 1, 1, 0, 0], # |
- [0, 0, 0, 1, 0]] # V
+#
+# --- visual aid ----
+# 0 0 0
+# 1 1 0
+# 0 1 0
+# 1 0 1
+# 0 0 1
+# -------------------
+TEST_GRID = [
+ [0, 1, 0, 1, 0],
+ [0, 0, 1, 1, 0],
+ [1, 1, 0, 0, 0]
+]
class MockAgent:
@@ -32,12 +41,14 @@ class TestBaseGrid(unittest.TestCase):
'''
Create a test non-toroidal grid and populate it with Mock Agents
'''
- self.grid = Grid(3, 5, self.torus)
+ width = 3 # width of grid
+ height = 5 # height of grid
+ self.grid = Grid(width, height, self.torus)
self.agents = []
counter = 0
- for y in range(3):
- for x in range(5):
- if TEST_GRID[y][x] == 0:
+ for x in range(width):
+ for y in range(height):
+ if TEST_GRID[x][y] == 0:
continue
counter += 1
# Create and place the mock agent
@@ -51,7 +62,7 @@ class TestBaseGrid(unittest.TestCase):
'''
for agent in self.agents:
x, y = agent.pos
- assert self.grid[y][x] == agent
+ assert self.grid[x][y] == agent
def test_cell_agent_reporting(self):
'''
@@ -98,7 +109,10 @@ class TestBaseGrid(unittest.TestCase):
neighborhood = self.grid.get_neighborhood((1, 1), moore=True)
assert len(neighborhood) == 8
- neighborhood = self.grid.get_neighborhood((4, 1), moore=True)
+ neighborhood = self.grid.get_neighborhood((1, 4), moore=False)
+ assert len(neighborhood) == 3
+
+ neighborhood = self.grid.get_neighborhood((1, 4), moore=True)
assert len(neighborhood) == 5
neighborhood = self.grid.get_neighborhood((0, 0), moore=False)
@@ -108,14 +122,14 @@ class TestBaseGrid(unittest.TestCase):
assert len(neighbors) == 0
neighbors = self.grid.get_neighbors((4, 1), moore=True)
- assert len(neighbors) == 2
+ assert len(neighbors) == 0
neighbors = self.grid.get_neighbors((1, 1), moore=False,
include_center=True)
assert len(neighbors) == 3
- neighbors = self.grid.get_neighbors((3, 1), moore=False, radius=2)
- assert len(neighbors) == 4
+ neighbors = self.grid.get_neighbors((1, 3), moore=False, radius=2)
+ assert len(neighbors) == 2
def test_coord_iter(self):
ci = self.grid.coord_iter()
@@ -129,9 +143,9 @@ class TestBaseGrid(unittest.TestCase):
# first agent in the second space
second = next(ci)
assert second[0].unique_id == 1
- assert second[0].pos == (1, 0)
- assert second[1] == 1
- assert second[2] == 0
+ assert second[0].pos == (0, 1)
+ assert second[1] == 0
+ assert second[2] == 1
class TestBaseGridTorus(TestBaseGrid):
@@ -149,24 +163,24 @@ class TestBaseGridTorus(TestBaseGrid):
neighborhood = self.grid.get_neighborhood((1, 1), moore=True)
assert len(neighborhood) == 8
- neighborhood = self.grid.get_neighborhood((4, 1), moore=True)
+ neighborhood = self.grid.get_neighborhood((1, 4), moore=True)
assert len(neighborhood) == 8
neighborhood = self.grid.get_neighborhood((0, 0), moore=False)
assert len(neighborhood) == 4
- neighbors = self.grid.get_neighbors((4, 1), moore=False)
- assert len(neighbors) == 0
+ neighbors = self.grid.get_neighbors((1, 4), moore=False)
+ assert len(neighbors) == 1
- neighbors = self.grid.get_neighbors((4, 1), moore=True)
- assert len(neighbors) == 2
+ neighbors = self.grid.get_neighbors((1, 4), moore=True)
+ assert len(neighbors) == 3
neighbors = self.grid.get_neighbors((1, 1), moore=False,
include_center=True)
assert len(neighbors) == 3
- neighbors = self.grid.get_neighbors((3, 1), moore=False, radius=2)
- assert len(neighbors) == 4
+ neighbors = self.grid.get_neighbors((1, 3), moore=False, radius=2)
+ assert len(neighbors) == 2
class TestSingleGrid(unittest.TestCase):
@@ -181,12 +195,14 @@ class TestSingleGrid(unittest.TestCase):
'''
Create a test non-toroidal grid and populate it with Mock Agents
'''
- self.grid = SingleGrid(3, 5, True)
+ width = 3
+ height = 5
+ self.grid = SingleGrid(width, height, True)
self.agents = []
counter = 0
- for y in range(3):
- for x in range(5):
- if TEST_GRID[y][x] == 0:
+ for x in range(width):
+ for y in range(height):
+ if TEST_GRID[x][y] == 0:
continue
counter += 1
# Create and place the mock agent
@@ -199,21 +215,23 @@ class TestSingleGrid(unittest.TestCase):
Test the SingleGrid empty count and enforcement.
'''
- assert len(self.grid.empties) == 10
+ assert len(self.grid.empties) == 9
a = MockAgent(100, None)
with self.assertRaises(Exception):
- self.grid._place_agent((1, 0), a)
+ self.grid._place_agent((0, 1), a)
# Place the agent in an empty cell
self.grid.position_agent(a)
+ # Test whether after placing, the empty cells are reduced by 1
assert a.pos not in self.grid.empties
- assert len(self.grid.empties) == 9
+ assert len(self.grid.empties) == 8
for i in range(10):
self.grid.move_to_empty(a)
- assert len(self.grid.empties) == 9
+ assert len(self.grid.empties) == 8
# Place agents until the grid is full
- for i in range(9):
+ empty_cells = len(self.grid.empties)
+ for i in range(empty_cells):
a = MockAgent(101 + i, None)
self.grid.position_agent(a)
assert len(self.grid.empties) == 0
@@ -224,7 +242,17 @@ class TestSingleGrid(unittest.TestCase):
with self.assertRaises(Exception):
self.move_to_empty(self.agents[0])
+
# Number of agents at each position for testing
+# Initial agent positions for testing
+#
+# --- visual aid ----
+# 0 0 0
+# 2 0 3
+# 0 5 0
+# 1 1 0
+# 0 0 0
+# -------------------
TEST_MULTIGRID = [[0, 1, 0, 2, 0],
[0, 1, 5, 0, 0],
[0, 0, 0, 3, 0]]
@@ -241,12 +269,14 @@ class TestMultiGrid(unittest.TestCase):
'''
Create a test non-toroidal grid and populate it with Mock Agents
'''
- self.grid = MultiGrid(3, 5, self.torus)
+ width = 3
+ height = 5
+ self.grid = MultiGrid(width, height, self.torus)
self.agents = []
counter = 0
- for y in range(3):
- for x in range(5):
- for i in range(TEST_MULTIGRID[y][x]):
+ for x in range(width):
+ for y in range(height):
+ for i in range(TEST_MULTIGRID[x][y]):
counter += 1
# Create and place the mock agent
a = MockAgent(counter, None)
@@ -259,7 +289,7 @@ class TestMultiGrid(unittest.TestCase):
'''
for agent in self.agents:
x, y = agent.pos
- assert agent in self.grid[y][x]
+ assert agent in self.grid[x][y]
def test_neighbors(self):
'''
@@ -269,21 +299,21 @@ class TestMultiGrid(unittest.TestCase):
neighborhood = self.grid.get_neighborhood((1, 1), moore=True)
assert len(neighborhood) == 8
- neighborhood = self.grid.get_neighborhood((4, 1), moore=True)
+ neighborhood = self.grid.get_neighborhood((1, 4), moore=True)
assert len(neighborhood) == 8
neighborhood = self.grid.get_neighborhood((0, 0), moore=False)
assert len(neighborhood) == 4
- neighbors = self.grid.get_neighbors((4, 1), moore=False)
+ neighbors = self.grid.get_neighbors((1, 4), moore=False)
assert len(neighbors) == 0
- neighbors = self.grid.get_neighbors((4, 1), moore=True)
+ neighbors = self.grid.get_neighbors((1, 4), moore=True)
assert len(neighbors) == 5
neighbors = self.grid.get_neighbors((1, 1), moore=False,
include_center=True)
assert len(neighbors) == 7
- neighbors = self.grid.get_neighbors((3, 1), moore=False, radius=2)
+ neighbors = self.grid.get_neighbors((1, 3), moore=False, radius=2)
assert len(neighbors) == 11
diff --git a/tests/test_time.py b/tests/test_time.py
index 7c3164f9..81fae185 100644
--- a/tests/test_time.py
+++ b/tests/test_time.py
@@ -5,7 +5,8 @@ Test the advanced schedulers.
from unittest import TestCase
from unittest.mock import patch
from mesa import Model, Agent
-from mesa.time import BaseScheduler, StagedActivation, RandomActivation, SimultaneousActivation
+from mesa.time import (BaseScheduler, StagedActivation, RandomActivation,
+ SimultaneousActivation)
RANDOM = 'random'
STAGED = 'staged'
@@ -37,13 +38,14 @@ class MockModel(Model):
Creates a Model instance with a schedule
Args:
- shuffle (Bool): whether or not to instantiate a scheduler with
- shuffling.
- This option is only used for StagedActivation schedulers.
+ shuffle (Bool): whether or not to instantiate a scheduler
+ with shuffling.
+ This option is only used for
+ StagedActivation schedulers.
activation (str): which kind of scheduler to use.
- 'random' will create a RandomActivation scheduler.
- 'staged' will create a StagedActivation scheduler.
+ 'random' creates a RandomActivation scheduler.
+ 'staged' creates a StagedActivation scheduler.
The default scheduler is a BaseScheduler.
'''
self.log = []
@@ -51,7 +53,8 @@ class MockModel(Model):
# Make scheduler
if activation == STAGED:
model_stages = ["stage_one", "stage_two"]
- self.schedule = StagedActivation(self, model_stages, shuffle=shuffle)
+ self.schedule = StagedActivation(self, model_stages,
+ shuffle=shuffle)
elif activation == RANDOM:
self.schedule = RandomActivation(self)
elif activation == SIMULTANEOUS:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 8
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
mccabe==0.7.0
-e git+https://github.com/projectmesa/mesa.git@558b052b46c447eb1ee16da06ffb4603ec24178e#egg=Mesa
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
tomli==2.2.1
tornado==6.4.2
tzdata==2025.2
| name: mesa
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- mccabe==0.7.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- tomli==2.2.1
- tornado==6.4.2
- tzdata==2025.2
prefix: /opt/conda/envs/mesa
| [
"tests/test_grid.py::TestBaseGrid::test_agent_positions",
"tests/test_grid.py::TestBaseGrid::test_cell_agent_reporting",
"tests/test_grid.py::TestBaseGrid::test_coord_iter",
"tests/test_grid.py::TestBaseGrid::test_iter_cell_agent_reporting",
"tests/test_grid.py::TestBaseGrid::test_listfree_cell_agent_reporting",
"tests/test_grid.py::TestBaseGrid::test_listfree_iter_cell_agent_reporting",
"tests/test_grid.py::TestBaseGrid::test_neighbors",
"tests/test_grid.py::TestBaseGridTorus::test_agent_positions",
"tests/test_grid.py::TestBaseGridTorus::test_cell_agent_reporting",
"tests/test_grid.py::TestBaseGridTorus::test_coord_iter",
"tests/test_grid.py::TestBaseGridTorus::test_iter_cell_agent_reporting",
"tests/test_grid.py::TestBaseGridTorus::test_listfree_cell_agent_reporting",
"tests/test_grid.py::TestBaseGridTorus::test_listfree_iter_cell_agent_reporting",
"tests/test_grid.py::TestBaseGridTorus::test_neighbors",
"tests/test_grid.py::TestSingleGrid::test_enforcement",
"tests/test_grid.py::TestMultiGrid::test_agent_positions",
"tests/test_grid.py::TestMultiGrid::test_neighbors"
]
| []
| [
"tests/test_batchrunner.py::TestBatchRunner::test_agent_level_vars",
"tests/test_batchrunner.py::TestBatchRunner::test_model_level_vars",
"tests/test_time.py::TestStagedActivation::test_no_shuffle",
"tests/test_time.py::TestStagedActivation::test_remove",
"tests/test_time.py::TestStagedActivation::test_shuffle",
"tests/test_time.py::TestStagedActivation::test_shuffle_shuffles_agents",
"tests/test_time.py::TestRandomActivation::test_random_activation_step_increments_step_and_time_counts",
"tests/test_time.py::TestRandomActivation::test_random_activation_step_shuffles",
"tests/test_time.py::TestRandomActivation::test_random_activation_step_steps_each_agent",
"tests/test_time.py::TestSimultaneousActivation::test_simultaneous_activation_step_steps_and_advances_each_agent"
]
| []
| Apache License 2.0 | 642 | [
"mesa/agent.py",
"mesa/visualization/ModularVisualization.md",
"mesa/visualization/modules/CanvasGridVisualization.py",
"mesa/visualization/templates/CanvasModule.js",
"examples/Basic/basic/server.py",
"examples/Basic/Readme.md",
"examples/Readme.md",
"examples/Basic/run.py",
"mesa/space.py",
"mesa/visualization/templates/GridDraw.js",
"examples/Basic/basic/model.py",
"examples/Basic/requirements.txt",
"mesa/time.py",
"mesa/__init__.py",
"mesa/model.py"
]
| [
"mesa/agent.py",
"mesa/visualization/ModularVisualization.md",
"mesa/visualization/modules/CanvasGridVisualization.py",
"mesa/visualization/templates/CanvasModule.js",
"examples/Basic/basic/server.py",
"examples/Basic/Readme.md",
"examples/Readme.md",
"examples/Basic/run.py",
"mesa/space.py",
"mesa/visualization/templates/GridDraw.js",
"examples/Basic/basic/model.py",
"examples/Basic/requirements.txt",
"mesa/time.py",
"mesa/__init__.py",
"mesa/model.py"
]
|
|
setokinto__slack-shogi-17 | 9f9abc3d50a69627e21274a482a8613f97958ad0 | 2016-07-18 05:56:58 | 9f9abc3d50a69627e21274a482a8613f97958ad0 | diff --git a/app/modules/shogi.py b/app/modules/shogi.py
index 4c2b930..3373116 100644
--- a/app/modules/shogi.py
+++ b/app/modules/shogi.py
@@ -51,9 +51,11 @@ class Koma(Enum):
if self.value & 0x10:
return False
return True
-
-class ShogiCantMoveException(Exception):
- pass
+ def promote(self):
+ try:
+ return Koma(self.value | 0x10)
+ except ValueError:
+ return self
class Shogi:
# TODO: implement komaochi
@@ -61,6 +63,8 @@ class Shogi:
self.first = True
self.first_tegoma = []
self.second_tegoma = []
+ self.last_move_x = None
+ self.last_move_y = None
self.board = [
[
Koma.opponent_kyosha,
@@ -163,20 +167,21 @@ class Shogi:
],
]
def move(self, from_x, from_y, to_x, to_y, promote):
- """
- if from_x and from_y is -1, the koma is from komadai
- """
koma = self.board[from_y][from_x]
- # TODO: check for movable
- if False: # not movable:
- raise ShogiCantMoveException()
koma_for_komadai = self.board[to_y][to_x]
if koma_for_komadai is not Koma.empty:
- # TODO: move to komadai
- pass
+ if self.first:
+ self.first_tegoma.append(koma_for_komadai)
+ else:
+ self.second_tegoma.append(koma_for_komadai)
self.board[from_y][from_x] = Koma.empty
- self.board[to_y][to_x] = koma # TODO: in case promote is true
+ if promote:
+ self.board[to_y][to_x] = koma.promote()
+ else:
+ self.board[to_y][to_x] = koma
self.first = not self.first
+ self.last_move_x = to_x
+ self.last_move_y = to_y
def movable(self, from_x, from_y, to_x, to_y, promote):
board = self.board
@@ -213,6 +218,48 @@ class Shogi:
return True
return False
+ def drop(self, koma, to_x, to_y):
+ if self.first:
+ self.first_tegoma.remove(koma)
+ else:
+ self.second_tegoma.remove(koma)
+ koma_for_komadai = self.board[to_y][to_x]
+ self.board[to_y][to_x] = koma
+ if koma_for_komadai is not Koma.empty:
+ if self.first:
+ self.first_tegoma.append(koma_for_komadai)
+ else:
+ self.second_tegoma.append(koma_for_komadai)
+ self.first = not self.first
+ self.last_move_x = to_x
+ self.last_move_y = to_y
+
+ def droppable(self, koma, to_x, to_y):
+ if self.first:
+ tegoma = self.first_tegoma
+ else:
+ tegoma = self.second_tegoma
+ if koma in tegoma and self.board[to_y][to_x] is Koma.empty:
+ if koma is Koma.fu or koma is Koma.opponent_fu:
+ for y_board in self.board:
+ if y_board[to_x] is koma:
+ # 2fu
+ return False
+ if koma is Koma.fu or koma is Koma.kyosha:
+ if to_y == 0:
+ return False
+ if koma is Koma.keima:
+ if to_y <= 1:
+ return False
+ if koma is Koma.opponent_fu or koma is Koma.opponent_kyosha:
+ if to_y == 8:
+ return False
+ if koma is Koma.opponent_keima:
+ if to_y >= 7:
+ return False
+ return True
+ return False
+
def checkObstacle(self, from_x, from_y, to_x, to_y):
if self.board[from_y][from_x].is_keima():
return True
| 将棋盤モジュールを作る
将棋盤モジュールを作る
- 入力と入力とそれに対するエラーのインターフェースを作る
- 盤面を表示する
- 将棋盤の盤面を入力に従って管理する | setokinto/slack-shogi | diff --git a/test/modules/shogi_test.py b/test/modules/shogi_test.py
index 9538c50..d768dde 100644
--- a/test/modules/shogi_test.py
+++ b/test/modules/shogi_test.py
@@ -145,10 +145,6 @@ class ShogiTest(unittest.TestCase):
movable = shogi.movable(3, 6, 2, 8, True)
self.assertTrue(movable)
-
-
-
-
shogi.first = True
# 81 kyo narazu
movable = shogi.movable(1, 1, 1, 0, False)
@@ -234,6 +230,107 @@ class ShogiTest(unittest.TestCase):
movable = shogi.movable(5, 8, 5, 7, True)
self.assertFalse(movable)
+ def test_drop(self):
+ shogi = Shogi()
+ shogi.first_tegoma = [Koma.kin]
+ shogi.second_tegoma = [Koma.opponent_kin]
+ shogi.drop(Koma.kin, 4, 4)
+ self.assertEqual(shogi.board[4][4], Koma.kin)
+ shogi.drop(Koma.opponent_kin, 3, 3)
+ self.assertEqual(shogi.board[3][3], Koma.opponent_kin)
+
+ def test_droppable(self):
+ shogi = Shogi()
+ shogi.first_tegoma = [Koma.kin]
+ shogi.second_tegoma = [Koma.opponent_kin]
+
+ shogi.first = True
+ # have
+ droppable = shogi.droppable(Koma.kin, 4, 4)
+ self.assertTrue(droppable)
+ # opponent
+ droppable = shogi.droppable(Koma.opponent_kin, 4, 4)
+ self.assertFalse(droppable)
+ # not have
+ droppable = shogi.droppable(Koma.gin, 4, 4)
+ self.assertFalse(droppable)
+
+ shogi.first = False
+ droppable = shogi.droppable(Koma.opponent_kin, 4, 4)
+ self.assertTrue(droppable)
+ # opponent
+ droppable = shogi.droppable(Koma.kin, 4, 4)
+ self.assertFalse(droppable)
+ # not have
+ droppable = shogi.droppable(Koma.opponent_gin, 4, 4)
+ self.assertFalse(droppable)
+
+ def test_droppable_nifu(self):
+ shogi = Shogi()
+ shogi.first_tegoma = [Koma.fu]
+ shogi.second_tegoma = [Koma.opponent_fu]
+
+ shogi.first = True
+ droppable = shogi.droppable(Koma.fu, 4, 4)
+ self.assertFalse(droppable)
+ shogi.first = False
+ droppable = shogi.droppable(Koma.opponent_fu, 4, 4)
+ self.assertFalse(droppable)
+
+ def test_droppable_fu_kyo_kei(self):
+ shogi = Shogi()
+ shogi.board[0][0] = Koma.empty
+ shogi.board[2][0] = Koma.empty
+ shogi.board[6][0] = Koma.empty
+ shogi.board[8][0] = Koma.empty
+ shogi.first_tegoma = [Koma.fu, Koma.kyosha, Koma.keima]
+ shogi.second_tegoma = [Koma.opponent_fu, Koma.opponent_kyosha, Koma.opponent_keima]
+ shogi.first = True
+ # keima
+ droppable = shogi.droppable(Koma.keima, 0, 1)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.keima, 0, 2)
+ self.assertTrue(droppable)
+ droppable = shogi.droppable(Koma.opponent_keima, 1, 2)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.keima, 0, 3)
+ self.assertTrue(droppable)
+ # fu
+ droppable = shogi.droppable(Koma.fu, 0, 0)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.fu, 0, 1)
+ self.assertTrue(droppable)
+ # kyo
+ droppable = shogi.droppable(Koma.kyosha, 0, 0)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.kyosha, 0, 1)
+ self.assertTrue(droppable)
+
+ shogi.first = False
+ # keima
+ droppable = shogi.droppable(Koma.opponent_keima, 0, 7)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.opponent_keima, 0, 6)
+ self.assertTrue(droppable)
+ droppable = shogi.droppable(Koma.keima, 1, 6)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.opponent_keima, 0, 5)
+ self.assertTrue(droppable)
+ # fu
+ droppable = shogi.droppable(Koma.opponent_fu, 0, 7)
+ self.assertTrue(droppable)
+ droppable = shogi.droppable(Koma.opponent_fu, 0, 8)
+ self.assertFalse(droppable)
+ droppable = shogi.droppable(Koma.opponent_fu, 0, 0)
+ self.assertTrue(droppable)
+ droppable = shogi.droppable(Koma.opponent_fu, 0, 1)
+ self.assertTrue(droppable)
+ # kyosha
+ droppable = shogi.droppable(Koma.opponent_kyosha, 0, 7)
+ self.assertTrue(droppable)
+ droppable = shogi.droppable(Koma.opponent_kyosha, 0, 8)
+ self.assertFalse(droppable)
+
def test_find_koma(self):
shogi = Shogi()
@@ -247,4 +344,14 @@ class ShogiTest(unittest.TestCase):
koma_positions = shogi.find_koma(Koma.hisha)
self.assertIn([7, 7], koma_positions)
+
+ def test_last_move(self):
+ shogi = Shogi()
+ shogi.second_tegoma = [Koma.opponent_fu]
+ shogi.move(0, 6, 0, 5, False)
+ self.assertEqual(shogi.last_move_x, 0)
+ self.assertEqual(shogi.last_move_y, 5)
+ shogi.drop(Koma.opponent_fu, 5, 5)
+ self.assertEqual(shogi.last_move_x, 5)
+ self.assertEqual(shogi.last_move_y, 5)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "slackbot",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | Brotli @ file:///croot/brotli-split_1736182456865/work
certifi @ file:///croot/certifi_1738623731865/work/certifi
charset-normalizer @ file:///croot/charset-normalizer_1721748349566/work
coverage==7.8.0
exceptiongroup==1.2.2
idna @ file:///croot/idna_1714398848350/work
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
PySocks @ file:///tmp/build/80754af9/pysocks_1605305812635/work
pytest==8.3.5
pytest-cov==6.0.0
requests @ file:///croot/requests_1730999120400/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
-e git+https://github.com/setokinto/slack-shogi.git@9f9abc3d50a69627e21274a482a8613f97958ad0#egg=Slack_Shogi
slackbot @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_slackbot_1737572202/work
slacker @ file:///home/conda/feedstock_root/build_artifacts/slacker_1735042418524/work
tomli==2.2.1
urllib3 @ file:///croot/urllib3_1737133630106/work
websocket-client @ file:///tmp/build/80754af9/websocket-client_1614803975924/work
| name: slack-shogi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- brotli-python=1.0.9=py39h6a678d5_9
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2025.1.31=py39h06a4308_0
- charset-normalizer=3.3.2=pyhd3eb1b0_0
- idna=3.7=py39h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- pysocks=1.7.1=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- requests=2.32.3=py39h06a4308_1
- setuptools=75.8.0=py39h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- slackbot=1.0.5=pyh29332c3_0
- slacker=0.14.0=pyhd8ed1ab_1
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- urllib3=2.3.0=py39h06a4308_0
- websocket-client=0.58.0=py39h06a4308_4
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/slack-shogi
| [
"test/modules/shogi_test.py::ShogiTest::test_drop",
"test/modules/shogi_test.py::ShogiTest::test_droppable",
"test/modules/shogi_test.py::ShogiTest::test_droppable_fu_kyo_kei",
"test/modules/shogi_test.py::ShogiTest::test_droppable_nifu",
"test/modules/shogi_test.py::ShogiTest::test_last_move"
]
| []
| [
"test/modules/shogi_test.py::ShogiTest::test_find_koma",
"test/modules/shogi_test.py::ShogiTest::test_movable_for_empty",
"test/modules/shogi_test.py::ShogiTest::test_movable_for_enemy",
"test/modules/shogi_test.py::ShogiTest::test_movable_for_fu",
"test/modules/shogi_test.py::ShogiTest::test_movable_for_kaku",
"test/modules/shogi_test.py::ShogiTest::test_movable_for_kyo",
"test/modules/shogi_test.py::ShogiTest::test_move_76fu",
"test/modules/shogi_test.py::ShogiTest::test_move_for_promote"
]
| []
| MIT License | 643 | [
"app/modules/shogi.py"
]
| [
"app/modules/shogi.py"
]
|
|
simphony__simphony-remote-117 | 61ec23ffe44463cbc41f6fa54b4247963093ed79 | 2016-07-18 09:48:08 | 61ec23ffe44463cbc41f6fa54b4247963093ed79 | diff --git a/remoteappmanager/restresources/container.py b/remoteappmanager/restresources/container.py
index 864e1fd..3191c6b 100644
--- a/remoteappmanager/restresources/container.py
+++ b/remoteappmanager/restresources/container.py
@@ -202,7 +202,7 @@ class Container(Resource):
server_url = "http://{}:{}{}/".format(
container.ip,
container.port,
- url_path_join(self.application.command_line_config.base_url,
+ url_path_join(self.application.command_line_config.base_urlpath,
container.urlpath))
yield _wait_for_http_server_2xx(
| remoteapprest app start error due to unfound `base_url`
```
(simremote)kit@kit-virtual-machine:jupyterhub$ remoteapprest app start 629b1d86d69bfb4b400dfee204f5e3a0
[W 160718 10:37:39 container:154] HOME (None) is not available for kit
[I 160718 10:37:39 container_manager:218] Got container image: simphonyproject/simphonic-mayavi
[I 2016-07-18 10:37:39.576 JupyterHub log:100] 200 GET /hub/api/authorizations/cookie/jupyter-hub-token-kit/[secret] ([email protected]) 8.24ms
[E 160718 10:37:39 container_manager:417] Container 'remoteexec-kit-629b1d86d69bfb4b400dfee204f5e3a0' is gone
[I 160718 10:37:39 container_manager:260] Mounting these volumes:
/appdata/image_name/common -> /appdata
[I 160718 10:37:39 container_manager:293] Created container 'remoteexec-kit-629b1d86d69bfb4b400dfee204f5e3a0' (id: 2dcdd6621f8f736322969b25449d5c75566cf617632cb9481f865aeaf919b863) from image simphonyproject/simphonic-mayavi
[I 160718 10:37:39 container_manager:330] Started container 'remoteexec-kit-629b1d86d69bfb4b400dfee204f5e3a0' (id: 2dcdd6621f8f736322969b25449d5c75566cf617632cb9481f865aeaf919b863). Exported port reachable at 127.0.0.1:32769
[E 160718 10:37:39 rest_handler:71] Internal error during POST operation on containers
Traceback (most recent call last):
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/rest/rest_handler.py", line 60, in post
resource_id = yield res_handler.create(data)
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1021, in run
yielded = self.gen.throw(*exc_info)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/restresources/container.py", line 38, in create
yield self._wait_for_container_ready(container)
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 285, in wrapper
yielded = next(result)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/restresources/container.py", line 205, in _wait_for_container_ready
url_path_join(self.application.command_line_config.base_url,
AttributeError: 'CommandLineConfig' object has no attribute 'base_url'
``` | simphony/simphony-remote | diff --git a/tests/restmodel/test_container.py b/tests/restmodel/test_container.py
index f9862e9..ba87141 100644
--- a/tests/restmodel/test_container.py
+++ b/tests/restmodel/test_container.py
@@ -29,7 +29,7 @@ class TestContainer(AsyncHTTPTestCase):
app.file_config = Mock()
app.file_config.network_timeout = 5
app.command_line_config = Mock()
- app.command_line_config.base_url = "http://127.0.0.1:8000/"
+ app.command_line_config.base_urlpath = "/"
app.reverse_proxy = Mock()
app.reverse_proxy.add_container = mock_coro_factory()
app.reverse_proxy.remove_container = mock_coro_factory()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@61ec23ffe44463cbc41f6fa54b4247963093ed79#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/restmodel/test_container.py::TestContainer::test_create"
]
| []
| [
"tests/restmodel/test_container.py::TestContainer::test_delete",
"tests/restmodel/test_container.py::TestContainer::test_items",
"tests/restmodel/test_container.py::TestContainer::test_retrieve"
]
| []
| BSD 3-Clause "New" or "Revised" License | 645 | [
"remoteappmanager/restresources/container.py"
]
| [
"remoteappmanager/restresources/container.py"
]
|
|
setokinto__slack-shogi-20 | fc18f787e9f0306f6925e737ce68f5b84232ce2b | 2016-07-18 10:30:56 | fc18f787e9f0306f6925e737ce68f5b84232ce2b | diff --git a/app/modules/shogi.py b/app/modules/shogi.py
index 3373116..78da9de 100644
--- a/app/modules/shogi.py
+++ b/app/modules/shogi.py
@@ -56,6 +56,16 @@ class Koma(Enum):
return Koma(self.value | 0x10)
except ValueError:
return self
+ def unpromote(self):
+ try:
+ return Koma(self.value & 0x2F)
+ except ValueError:
+ return self
+ def go_enemy(self):
+ if self.is_first():
+ return Koma(self.value + 0x20)
+ else:
+ return Koma(self.value - 0x20)
class Shogi:
# TODO: implement komaochi
@@ -170,6 +180,7 @@ class Shogi:
koma = self.board[from_y][from_x]
koma_for_komadai = self.board[to_y][to_x]
if koma_for_komadai is not Koma.empty:
+ koma_for_komadai = koma_for_komadai.unpromote().go_enemy()
if self.first:
self.first_tegoma.append(koma_for_komadai)
else:
@@ -226,6 +237,7 @@ class Shogi:
koma_for_komadai = self.board[to_y][to_x]
self.board[to_y][to_x] = koma
if koma_for_komadai is not Koma.empty:
+ koma_for_komadai = koma_for_komadai.unpromote().go_enemy()
if self.first:
self.first_tegoma.append(koma_for_komadai)
else:
diff --git a/app/modules/shogi_input.py b/app/modules/shogi_input.py
index 262f32a..f110571 100644
--- a/app/modules/shogi_input.py
+++ b/app/modules/shogi_input.py
@@ -1,7 +1,11 @@
import uuid
-from app.slack_utils.user import User
+import random
+
+from app.slack_utils.user import User as UserFinder
from app.modules.shogi import Shogi as ShogiModule
+from app.modules.parse_input import ParseInput
+
class ShogiManager:
def __init__(self):
@@ -11,9 +15,9 @@ class ShogiManager:
return False
else:
return True
- def create(self, channel_id, user_ids):
+ def create(self, channel_id, users):
if self.is_creatable(channel_id):
- shogi = Shogi(channel_id, user_ids)
+ shogi = Shogi(channel_id, users)
self.shogi[channel_id] = shogi
return shogi
else:
@@ -32,16 +36,16 @@ class ShogiManager:
class ShogiInput:
manager = ShogiManager()
@staticmethod
- def init(channel_id, user_ids):
- if ShogiInput.creatable_new_shogi(channel_id, user_ids):
- shogi = ShogiInput.manager.create(channel_id, user_ids)
+ def init(channel_id, users):
+ if ShogiInput.creatable_new_shogi(channel_id, users):
+ shogi = ShogiInput.manager.create(channel_id, users)
return shogi
else:
return None
@staticmethod
- def creatable_new_shogi(channel_id, user_ids):
- for user_id in user_ids:
- if user_id is None:
+ def creatable_new_shogi(channel_id, users):
+ for user in users:
+ if user["id"] is None:
return False
if ShogiInput.manager.is_creatable(channel_id):
return True
@@ -58,41 +62,76 @@ class ShogiInput:
def clear(channel_id):
ShogiInput.manager.clear(channel_id)
@staticmethod
- def move(position, koma, sub_position, promote):
- # TODO:
- return False
+ def move(movement_str, channel_id, user_id):
+ shogi = ShogiInput.manager.get_shogi(channel_id)
+ if shogi.first:
+ if not shogi.first_user_id == user_id:
+ return False # TODO: DifferentUserException
+ else:
+ if not shogi.second_user_id == user_id:
+ return False # TODO: DifferentUserException
+ movement = ParseInput.parse(movement_str, shogi.shogi) # TODO: use Shogi object in this file and test
+ if movement == False:
+ return False
+ else:
+ from_x, from_y, to_x, to_y, promote, koma = movement
+
+ if from_x == -1 and from_y == -1 and shogi.droppable(koma, to_x, to_y):
+ shogi.drop(koma, to_x, to_y)
+ return True
+ elif shogi.movable(from_x, from_y, to_x, to_y, promote):
+ shogi.move(from_x, from_y, to_x, to_y, promote)
+ return True
+ else:
+ return False
@staticmethod
def basic_move(channel_id, from_x, from_y, to_x, to_y, promote):
shogi = ShogiInput.manager.get_shogi(channel_id)
shogi.move(from_x, from_y, to_x, to_y, promote)
@staticmethod
def get_shogi_board(channel_id):
- # TODO:
shogi = ShogiInput.manager.get_shogi(channel_id)
if shogi is None:
return None
return {
- "first":[],
- "second": [],
+ "first": shogi.shogi.first_tegoma,
+ "second": shogi.shogi.second_tegoma,
"board": shogi.board,
"info": {
"first": {
- "name": "millay",
+ "id": shogi.first_user_id,
+ "name": shogi.first_user_name,
},
"second": {
- "name": "not millay",
+ "id": shogi.second_user_id,
+ "name": shogi.second_user_name,
}
- }
+ },
+ "_shogi": shogi,
}
class Shogi:
- def __init__(self, channel_id, user_ids):
+ def __init__(self, channel_id, users):
self.shogi = ShogiModule()
self.channel_id = channel_id
- self.user_ids = user_ids
+ self.user_ids = [ x["id"] for x in users]
+ random.shuffle(users)
+ self.first_user_id = users[0]["id"]
+ self.first_user_name = users[0]["name"]
+ self.second_user_id = users[1]["id"]
+ self.second_user_name = users[1]["name"]
self.id = uuid.uuid4().hex
def move(self, from_x, from_y, to_x, to_y, promote):
self.shogi.move(from_x, from_y, to_x, to_y, promote)
+ def drop(self, koma, to_x, to_y):
+ self.shogi.drop(koma, to_x, to_y)
+ def movable(self, from_x, from_y, to_x, to_y, promote):
+ return self.shogi.movable(from_x, from_y, to_x, to_y, promote)
+ def droppable(self, koma, to_x, to_y):
+ return self.shogi.droppable(koma, to_x, to_y)
+ @property
+ def first(self):
+ return self.shogi.first
@property
def board(self):
return self.shogi.board
diff --git a/app/shogi.py b/app/shogi.py
index 92ca51f..2b7d78c 100644
--- a/app/shogi.py
+++ b/app/shogi.py
@@ -27,15 +27,22 @@ def start_shogi(message, opponent_name):
message.reply("Error, sorry. Opponent is not found in this channel")
return
- shogi = ShogiInput.init(channel_id=channel_id, user_ids=[
- own_id,
- opponent_id,
+ shogi = ShogiInput.init(channel_id=channel_id, users=[{
+ "id": own_id,
+ "name": user.id_to_username(own_id),
+ }, {
+ "id": opponent_id,
+ "name": user.id_to_username(opponent_id),
+ }
])
if shogi is None:
message.reply("Error, sorry")
else:
message.reply("Shogi started: " + shogi.id)
+ board = ShogiInput.get_shogi_board(channel_id)
+ board_str = ShogiOutput.make_board_emoji(board)
+ message.send(board_str)
koma_names = [
"歩",
@@ -66,19 +73,48 @@ koma_names += list(map(lambda n: "成"+n, koma_names))
koma_names_string_regex = "|".join(koma_names)
@respond_to("([一二三四五六七八九123456789123456789]{2})("+koma_names_string_regex+")([上右下左]{1,2})?(成)?")
-def koma_move(message, position, koma, sub_position, promote):
+def koma_move(message, position, koma, sub_position=None, promote=None):
+ movement_str = "".join([x for x in [position, koma, sub_position, promote] if x is not None])
channel_id = message.body["channel"]
if not ShogiInput.exists(channel_id):
message.reply("start withから初めてね")
return
own_id = message.body["user"]
- if ShogiInput.koma_is_movable(channel_id, own_id, position, koma, sub_position, promote):
- ShogiInput.move(position, koma, sub_position, promote)
+
+ if ShogiInput.move(movement_str, channel_id, own_id):
board = ShogiInput.get_shogi_board(channel_id)
board_str = ShogiOutput.make_board_emoji(board)
message.send(board_str)
else:
message.reply("You cannot move this!!")
+ board = ShogiInput.get_shogi_board(channel_id)
+ board_str = ShogiOutput.make_board_emoji(board)
+ message.send(board_str)
+
+@respond_to("今?.*の?.*状態.*を?教.*え?て?")
+@respond_to("現局面.*")
+@respond_to("局面.*")
+@respond_to("board")
+def board_info(message):
+ channel_id = message.body["channel"]
+ board = ShogiInput.get_shogi_board(channel_id)
+ board_str = ShogiOutput.make_board_emoji(board)
+ message.send(board_str)
+
+@respond_to(".*降参.*")
+@respond_to(".*resign.*")
+@respond_to(".*負けました.*")
+@respond_to(".*まけました.*")
+@respond_to(".*まいりました.*")
+@respond_to(".*参りました.*")
+@respond_to(".*ありません.*")
+def resign(message):
+ channel_id = message.body["channel"]
+ message.send("最終局面")
+ board = ShogiInput.get_shogi_board(channel_id)
+ board_str = ShogiOutput.make_board_emoji(board)
+ message.send(board_str)
+ ShogiInput.clear(channel_id)
@respond_to("([123456789][123456789][123456789][123456789]成?)")
def koma_move_basic(message, movement):
diff --git a/app/slack_utils/user.py b/app/slack_utils/user.py
index 1e8febd..0a69c19 100644
--- a/app/slack_utils/user.py
+++ b/app/slack_utils/user.py
@@ -1,17 +1,23 @@
-class User:
+class User: # TODO: rename this class
def __init__(self, slacker):
self._slacker = slacker
+ self.users = self._slacker.users.list().body["members"]
def username_to_id(self, username):
""" return string user_id or None"""
- users = self._slacker.users.list().body["members"] # TODO: cache
+ users = self.users
if username[0] == "@":
username = username[1:]
for user in users:
if user["name"] == username:
return user["id"]
return None
+ def id_to_username(self, id_):
+ users = self.users
+ for user in users:
+ if user["id"] == id_:
+ return user["name"]
def user_in_channel(self, user_id, channel_id):
users = self._slacker.channels.info(channel_id).body["channel"]["members"]
for user in users:
| inputモジュールを作る
botからinputするモジュールを作るよーーー
- @shogibot: start with kouki_dan みたいなコマンドを受け付けて将棋盤モジュール.startみたいなのをやる
- @shogibot: 23金右成 とかを受け付け、将棋盤モジュールにinputする
- ↑がもしエラーになった場合にいい感じに返す
| setokinto/slack-shogi | diff --git a/test/modules/shogi_input_test.py b/test/modules/shogi_input_test.py
index c4ea1d7..3647d9b 100644
--- a/test/modules/shogi_input_test.py
+++ b/test/modules/shogi_input_test.py
@@ -6,14 +6,35 @@ class ShogiTest(unittest.TestCase):
pass
def test_shogi_input_is_initable(self):
- shogi = ShogiInput.init("channel_id", ["user1", "user2"])
+ shogi = ShogiInput.init("channel_id", [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }
+ ])
self.assertEqual(shogi.channel_id, "channel_id")
- shogi = ShogiInput.init("channel_id", ["user1", "user2"])
+ shogi = ShogiInput.init("channel_id", [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }
+ ])
self.assertIsNone(shogi)
ShogiInput.clear("channel_id")
- shogi = ShogiInput.init("channel_id", ["user1", "user2"])
+ shogi = ShogiInput.init("channel_id",[{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }
+ ])
self.assertEqual(shogi.channel_id, "channel_id")
diff --git a/test/slack_utils/user_test.py b/test/slack_utils/user_test.py
index 5053243..d5168de 100644
--- a/test/slack_utils/user_test.py
+++ b/test/slack_utils/user_test.py
@@ -110,6 +110,13 @@ class UserTest(unittest.TestCase):
user_id2 = self.user.username_to_id("bobby2")
self.assertEqual(user_id2, "U023BECGA")
+ def test_find_username_from_userid(self):
+ username = self.user.id_to_username("U023BECGF")
+ self.assertEqual(username, "bobby")
+
+ username = self.user.id_to_username("U023BECGA")
+ self.assertEqual(username, "bobby2")
+
def test_return_None_non_exists_user_name(self):
user_id = self.user.username_to_id("not_bobby")
self.assertEqual(user_id, None)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 4
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "slackbot",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | Brotli @ file:///croot/brotli-split_1736182456865/work
certifi @ file:///croot/certifi_1738623731865/work/certifi
charset-normalizer @ file:///croot/charset-normalizer_1721748349566/work
exceptiongroup==1.2.2
idna @ file:///croot/idna_1714398848350/work
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
PySocks @ file:///tmp/build/80754af9/pysocks_1605305812635/work
pytest==8.3.5
requests @ file:///croot/requests_1730999120400/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
-e git+https://github.com/setokinto/slack-shogi.git@fc18f787e9f0306f6925e737ce68f5b84232ce2b#egg=Slack_Shogi
slackbot @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_slackbot_1737572202/work
slacker @ file:///home/conda/feedstock_root/build_artifacts/slacker_1735042418524/work
tomli==2.2.1
urllib3 @ file:///croot/urllib3_1737133630106/work
websocket-client @ file:///tmp/build/80754af9/websocket-client_1614803975924/work
| name: slack-shogi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- brotli-python=1.0.9=py39h6a678d5_9
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2025.1.31=py39h06a4308_0
- charset-normalizer=3.3.2=pyhd3eb1b0_0
- idna=3.7=py39h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- pysocks=1.7.1=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- requests=2.32.3=py39h06a4308_1
- setuptools=75.8.0=py39h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- slackbot=1.0.5=pyh29332c3_0
- slacker=0.14.0=pyhd8ed1ab_1
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- urllib3=2.3.0=py39h06a4308_0
- websocket-client=0.58.0=py39h06a4308_4
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/slack-shogi
| [
"test/slack_utils/user_test.py::UserTest::test_find_username_from_userid"
]
| []
| [
"test/modules/shogi_input_test.py::ShogiTest::test_clear_for_non_exists_channnel",
"test/modules/shogi_input_test.py::ShogiTest::test_shogi_input_is_initable",
"test/slack_utils/user_test.py::UserTest::test_find_userid_from_username",
"test/slack_utils/user_test.py::UserTest::test_find_userid_with_atmark_prefix",
"test/slack_utils/user_test.py::UserTest::test_return_None_non_exists_user_name",
"test/slack_utils/user_test.py::UserTest::test_user_in_channel_return_False_when_user_not_exists",
"test/slack_utils/user_test.py::UserTest::test_user_in_channel_return_True_when_user_exists"
]
| []
| MIT License | 646 | [
"app/shogi.py",
"app/modules/shogi_input.py",
"app/slack_utils/user.py",
"app/modules/shogi.py"
]
| [
"app/shogi.py",
"app/modules/shogi_input.py",
"app/slack_utils/user.py",
"app/modules/shogi.py"
]
|
|
simphony__simphony-remote-120 | 05225db5d853d5b00bc1135763affe0f9935ea99 | 2016-07-18 12:29:36 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/application.py b/remoteappmanager/application.py
index 7666a2a..abfcecf 100644
--- a/remoteappmanager/application.py
+++ b/remoteappmanager/application.py
@@ -19,6 +19,7 @@ from remoteappmanager.services.reverse_proxy import ReverseProxy
from remoteappmanager import rest
from remoteappmanager.rest import registry
from remoteappmanager import restresources
+from remoteappmanager.utils import url_path_join
class Application(web.Application, LoggingMixin):
@@ -103,7 +104,6 @@ class Application(web.Application, LoggingMixin):
return ReverseProxy(
endpoint_url=self.command_line_config.proxy_api_url,
auth_token=auth_token,
- base_urlpath=self.command_line_config.base_urlpath
)
@default("hub")
@@ -149,6 +149,16 @@ class Application(web.Application, LoggingMixin):
tornado.ioloop.IOLoop.current().start()
+ def urlpath_for_object(self, object):
+ """
+ Resolves the absolute url path of a given object.
+ The object must have a urlpath property.
+ """
+
+ return url_path_join(
+ self.command_line_config.base_urlpath,
+ object.urlpath)
+
# Private
def _get_handlers(self):
"""Returns the registered handlers"""
diff --git a/remoteappmanager/handlers/home_handler.py b/remoteappmanager/handlers/home_handler.py
index c1648de..8936f28 100644
--- a/remoteappmanager/handlers/home_handler.py
+++ b/remoteappmanager/handlers/home_handler.py
@@ -112,7 +112,11 @@ class HomeHandler(BaseHandler):
# The server is up and running. Now contact the proxy and add
# the container url to it.
- urlpath = yield self.application.reverse_proxy.add_container(container)
+ urlpath = url_path_join(
+ self.application.command_line_config.base_urlpath,
+ container.urlpath)
+ yield self.application.reverse_proxy.register(
+ urlpath, container.host_url)
# Redirect the user
self.log.info('Redirecting to {}'.format(urlpath))
@@ -136,7 +140,11 @@ class HomeHandler(BaseHandler):
yield self._wait_for_container_ready(container)
# in case the reverse proxy is not already set up
- urlpath = yield self.application.reverse_proxy.add_container(container)
+ urlpath = url_path_join(
+ self.application.command_line_config.base_urlpath,
+ container.urlpath)
+ yield self.application.reverse_proxy.register(
+ urlpath, container.host_url)
self.log.info('Redirecting to {}'.format(urlpath))
self.redirect(urlpath)
@@ -155,13 +163,10 @@ class HomeHandler(BaseHandler):
self.finish("Unable to view the application")
return
- try:
- yield app.reverse_proxy.remove_container(container)
- except HTTPError as http_error:
- # The reverse proxy may be absent to start with
- if http_error.code != 404:
- raise http_error
-
+ urlpath = url_path_join(
+ self.application.command_line_config.base_urlpath,
+ container.urlpath)
+ yield app.reverse_proxy.unregister(urlpath)
yield container_manager.stop_and_remove_container(container.docker_id)
# We don't have fancy stuff at the moment to change the button, so
diff --git a/remoteappmanager/restresources/container.py b/remoteappmanager/restresources/container.py
index 3191c6b..cbf224a 100644
--- a/remoteappmanager/restresources/container.py
+++ b/remoteappmanager/restresources/container.py
@@ -36,7 +36,11 @@ class Container(Resource):
policy,
mapping_id)
yield self._wait_for_container_ready(container)
- yield self.application.reverse_proxy.add_container(container)
+ urlpath = url_path_join(
+ self.application.command_line_config.base_urlpath,
+ container.urlpath)
+ yield self.application.reverse_proxy.register(urlpath,
+ container.host_url)
return container.url_id
@@ -56,13 +60,15 @@ class Container(Resource):
@gen.coroutine
def delete(self, identifier):
"""Stop the container."""
- app = self.application
container = yield self._container_from_url_id(identifier)
if not container:
raise exceptions.NotFound()
- yield app.reverse_proxy.remove_container(container)
- yield app.container_manager.stop_and_remove_container(
+ urlpath = url_path_join(
+ self.application.command_line_config.base_urlpath,
+ container.urlpath)
+ yield self.application.reverse_proxy.unregister(urlpath)
+ yield self.application.container_manager.stop_and_remove_container(
container.docker_id)
@gen.coroutine
diff --git a/remoteappmanager/services/reverse_proxy.py b/remoteappmanager/services/reverse_proxy.py
index 64807c3..4bff151 100644
--- a/remoteappmanager/services/reverse_proxy.py
+++ b/remoteappmanager/services/reverse_proxy.py
@@ -5,7 +5,6 @@ from jupyterhub import orm as jupyterhub_orm
from traitlets import HasTraits, Unicode
from remoteappmanager.logging.logging_mixin import LoggingMixin
-from remoteappmanager.utils import url_path_join
class ReverseProxy(LoggingMixin, HasTraits):
@@ -17,9 +16,6 @@ class ReverseProxy(LoggingMixin, HasTraits):
#: The authorization token to authenticate the request
auth_token = Unicode()
- #: The prefix for the url added to the passed object relative .url()
- base_urlpath = Unicode('/')
-
def __init__(self, *args, **kwargs):
"""Initializes the reverse proxy connection object."""
super().__init__(*args, **kwargs)
@@ -31,73 +27,59 @@ class ReverseProxy(LoggingMixin, HasTraits):
api_server=_server_from_url(self.endpoint_url)
)
- self.log.info("Reverse proxy setup on {} with base url {}".format(
+ self.log.info("Reverse proxy setup on {}".format(
self.endpoint_url,
- self.base_urlpath
))
@gen.coroutine
- def remove_container(self, container):
- """Removes a container from the reverse proxy at the associated url.
+ def register(self, urlpath, target_host_url):
+ """Register a given urlpath to redirect to a different target host.
+ The operation is idempotent.
Parameters
----------
- container : remoteappmanager.docker.container.Container
- A container object.
+ urlpath: str
+ The absolute path of the url (e.g. /my/internal/service/)"
+
+ target_host_url:
+ The host to redirect to, e.g. http://127.0.0.1:31233/service/
"""
- proxy = self._reverse_proxy
+ self.log.info("Registering {} redirection to {}".format(
+ urlpath,
+ target_host_url))
- urlpath = url_path_join(self.base_urlpath, container.urlpath)
- self.log.info("Unregistering url {} to {} on reverse proxy.".format(
+ yield self._reverse_proxy.api_request(
urlpath,
- container.host_url
- ))
+ method='POST',
+ body=dict(
+ target=target_host_url,
+ )
+ )
+
+ @gen.coroutine
+ def unregister(self, urlpath):
+ """Unregisters a previously registered urlpath.
+ If the urlpath is not found in the reverse proxy, it will not raise
+ an error, but it will log the unexpected circumstance.
+
+ Parameters
+ ----------
+ urlpath: str
+ The absolute path of the url (e.g. /my/internal/service/"
+ """
+ self.log.info("Deregistering {} redirection".format(urlpath))
try:
- yield proxy.api_request(urlpath, method='DELETE')
+ yield self._reverse_proxy.api_request(urlpath, method='DELETE')
except httpclient.HTTPError as e:
if e.code == 404:
self.log.warning("Could not find urlpath {} when removing"
" container. In any case, the reverse proxy"
" does not map the url. Continuing".format(
- urlpath))
+ urlpath))
else:
raise e
- @gen.coroutine
- def add_container(self, container):
- """Adds the url associated to a given container on the reverse proxy.
-
- Parameters
- ----------
- container : remoteappmanager.docker.container.Container
- A container object.
-
- Returns
- -------
- urlpath : str
- The absolute url path of the container as registered on the reverse
- proxy.
- """
-
- proxy = self._reverse_proxy
- urlpath = url_path_join(self.base_urlpath, container.urlpath)
-
- self.log.info("Registering url {} to {} on reverse proxy.".format(
- urlpath,
- container.host_url
- ))
-
- yield proxy.api_request(
- urlpath,
- method='POST',
- body=dict(
- target=container.host_url,
- )
- )
-
- return urlpath
-
def _server_from_url(url):
"""Creates a orm.Server from a given url"""
| Simplify the ReverseProxy
A reverse proxy does not really need to have remove and add container methods. All that is necessary is to map urls. | simphony/simphony-remote | diff --git a/tests/handlers/test_home_handler.py b/tests/handlers/test_home_handler.py
index 95807b8..596b8db 100644
--- a/tests/handlers/test_home_handler.py
+++ b/tests/handlers/test_home_handler.py
@@ -7,10 +7,10 @@ from remoteappmanager.docker.container import Container
from remoteappmanager.docker.container_manager import ContainerManager
from remoteappmanager.docker.image import Image
from remoteappmanager.services.hub import Hub
-from remoteappmanager.services.reverse_proxy import ReverseProxy
from remoteappmanager.application import Application
from tests import utils
+from tests.mock.mock_reverse_proxy import MockReverseProxy
from tests.temp_mixin import TempMixin
@@ -38,9 +38,7 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
file_config.accounting_kwargs = {'url': "sqlite:///"+sqlite_file_path}
app = Application(command_line_config, file_config)
- app.reverse_proxy = mock.Mock(spec=ReverseProxy)
- app.reverse_proxy.add_container = utils.mock_coro_factory("/")
- app.reverse_proxy.remove_container = utils.mock_coro_factory()
+ app.reverse_proxy = MockReverseProxy()
app.hub = mock.Mock(spec=Hub)
app.hub.verify_token = utils.mock_coro_factory({
'pending': None,
@@ -107,7 +105,7 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
".HomeHandler"
".redirect") as redirect:
- self.assertFalse(self._app.reverse_proxy.add_container.called)
+ self.assertFalse(self._app.reverse_proxy.register.called)
self.fetch("/user/username/",
method="POST",
headers={
@@ -115,7 +113,7 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
},
body=body)
- self.assertTrue(self._app.reverse_proxy.add_container.called)
+ self.assertTrue(self._app.reverse_proxy.register.called)
self.assertTrue(redirect.called)
def test_post_failed_auth(self):
@@ -156,7 +154,7 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
},
body=body)
- self.assertTrue(self._app.reverse_proxy.remove_container.called)
+ self.assertTrue(self._app.reverse_proxy.unregister.called)
self.assertTrue(redirect.called)
def test_post_view(self):
@@ -184,5 +182,5 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
},
body=body)
- self.assertTrue(self._app.reverse_proxy.add_container.called)
+ self.assertTrue(self._app.reverse_proxy.register.called)
self.assertTrue(redirect.called)
diff --git a/tests/mock/__init__.py b/tests/mock/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/mock/mock_reverse_proxy.py b/tests/mock/mock_reverse_proxy.py
new file mode 100644
index 0000000..ee624a6
--- /dev/null
+++ b/tests/mock/mock_reverse_proxy.py
@@ -0,0 +1,15 @@
+from unittest import mock
+
+from remoteappmanager.services.reverse_proxy import ReverseProxy
+from tests import utils
+
+
+def MockReverseProxy():
+ """Constructor. Returns a mock reverse proxy implementation.
+ Named as a class for potential future expansion in mock implementation."""
+
+ mock_revproxy = mock.Mock(spec=ReverseProxy)
+ mock_revproxy.register = utils.mock_coro_factory("/")
+ mock_revproxy.unregister = utils.mock_coro_factory()
+
+ return mock_revproxy
diff --git a/tests/restmodel/test_container.py b/tests/restmodel/test_container.py
index ba87141..7ae9a96 100644
--- a/tests/restmodel/test_container.py
+++ b/tests/restmodel/test_container.py
@@ -2,6 +2,7 @@ from unittest.mock import Mock, patch
from remoteappmanager.restresources import Container
from tests import utils
+from tests.mock.mock_reverse_proxy import MockReverseProxy
from tornado import web, escape
from remoteappmanager import rest
@@ -28,11 +29,10 @@ class TestContainer(AsyncHTTPTestCase):
app = web.Application(handlers=handlers)
app.file_config = Mock()
app.file_config.network_timeout = 5
+ app.urlpath_for_object = Mock(return_value="/urlpath_for_object/")
app.command_line_config = Mock()
app.command_line_config.base_urlpath = "/"
- app.reverse_proxy = Mock()
- app.reverse_proxy.add_container = mock_coro_factory()
- app.reverse_proxy.remove_container = mock_coro_factory()
+ app.reverse_proxy = MockReverseProxy()
container = Mock()
container.urlpath = "containers/12345"
container.url_id = "12345"
diff --git a/tests/services/test_reverse_proxy.py b/tests/services/test_reverse_proxy.py
index 39ff093..71b0298 100644
--- a/tests/services/test_reverse_proxy.py
+++ b/tests/services/test_reverse_proxy.py
@@ -4,8 +4,6 @@ from jupyterhub import orm
from remoteappmanager.services.reverse_proxy import ReverseProxy
from tornado import gen, testing
-from remoteappmanager.docker.container import Container
-
class TestReverseProxy(testing.AsyncTestCase):
@testing.gen_test
@@ -22,11 +20,11 @@ class TestReverseProxy(testing.AsyncTestCase):
reverse_proxy._reverse_proxy = Mock(spec=orm.Proxy)
reverse_proxy._reverse_proxy.api_request = mock_api_request
- container = Container(docker_id="12345", base_urlpath="/foo/")
- yield reverse_proxy.add_container(container)
+ yield reverse_proxy.register("/hello/from/me/",
+ "http://localhost:12312/")
self.assertEqual(coroutine_out["kwargs"]["method"], "POST")
- yield reverse_proxy.remove_container(container)
+ yield reverse_proxy.unregister("/hello/from/me/")
self.assertEqual(coroutine_out["kwargs"]["method"], "DELETE")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 4
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
escapism==1.0.1
exceptiongroup==1.2.2
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
numpy==2.0.2
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
pandas==2.2.3
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@05225db5d853d5b00bc1135763affe0f9935ea99#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- escapism==1.0.1
- exceptiongroup==1.2.2
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- numpy==2.0.2
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- pandas==2.2.3
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_start",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_stop",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_view",
"tests/restmodel/test_container.py::TestContainer::test_create",
"tests/restmodel/test_container.py::TestContainer::test_delete"
]
| [
"tests/services/test_reverse_proxy.py::TestReverseProxy::test_reverse_proxy_operations"
]
| [
"tests/handlers/test_home_handler.py::TestHomeHandler::test_failed_auth",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_home",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_failed_auth",
"tests/restmodel/test_container.py::TestContainer::test_items",
"tests/restmodel/test_container.py::TestContainer::test_retrieve"
]
| []
| BSD 3-Clause "New" or "Revised" License | 647 | [
"remoteappmanager/services/reverse_proxy.py",
"remoteappmanager/restresources/container.py",
"remoteappmanager/handlers/home_handler.py",
"remoteappmanager/application.py"
]
| [
"remoteappmanager/services/reverse_proxy.py",
"remoteappmanager/restresources/container.py",
"remoteappmanager/handlers/home_handler.py",
"remoteappmanager/application.py"
]
|
|
simphony__simphony-remote-121 | 05225db5d853d5b00bc1135763affe0f9935ea99 | 2016-07-18 13:14:05 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/handlers/home_handler.py b/remoteappmanager/handlers/home_handler.py
index c1648de..5b50efc 100644
--- a/remoteappmanager/handlers/home_handler.py
+++ b/remoteappmanager/handlers/home_handler.py
@@ -1,6 +1,5 @@
import socket
import os
-import uuid
from datetime import timedelta
import errno
@@ -45,12 +44,11 @@ class HomeHandler(BaseHandler):
try:
yield handler(options)
- except Exception as e:
+ except Exception as exc:
# Create a random reference number for support
- ref = str(uuid.uuid1())
- self.log.exception("Failed with POST action: {0}. {1} "
- "Ref: {2}".format(
- action, str(e), ref))
+ ref = self.log.issue(
+ "Failed with POST action {}".format(action),
+ exc)
images_info = yield self._get_images_info()
@@ -63,7 +61,7 @@ class HomeHandler(BaseHandler):
images_info=images_info,
error_message=message.format(
action=action,
- error_type=type(e).__name__,
+ error_type=type(exc).__name__,
ref=ref))
# Subhandling after post
diff --git a/remoteappmanager/logging/logging_mixin.py b/remoteappmanager/logging/logging_mixin.py
index 6339a43..fddc6f6 100644
--- a/remoteappmanager/logging/logging_mixin.py
+++ b/remoteappmanager/logging/logging_mixin.py
@@ -1,3 +1,6 @@
+import uuid
+import types
+
from traitlets import (
HasTraits,
Instance,
@@ -5,6 +8,25 @@ from traitlets import (
)
+def issue(self, message, exc=None):
+ """Accepts a message that will be logged with an additional reference
+ code for easy log lookup.
+
+ The identifier will be returned for inclusion in user-visible
+ error messages.
+ """
+ ref = str(uuid.uuid1())
+
+ if exc is None:
+ err_message = "{}. Ref: {}".format(message, ref)
+ else:
+ err_message = "{} : {}. Ref: {}".format(
+ message, str(exc), ref)
+
+ self.error(err_message)
+ return ref
+
+
class LoggingMixin(HasTraits):
"""A HasTrait class that provides logging. Used as a mixin.
"""
@@ -13,5 +35,9 @@ class LoggingMixin(HasTraits):
@default('log')
def _log_default(self):
- from traitlets import log
- return log.get_logger()
+ from tornado.log import app_log
+
+ # monkey patch the logger to provide an additional method that handles
+ # issues
+ app_log.issue = types.MethodType(issue, app_log)
+ return app_log
| Formalise error handling with the logger and the refnumber for the issue
Currently when an error takes place in a number of places we generate a `refnumber` that can be displayed in the user when an error takes place. This `refnumber` is at the same time logged next to the related error. This will help to pinpoint the place in the logs where a specific error (reported by a user) took place and simplify debugging.
To achieve this I would suggest one of the following:
- Wrap the python `Logger` logging functions so that they generate a refnumber, pass it to the logger and then return it to the caller.
- Provide a generic `log(self, level, msg, *args, **kwargs)` function in the `BaseHandler` that will generate the refnumber pass it to a python logger and return it.
I think that the second implementation is simpler and easier to test.
| simphony/simphony-remote | diff --git a/tests/logging/__init__.py b/tests/logging/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/logging/test_logging_mixin.py b/tests/logging/test_logging_mixin.py
new file mode 100644
index 0000000..833562e
--- /dev/null
+++ b/tests/logging/test_logging_mixin.py
@@ -0,0 +1,19 @@
+import unittest
+from unittest import mock
+
+from remoteappmanager.logging.logging_mixin import LoggingMixin
+
+
+class Logged(LoggingMixin):
+ pass
+
+
+class TestLoggingMixin(unittest.TestCase):
+ def test_issue(self):
+ l = Logged()
+ with mock.patch("tornado.log.app_log.error") as mock_error:
+ ref = l.log.issue("hello")
+ self.assertIn("hello. Ref: "+str(ref), mock_error.call_args[0][0])
+ ref = l.log.issue("hello", Exception("Boom!"))
+ self.assertIn("hello : Boom!. Ref: "+str(ref),
+ mock_error.call_args[0][0])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8",
"sphinx",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@05225db5d853d5b00bc1135763affe0f9935ea99#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/logging/test_logging_mixin.py::TestLoggingMixin::test_issue"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 648 | [
"remoteappmanager/logging/logging_mixin.py",
"remoteappmanager/handlers/home_handler.py"
]
| [
"remoteappmanager/logging/logging_mixin.py",
"remoteappmanager/handlers/home_handler.py"
]
|
|
simphony__simphony-remote-123 | ae2c07cdf3906952600c57b4439d57d7ff4b2cc1 | 2016-07-18 14:47:10 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/handlers/home_handler.py b/remoteappmanager/handlers/home_handler.py
index c21824b..7d5ce97 100644
--- a/remoteappmanager/handlers/home_handler.py
+++ b/remoteappmanager/handlers/home_handler.py
@@ -131,8 +131,10 @@ class HomeHandler(BaseHandler):
container_manager = self.application.container_manager
container = yield container_manager.container_from_url_id(url_id)
if not container:
- self.finish("Unable to view the application")
- return
+ self.log.warning("Could not find container for url_id {}".format(
+ url_id
+ ))
+ raise ValueError("Unable to view container for specified url_id")
# make sure the container is actually running and working
yield self._wait_for_container_ready(container)
@@ -158,8 +160,10 @@ class HomeHandler(BaseHandler):
container = yield container_manager.container_from_url_id(url_id)
if not container:
- self.finish("Unable to view the application")
- return
+ self.log.warning("Could not find container for url_id {}".format(
+ url_id
+ ))
+ raise ValueError("Unable to view container for specified url_id")
urlpath = url_path_join(
self.application.command_line_config.base_urlpath,
| HomeHandler should not 'finish' in View and Stop but raise
Let view and stop raise and then the HomeHandler.post will handle the logging.
https://github.com/simphony/simphony-remote/blob/98ee374756694ef1855a9d38d78d3561ec6cc54e/remoteappmanager/handlers/home_handler.py#L155
https://github.com/simphony/simphony-remote/blob/98ee374756694ef1855a9d38d78d3561ec6cc54e/remoteappmanager/handlers/home_handler.py#L132 | simphony/simphony-remote | diff --git a/tests/handlers/test_home_handler.py b/tests/handlers/test_home_handler.py
index 596b8db..7a49d7f 100644
--- a/tests/handlers/test_home_handler.py
+++ b/tests/handlers/test_home_handler.py
@@ -184,3 +184,34 @@ class TestHomeHandler(TempMixin, utils.AsyncHTTPTestCase):
self.assertTrue(self._app.reverse_proxy.register.called)
self.assertTrue(redirect.called)
+
+ def test_container_manager_does_not_return_container(self):
+ self._app.container_manager.container_from_url_id = \
+ utils.mock_coro_factory(None)
+ res = self.fetch(
+ "/user/username/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=urllib.parse.urlencode({
+ 'action': 'view',
+ 'url_id': '12345'
+ })
+ )
+
+ self.assertIn("ValueError", str(res.body))
+
+ res = self.fetch(
+ "/user/username/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=urllib.parse.urlencode({
+ 'action': 'stop',
+ 'url_id': '12345'
+ })
+ )
+
+ self.assertIn("ValueError", str(res.body))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@ae2c07cdf3906952600c57b4439d57d7ff4b2cc1#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/handlers/test_home_handler.py::TestHomeHandler::test_container_manager_does_not_return_container"
]
| []
| [
"tests/handlers/test_home_handler.py::TestHomeHandler::test_failed_auth",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_home",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_failed_auth",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_start",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_stop",
"tests/handlers/test_home_handler.py::TestHomeHandler::test_post_view"
]
| []
| BSD 3-Clause "New" or "Revised" License | 649 | [
"remoteappmanager/handlers/home_handler.py"
]
| [
"remoteappmanager/handlers/home_handler.py"
]
|
|
ifosch__accloudtant-104 | 8fdb36de3e41cc719b1e1df95c1083a9ea427392 | 2016-07-19 10:07:22 | 33f90ff0bc1639c9fe793afd837eee80170caf3e | diff --git a/accloudtant/aws/prices.py b/accloudtant/aws/prices.py
index 44435af..1cb74ef 100644
--- a/accloudtant/aws/prices.py
+++ b/accloudtant/aws/prices.py
@@ -143,7 +143,8 @@ def process_model(url, instances=None):
instances = {}
js_name = url.split('/')[-1]
pricing = requests.get(url)
- for js_line in io.StringIO(pricing.content.decode("utf-8").replace("\n", "")):
+ content = pricing.content.decode("utf-8").replace("\n", "")
+ for js_line in io.StringIO(content):
if 'callback' in js_line:
data = fix_lazy_json(re.sub(r".*callback\((.+)\).*",
r"\1", js_line))
diff --git a/accloudtant/aws/reports.py b/accloudtant/aws/reports.py
index 65a433e..0f56ea5 100644
--- a/accloudtant/aws/reports.py
+++ b/accloudtant/aws/reports.py
@@ -24,7 +24,7 @@ import sys
class Reports(object):
- def __init__(self, logger = None):
+ def __init__(self, logger=None):
if logger is None:
self.logger = getLogger('accloudtant.report')
self.logger.setLevel(DEBUG)
@@ -33,25 +33,36 @@ class Reports(object):
self.logger = logger
ec2 = boto3.resource('ec2')
ec2_client = boto3.client('ec2')
- instances_filters = [{
- 'Name': 'instance-state-name',
- 'Values': ['running', ],
- }, ]
- reserved_instances_filters = [{
- 'Name': 'state',
- 'Values': ['active', ],
- }, ]
+ self.counters = {
+ 'instances': {
+ 'total': 0,
+ },
+ 'reserved': {
+ 'total': 0,
+ },
+ }
+ self.instances = []
+ self.reserved_instances = []
try:
- self.instances = [
- Instance(i)
- for i in ec2.instances.filter(Filters=instances_filters)
- ]
- self.reserved_instances = [
- ReservedInstance(i)
- for i in ec2_client.describe_reserved_instances(
- Filters=reserved_instances_filters
- )['ReservedInstances']
- ]
+ for i in ec2.instances.all():
+ instance = Instance(i)
+ if instance.state == "running":
+ self.instances.append(instance)
+ if instance.state not in self.counters['instances']:
+ self.counters['instances'][instance.state] = 0
+ self.counters['instances'][instance.state] += 1
+ self.counters['instances']['total'] += 1
+ ri_key = 'ReservedInstances'
+ reserved_ctrs = self.counters['reserved']
+ for r in ec2_client.describe_reserved_instances()[ri_key]:
+ reserved_instance = ReservedInstance(r)
+ if reserved_instance.state == "active":
+ self.reserved_instances.append(reserved_instance)
+ reserved_ctrs['total'] += reserved_instance.instance_count
+ reserved_ctrs['free'] = reserved_ctrs['total']
+ reserved_ctrs['not_reserved'] = reserved_ctrs['total']
+ reserved_ctrs['used'] = 0
+ reserved_ctrs['not reserved'] = 0
except exceptions.NoCredentialsError:
logger.error("Error: no AWS credentials found")
sys.exit(1)
@@ -72,30 +83,60 @@ class Reports(object):
instance.reserved = 'Yes'
instance.current = reserved.usage_price
reserved.link(instance)
+ self.counters['reserved']['used'] += 1
+ self.counters['reserved']['free'] -= 1
break
+ reserved_counters = self.counters['reserved']
+ instances_counters = self.counters['instances']
+ reserved_counters['not reserved'] = instances_counters['running']
+ reserved_counters['not reserved'] -= reserved_counters['used']
def __repr__(self):
- headers = ['Id',
- 'Name',
- 'Type',
- 'AZ',
- 'OS',
- 'State',
- 'Launch time',
- 'Reserved',
- 'Current hourly price',
- 'Renewed hourly price']
+ headers = [
+ 'Id',
+ 'Name',
+ 'Type',
+ 'AZ',
+ 'OS',
+ 'State',
+ 'Launch time',
+ 'Reserved',
+ 'Current hourly price',
+ 'Renewed hourly price',
+ ]
table = []
for instance in self.instances:
- row = [instance.id,
- instance.name,
- instance.size,
- instance.availability_zone,
- instance.operating_system,
- instance.state,
- instance.launch_time.strftime('%Y-%m-%d %H:%M:%S'),
- instance.reserved,
- instance.current,
- instance.best]
+ row = [
+ instance.id,
+ instance.name,
+ instance.size,
+ instance.availability_zone,
+ instance.operating_system,
+ instance.state,
+ instance.launch_time.strftime('%Y-%m-%d %H:%M:%S'),
+ instance.reserved,
+ instance.current,
+ instance.best,
+ ]
table.append(row)
- return tabulate(table, headers)
+ footer_headers = [
+ 'Running',
+ 'Stopped',
+ 'Total instances',
+ 'Used',
+ 'Free',
+ 'Not reserved',
+ 'Total reserved',
+ ]
+ footer_table = [[
+ self.counters['instances']['running'],
+ self.counters['instances']['stopped'],
+ self.counters['instances']['total'],
+ self.counters['reserved']['used'],
+ self.counters['reserved']['free'],
+ self.counters['reserved']['not reserved'],
+ self.counters['reserved']['total'],
+ ]]
+ inventory = tabulate(table, headers)
+ summary = tabulate(footer_table, footer_headers)
+ return "{}\n\n{}".format(inventory, summary)
diff --git a/accloudtant/utils/__init__.py b/accloudtant/utils/__init__.py
index c1b5df2..1e7211d 100644
--- a/accloudtant/utils/__init__.py
+++ b/accloudtant/utils/__init__.py
@@ -15,7 +15,6 @@
# limitations under the License.
import io
-import codecs
import tokenize
import token
| Create footer for report
When I use the `report` command
I want to get a footer with totals. | ifosch/accloudtant | diff --git a/tests/aws/report_expected.txt b/tests/aws/report_expected.txt
index f2bbb8b..882ff19 100644
--- a/tests/aws/report_expected.txt
+++ b/tests/aws/report_expected.txt
@@ -7,3 +7,7 @@ i-1840273d database1 r2.8xlarge us-east-1c Linux/UNIX stopped
i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No 0.767 0.3892
+
+ Running Stopped Total instances Used Free Not reserved Total reserved
+--------- --------- ----------------- ------ ------ -------------- ----------------
+ 6 1 7 5 1 1 6
diff --git a/tests/aws/report_running_expected.txt b/tests/aws/report_running_expected.txt
index befecd0..e7bd69c 100644
--- a/tests/aws/report_running_expected.txt
+++ b/tests/aws/report_running_expected.txt
@@ -6,3 +6,7 @@ i-9840273d app2 r2.8xlarge us-east-1c SUSE Linux running
i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379
i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No 0.767 0.3892
+
+ Running Stopped Total instances Used Free Not reserved Total reserved
+--------- --------- ----------------- ------ ------ -------------- ----------------
+ 6 1 7 5 1 1 6
diff --git a/tests/aws/test_reports.py b/tests/aws/test_reports.py
index d0f6793..57084c3 100644
--- a/tests/aws/test_reports.py
+++ b/tests/aws/test_reports.py
@@ -320,6 +320,39 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
'UsagePrice': 0.611,
'Duration': 31536000,
'State': 'active',
+ }, {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 't2.micro',
+ 'Start': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
+ 'FixedPrice': 5352.36,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.611,
+ 'Duration': 31536000,
+ 'State': 'active',
}, {
'ProductDescription': 'Linux/UNIX',
'InstanceTenancy': 'default',
diff --git a/tests/aws/test_reserved_instance.py b/tests/aws/test_reserved_instance.py
index 9627ebf..50fd6fc 100644
--- a/tests/aws/test_reserved_instance.py
+++ b/tests/aws/test_reserved_instance.py
@@ -13,7 +13,6 @@
# limitations under the License.
import datetime
-import pytest
from dateutil.tz import tzutc
import accloudtant.aws.reserved_instance
from conftest import MockEC2Instance
diff --git a/tests/test_utils.py b/tests/test_utils.py
index caad5a4..c3b0877 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -19,4 +19,5 @@ import accloudtant.utils
def test_fix_lazy_json():
bad_json = '{ key: "value" }'.encode('utf-8')
good_json = '{"key":"value"}'
- assert(accloudtant.utils.fix_lazy_json(codecs.decode(bad_json)) == good_json)
+ result = accloudtant.utils.fix_lazy_json(codecs.decode(bad_json))
+ assert(result == good_json)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ifosch/accloudtant.git@8fdb36de3e41cc719b1e1df95c1083a9ea427392#egg=accloudtant
boto3==1.1.4
botocore==1.2.10
click==4.1
click-log==0.1.4
docutils==0.21.2
exceptiongroup==1.2.2
futures==2.2.0
iniconfig==2.1.0
jmespath==0.10.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
requests==2.8.1
six==1.17.0
tabulate==0.7.5
tomli==2.2.1
| name: accloudtant
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.1.4
- botocore==1.2.10
- click==4.1
- click-log==0.1.4
- docutils==0.21.2
- exceptiongroup==1.2.2
- futures==2.2.0
- iniconfig==2.1.0
- jmespath==0.10.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- requests==2.8.1
- six==1.17.0
- tabulate==0.7.5
- tomli==2.2.1
prefix: /opt/conda/envs/accloudtant
| [
"tests/aws/test_reports.py::test_reports"
]
| []
| [
"tests/aws/test_reserved_instance.py::test_retired_ri",
"tests/aws/test_reserved_instance.py::test_active_ri",
"tests/aws/test_reserved_instance.py::test_ri_link",
"tests/test_utils.py::test_fix_lazy_json"
]
| []
| null | 650 | [
"accloudtant/utils/__init__.py",
"accloudtant/aws/prices.py",
"accloudtant/aws/reports.py"
]
| [
"accloudtant/utils/__init__.py",
"accloudtant/aws/prices.py",
"accloudtant/aws/reports.py"
]
|
|
zalando-stups__pierone-cli-49 | 9f99c8f5a054c35b623c0601e66da0c15fdb578a | 2016-07-19 12:50:26 | 560cae1b4fc185c7a8aa3a1a50e0a96b2c7dd8e7 | rafaelcaricio: @hjacobs There was some problem with the Travis build. I wanted to restart it, but since I am not admin of this repo I cannot do (I guess), could you please try to re-run it?
jmcs: @rafaelcaricio I re-triggered the travis job
rafaelcaricio: Not sure what to do. The tests pass, but for some reason the flake8 command does not run.
```
The command "python setup.py test" exited with 0.
$ python setup.py flake8
running flake8
Traceback (most recent call last):
File "/opt/python/3.4.2/lib/python3.4/multiprocessing/queues.py", line 242, in _feed
obj = ForkingPickler.dumps(obj)
File "/opt/python/3.4.2/lib/python3.4/multiprocessing/reduction.py", line 50, in dumps
cls(buf, protocol).dump(obj)
_pickle.PicklingError: Can't pickle <class 'pkg_resources._vendor.packaging._structures.NegativeInfinity'>: it's not the same object as pkg_resources._vendor.packaging._structures.NegativeInfinity
No output has been received in the last 10 minutes, this potentially indicates a stalled build or something wrong with the build itself.
The build has been terminate
```
Maybe my code is so badly not compliant with styling that flake8 refuses to even start checking it. 😆 | diff --git a/.travis.yml b/.travis.yml
index e417a33..7c746c5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,6 +4,7 @@ python:
install:
- pip install -r requirements.txt
- pip install coveralls
+ - pip install flake8 # forcing installation of flake8, might be removed after https://gitlab.com/pycqa/flake8/issues/164 gets fixed.
script:
- python setup.py test
- python setup.py flake8
diff --git a/pierone/cli.py b/pierone/cli.py
index 90bb5c2..8918a42 100644
--- a/pierone/cli.py
+++ b/pierone/cli.py
@@ -8,7 +8,9 @@ import pierone
import requests
import stups_cli.config
import zign.api
-from clickclick import AliasedGroup, OutputFormat, UrlType, error, print_table
+from clickclick import (AliasedGroup, OutputFormat, UrlType, error,
+ fatal_error, print_table)
+from requests import RequestException
from .api import (DockerImage, Unauthorized, docker_login, get_image_tags,
get_latest_tag, parse_time, request)
@@ -76,6 +78,17 @@ def print_version(ctx, param, value):
ctx.exit()
+def validate_pierone_url(url: str) -> None:
+ ping_url = url.rstrip('/') + '/swagger.json'
+ try:
+ response = requests.get(ping_url, timeout=5)
+ response.raise_for_status()
+ if 'Pier One API' not in response.text:
+ fatal_error('ERROR: Did not find a valid Pier One registry at {}'.format(url))
+ except RequestException:
+ fatal_error('ERROR: Could not reach {}'.format(ping_url))
+
+
def set_pierone_url(config: dict, url: str) -> None:
'''Read Pier One URL from cli, from config file or from stdin.'''
url = url or config.get('url')
@@ -93,6 +106,7 @@ def set_pierone_url(config: dict, url: str) -> None:
# issue 63: gracefully handle URLs without scheme
url = 'https://{}'.format(url)
+ validate_pierone_url(url)
config['url'] = url
return url
| Pierone login accepts any URL
Would be nice to validate if the pierone URL is actually valid. Maybe pinging the address to see if it works and showing an error. The current behaviour leads to user that committed a typo in the pierone URL to think they are logged-in and getting error from `docker push` later with a not very helpful message.
### Current behaviour
Example of what currently happens:
```
$ pierone login --url registry.does-not-exist.example.com
Getting OAuth2 token "pierone".. OK
Storing Docker client configuration in /home/master/.docker/config.json.. OK
```
Then trying to push image using docker cli:
```
$ docker push pierone.opensource.zalan.do/bus/hello:b17
The push refers to a repository [pierone.opensource.zalan.do/bus/hello]
9c445b8a75e0: Preparing
8a48ff634f1d: Preparing
...
19429b698a22: Waiting
9436069b92a3: Waiting
no basic auth credentials
```
This leads users to think there is a problem with Pierone registry or with Docker which is misleading.
### Suggested behaviour
When trying to login in pierone with a URL of non-pierone server:
```
$ pierone login --url registry.does-not-exist.example.com
ERROR: Not found a valid Pierone registry at registry.does-not-exist.example.com
```
| zalando-stups/pierone-cli | diff --git a/tests/test_cli.py b/tests/test_cli.py
index e76073c..0bdd2fe 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -3,8 +3,17 @@ import os
import re
from unittest.mock import MagicMock
+import pytest
from click.testing import CliRunner
from pierone.cli import cli
+from requests import RequestException
+
+
[email protected](autouse=True)
+def valid_pierone_url(monkeypatch):
+ response = MagicMock()
+ response.text = 'Pier One API'
+ monkeypatch.setattr('requests.get', lambda *args, **kw: response)
def test_version(monkeypatch):
@@ -16,22 +25,47 @@ def test_version(monkeypatch):
def test_login(monkeypatch, tmpdir):
- response = MagicMock()
-
runner = CliRunner()
monkeypatch.setattr('stups_cli.config.load_config', lambda x: {})
monkeypatch.setattr('pierone.api.get_named_token', MagicMock(return_value={'access_token': 'tok123'}))
monkeypatch.setattr('os.path.expanduser', lambda x: x.replace('~', str(tmpdir)))
- monkeypatch.setattr('requests.get', lambda x, timeout: response)
with runner.isolated_filesystem():
result = runner.invoke(cli, ['login'], catch_exceptions=False, input='pieroneurl\n')
+ assert 'Storing Docker client configuration' in result.output
+ assert result.output.rstrip().endswith('OK')
with open(os.path.join(str(tmpdir), '.docker/config.json')) as fd:
data = json.load(fd)
assert data['auths']['https://pieroneurl']['auth'] == 'b2F1dGgyOnRvazEyMw=='
- assert 'Storing Docker client configuration' in result.output
- assert result.output.rstrip().endswith('OK')
+
+
+def test_invalid_url_for_login(monkeypatch, tmpdir):
+ runner = CliRunner()
+ response = MagicMock()
+
+ monkeypatch.setattr('stups_cli.config.load_config', lambda x: {})
+ monkeypatch.setattr('pierone.api.get_named_token', MagicMock(return_value={'access_token': 'tok123'}))
+ monkeypatch.setattr('os.path.expanduser', lambda x: x.replace('~', str(tmpdir)))
+
+ # Missing Pier One header
+ response.text = 'Not valid API'
+ monkeypatch.setattr('requests.get', lambda *args, **kw: response)
+
+ with runner.isolated_filesystem():
+ result = runner.invoke(cli, ['login'], catch_exceptions=False, input='pieroneurl\n')
+ assert 'ERROR: Did not find a valid Pier One registry at https://pieroneurl' in result.output
+ assert result.exit_code == 1
+ assert not os.path.exists(os.path.join(str(tmpdir), '.docker/config.json'))
+
+ # Not a valid header
+ response.raise_for_status = MagicMock(side_effect=RequestException)
+ monkeypatch.setattr('requests.get', lambda *args, **kw: response)
+ with runner.isolated_filesystem():
+ result = runner.invoke(cli, ['login'], catch_exceptions=False, input='pieroneurl\n')
+ assert 'ERROR: Could not reach https://pieroneurl' in result.output
+ assert result.exit_code == 1
+ assert not os.path.exists(os.path.join(str(tmpdir), '.docker/config.json'))
def test_login_arg_user(monkeypatch, tmpdir):
@@ -95,8 +129,6 @@ def test_login_env_user(monkeypatch, tmpdir):
def test_login_given_url_option(monkeypatch, tmpdir):
- response = MagicMock()
-
runner = CliRunner()
config = {}
@@ -108,7 +140,6 @@ def test_login_given_url_option(monkeypatch, tmpdir):
monkeypatch.setattr('stups_cli.config.store_config', store)
monkeypatch.setattr('pierone.api.get_named_token', MagicMock(return_value={'access_token': 'tok123'}))
monkeypatch.setattr('os.path.expanduser', lambda x: x.replace('~', str(tmpdir)))
- monkeypatch.setattr('requests.get', lambda x, timeout: response)
with runner.isolated_filesystem():
runner.invoke(cli, ['login'], catch_exceptions=False, input='pieroneurl\n')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==2.7.0
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
pytest-cov==6.0.0
PyYAML==6.0.2
requests==2.32.3
stups-cli-support==1.1.22
-e git+https://github.com/zalando-stups/pierone-cli.git@9f99c8f5a054c35b623c0601e66da0c15fdb578a#egg=stups_pierone
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
urllib3==2.3.0
| name: pierone-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==2.7.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- pytest-cov==6.0.0
- pyyaml==6.0.2
- requests==2.32.3
- stups-cli-support==1.1.22
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/pierone-cli
| [
"tests/test_cli.py::test_invalid_url_for_login"
]
| []
| [
"tests/test_cli.py::test_version",
"tests/test_cli.py::test_login",
"tests/test_cli.py::test_login_arg_user",
"tests/test_cli.py::test_login_zign_user",
"tests/test_cli.py::test_login_env_user",
"tests/test_cli.py::test_login_given_url_option",
"tests/test_cli.py::test_scm_source",
"tests/test_cli.py::test_image",
"tests/test_cli.py::test_tags",
"tests/test_cli.py::test_tags_versions_limit",
"tests/test_cli.py::test_cves",
"tests/test_cli.py::test_no_cves_found",
"tests/test_cli.py::test_latest",
"tests/test_cli.py::test_latest_not_found",
"tests/test_cli.py::test_url_without_scheme"
]
| []
| Apache License 2.0 | 651 | [
"pierone/cli.py",
".travis.yml"
]
| [
"pierone/cli.py",
".travis.yml"
]
|
simphony__simphony-remote-133 | af0203df6cb3232a28b50fa1023baf73e234aa6d | 2016-07-20 15:25:43 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/docker/container.py b/remoteappmanager/docker/container.py
index cb484b0..f92304c 100644
--- a/remoteappmanager/docker/container.py
+++ b/remoteappmanager/docker/container.py
@@ -56,7 +56,7 @@ class Container(HasTraits):
")>")
@classmethod
- def from_docker_containers_dict(cls, docker_dict):
+ def from_docker_dict(cls, docker_dict):
"""Returns a Container object with the info given by a
docker Client.
@@ -74,24 +74,66 @@ class Container(HasTraits):
>>> # containers is a list of dict
>>> containers = docker.Client().containers()
- >>> Container.from_docker_containers_dict(containers[0])
+ >>> Container.from_docker_dict(containers[0])
"""
- ip = cls.ip.default_value
- port = cls.port.default_value
-
- if docker_dict.get('Ports'):
- ip = docker_dict['Ports'][0].get('IP', ip)
- port = docker_dict['Ports'][0].get('PublicPort', port)
-
- labels = docker_dict.get("Labels", {})
- mapping_id = labels.get(SIMPHONY_NS+"mapping_id", "")
- url_id = labels.get(SIMPHONY_NS+"url_id", "")
-
- return cls(docker_id=docker_dict.get('Id', ''),
- name=docker_dict.get('Names', ('',))[0],
- image_name=docker_dict.get('Image', ''),
- image_id=docker_dict.get('ImageID', ''),
- mapping_id=mapping_id,
- ip=ip,
- port=port,
- url_id=url_id)
+
+ is_inspect_container_output = ("Config" in docker_dict)
+
+ kwargs = dict(
+ docker_id=docker_dict.get('Id') or '',
+ ip=cls.ip.default_value,
+ port=cls.port.default_value,
+ )
+
+ if is_inspect_container_output:
+ # It's a client.inspect_container() output
+
+ network_settings = docker_dict.get("NetworkSettings") or {}
+ ports = network_settings.get("Ports") or {}
+ # unfortunately, in the case of a running container, we don't have
+ # a single list. Instead, we have a dict where the keys are
+ # the "port identifier" (e.g. 8888/tcp) and the value is a list
+ # of dictionaries.
+ # We assume that we only have one, as above
+ if len(ports) > 1:
+ raise ValueError("Container Ports had more than one element.")
+
+ if len(ports):
+ port_values = list(ports.values())[0]
+ if len(port_values) > 1:
+ raise ValueError("Container Ports values had "
+ "more than one element.")
+
+ if len(port_values):
+ kwargs["ip"] = port_values[0].get("HostIp") or kwargs["ip"]
+ kwargs["port"] = int(port_values[0].get("HostPort") or
+ kwargs["port"])
+
+ config = docker_dict.get("Config", {})
+ labels = config.get("Labels")
+
+ kwargs["image_name"] = config.get("Image")
+ kwargs["image_id"] = docker_dict["Image"]
+ kwargs["name"] = docker_dict["Name"]
+ else:
+ # It's a client.containers() output, so we have different rules.
+ ports = docker_dict.get('Ports') or []
+ if len(ports) > 1:
+ raise ValueError("Container Ports had more than one element.")
+
+ if len(ports):
+ kwargs["ip"] = ports[0].get('IP') or kwargs["ip"]
+ kwargs["port"] = int(ports[0].get('PublicPort') or
+ kwargs["port"])
+
+ labels = docker_dict.get("Labels") or {}
+
+ kwargs["image_name"] = docker_dict.get('Image') or ''
+ kwargs["image_id"] = docker_dict.get("ImageID") or ''
+ names = docker_dict.get("Names") or ('', )
+ kwargs["name"] = names[0]
+
+ kwargs["mapping_id"] = labels.get(SIMPHONY_NS+"mapping_id") or ""
+ kwargs["url_id"] = labels.get(SIMPHONY_NS+"url_id") or ""
+
+ return cls(**kwargs)
diff --git a/remoteappmanager/docker/container_manager.py b/remoteappmanager/docker/container_manager.py
index 1db82e5..7b85d53 100644
--- a/remoteappmanager/docker/container_manager.py
+++ b/remoteappmanager/docker/container_manager.py
@@ -166,8 +166,8 @@ class ContainerManager(LoggingMixin):
infos = yield self.docker_client.containers(filters=filters)
for info in infos:
try:
- container = Container.from_docker_containers_dict(info)
- except Exception:
+ container = Container.from_docker_dict(info)
+ except ValueError:
self.log.exception("Unable to parse container info.")
continue
diff --git a/remoteappmanager/restresources/container.py b/remoteappmanager/restresources/container.py
index c3cd5e8..64bf36a 100644
--- a/remoteappmanager/restresources/container.py
+++ b/remoteappmanager/restresources/container.py
@@ -118,7 +118,7 @@ class Container(Resource):
if not container_dict:
return None
- return DockerContainer.from_docker_containers_dict(container_dict[0])
+ return DockerContainer.from_docker_dict(container_dict[0])
@gen.coroutine
def _start_container(self, user_name, app, policy, mapping_id):
| Image.from_docker_dict and Container.from_docker_dict take inconsistent inputs
`Container.from_docker_dict` assumes inputs from the result of `docker.client.containers` but not `docker.client.inspect_container`, whilst `Image.from_docker_dict` can take inputs from both.
It would be useful and neater if `Container.from_docker_dict` accepts input from the result `docker.client.inspect_container` as well.
Follows #79
| simphony/simphony-remote | diff --git a/tests/docker/test_container.py b/tests/docker/test_container.py
index f5223dc..249eac9 100644
--- a/tests/docker/test_container.py
+++ b/tests/docker/test_container.py
@@ -1,7 +1,8 @@
from unittest import TestCase
from remoteappmanager.docker.container import Container
-from tests.utils import assert_containers_equal
+from tests.utils import assert_containers_equal, \
+ mock_docker_client_with_running_containers
class TestContainer(TestCase):
@@ -42,7 +43,7 @@ class TestContainer(TestCase):
'Status': 'Up 56 minutes'}
# Container with public port
- actual = Container.from_docker_containers_dict(container_dict)
+ actual = Container.from_docker_dict(container_dict)
expected = Container(
docker_id='248e45e717cd740ae763a1c565',
name='/remoteexec-user-empty-ubuntu_3Alatest',
@@ -75,7 +76,7 @@ class TestContainer(TestCase):
'Status': 'Up 56 minutes'}
# Container without public port
- actual = Container.from_docker_containers_dict(container_dict)
+ actual = Container.from_docker_dict(container_dict)
expected = Container(
docker_id='812c765d0549be0ab831ae8348',
name='/remoteexec-user-empty-ubuntu_3Alatest',
@@ -86,3 +87,57 @@ class TestContainer(TestCase):
url_id="8e2fe66d5de74db9bbab50c0d2f92b33")
assert_containers_equal(self, actual, expected)
+
+ def test_from_docker_dict_inspect_container(self):
+ client = mock_docker_client_with_running_containers()
+ actual = Container.from_docker_dict(
+ client.inspect_container("id"))
+
+ expected = Container(
+ docker_id='35d88fe321c3d575ec3be64f54b8967ef49c0dc92395bf4c1e511ed3e6ae0c79', # noqa
+ name='/remoteexec-username-simphonyproject_2Fsimphonic-mayavi_5F1',
+ image_name='simphonyproject/simphonic-mayavi',
+ image_id='sha256:f43b749341ee37b56e7bd8d99f09629f311aaec35a8045a39185b5659edef169', # noqa
+ ip='0.0.0.0',
+ port=32782,
+ url_id="55555555555555555555555555555555",
+ mapping_id="1c08c87878634e90af43d799e90f61d2")
+
+ assert_containers_equal(self, actual, expected)
+
+ def test_multiple_ports_data(self):
+ client = mock_docker_client_with_running_containers()
+ docker_dict = client.inspect_container("id")
+ docker_dict["NetworkSettings"]["Ports"] = {
+ '8888/tcp': [{'HostIp': '0.0.0.0', 'HostPort': '32782'}],
+ '8889/tcp': [{'HostIp': '0.0.0.0', 'HostPort': '32783'}]
+ }
+ with self.assertRaises(ValueError):
+ Container.from_docker_dict(docker_dict)
+
+ docker_dict["NetworkSettings"]["Ports"] = {
+ '8888/tcp': [
+ {'HostIp': '0.0.0.0', 'HostPort': '32782'},
+ {'HostIp': '0.0.0.0', 'HostPort': '32783'}
+ ]
+ }
+ with self.assertRaises(ValueError):
+ Container.from_docker_dict(docker_dict)
+
+ docker_dict = client.containers()[0]
+ docker_dict["Ports"] = [
+ {
+ 'IP': '0.0.0.0',
+ 'PublicIP': 34567,
+ 'PrivatePort': 22,
+ 'Type': 'tcp'
+ },
+ {
+ 'IP': '0.0.0.0',
+ 'PublicIP': 34562,
+ 'PrivatePort': 21,
+ 'Type': 'tcp'
+ }
+ ]
+ with self.assertRaises(ValueError):
+ Container.from_docker_dict(docker_dict)
diff --git a/tests/utils.py b/tests/utils.py
index 9279636..2556bcd 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -110,6 +110,30 @@ def mock_docker_client_with_running_containers():
'State': 'running',
'Status': 'Up About an hour'},
]
+ client.inspect_container.return_value = {
+ 'Config': {
+ 'Image': 'simphonyproject/simphonic-mayavi',
+ 'Labels': {'eu.simphony-project.docker.description': 'Ubuntu '
+ 'machine '
+ 'with '
+ 'simphony '
+ 'framework '
+ 'preinstalled',
+ 'eu.simphony-project.docker.mapping_id': '1c08c87878634e90af43d799e90f61d2',
+ 'eu.simphony-project.docker.ui_name': 'Simphony '
+ 'Framework '
+ '(w/ mayavi)',
+ 'eu.simphony-project.docker.url_id': '55555555555555555555555555555555',
+ 'eu.simphony-project.docker.user': 'username'},
+ },
+ 'Id': '35d88fe321c3d575ec3be64f54b8967ef49c0dc92395bf4c1e511ed3e6ae0c79',
+ 'Image': 'sha256:f43b749341ee37b56e7bd8d99f09629f311aaec35a8045a39185b5659edef169',
+ 'Name': '/remoteexec-username-simphonyproject_2Fsimphonic-mayavi_5F1',
+ 'NetworkSettings': {
+ 'Ports': {'8888/tcp': [{'HostIp': '0.0.0.0',
+ 'HostPort': '32782'}]},
+ },
+ }
return client
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8",
"sphinx",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@af0203df6cb3232a28b50fa1023baf73e234aa6d#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/docker/test_container.py::TestContainer::test_from_docker_dict_inspect_container",
"tests/docker/test_container.py::TestContainer::test_from_docker_dict_with_public_port",
"tests/docker/test_container.py::TestContainer::test_from_docker_dict_without_public_port",
"tests/docker/test_container.py::TestContainer::test_multiple_ports_data"
]
| []
| [
"tests/docker/test_container.py::TestContainer::test_host_url",
"tests/docker/test_container.py::TestContainer::test_url"
]
| []
| BSD 3-Clause "New" or "Revised" License | 652 | [
"remoteappmanager/docker/container.py",
"remoteappmanager/restresources/container.py",
"remoteappmanager/docker/container_manager.py"
]
| [
"remoteappmanager/docker/container.py",
"remoteappmanager/restresources/container.py",
"remoteappmanager/docker/container_manager.py"
]
|
|
simphony__simphony-remote-134 | af0203df6cb3232a28b50fa1023baf73e234aa6d | 2016-07-20 16:23:49 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/file_config.py b/remoteappmanager/file_config.py
index 2f93478..8966ed8 100644
--- a/remoteappmanager/file_config.py
+++ b/remoteappmanager/file_config.py
@@ -3,7 +3,6 @@ import os
import tornado.options
from docker import tls
from traitlets import HasTraits, Int, Unicode, Bool, Dict
-from traitlets.utils.sentinel import Sentinel
from remoteappmanager import paths
from remoteappmanager.traitlets import set_traits_from_dict
@@ -110,12 +109,7 @@ class FileConfig(HasTraits):
for traitlet_name, traitlet in self.traits().items():
# tornado.OptionParser defines an option with a Python type
# and performs type validation.
- # traitlet.default_value may be a Sentinel value (e.g. Tuple,
- # Dict, Instance), in which case we use the repr
- default_value = traitlet.default_value
-
- if type(default_value) is Sentinel:
- default_value = eval(traitlet.default_value_repr())
+ default_value = getattr(self, traitlet_name)
file_line_parser.define(
traitlet_name,
| FileConfig with tls=True or tls_verify=False only, leads to docker TLSParameterError
On Linux, if my file config for the `remoteappmanager_config.py` ONLY contains the following:
```
$ cat remoteappmanager_config.py
tls = True
```
or
```
$ cat remoteappmanager_config.py
tls_verify = True
```
Then starting remoteappmanager gives this error
```
$ remoteappmanager --user=kit --port=45707 --cookie-name=jupyter-hub-token-kit --base-urlpath=/user/kit --hub-host= --hub-prefix=/hub/ --hub-api-url=http://172.16.253.129:8081/hub/api --ip=127.0.0.1 --proxy-api-url=http://127.0.0.1:8001/api/routes/ --config-file=remoteappmanager_config.py
...
[E 160720 14:33:43 web:1548] Uncaught exception GET /user/kit/ (127.0.0.1)
HTTPServerRequest(protocol='http', host='127.0.0.1:8000', method='GET', uri='/user/kit/', version='HTTP/1.1', remote_ip='127.0.0.1', headers={'X-Forwarded-Proto': 'https', 'Accept-Language': 'en-US,en;q=0.5', 'X-Forwarded-For': '127.0.0.1', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'If-None-Match': '"dfba4089bef9b2e40bbd2a75ba5413bb8edbdd83"', 'Host': '127.0.0.1:8000', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0', 'Referer': 'https://127.0.0.1:8000/hub/login', 'Accept-Encoding': 'gzip, deflate', 'Connection': 'close', 'X-Forwarded-Port': '8000', 'Cookie': 'jupyter-hub-token-kit="2|1:0|10:1469021623|21:jupyter-hub-token-kit|44:M2UzMjRlOGNhNDBjNGQ5ZWEwYjg3Njk0N2U0ODY4MmE=|1ec021fdad25b335a6b11b22ff198ce0860a0742a4d95c46a33a6d659d5f4e2d"'})
Traceback (most recent call last):
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/traitlets/traitlets.py", line 501, in get
value = obj._trait_values[self.name]
KeyError: 'container_manager'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/web.py", line 1469, in _execute
result = yield result
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1021, in run
yielded = self.gen.throw(*exc_info)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/handlers/home_handler.py", line 21, in get
images_info = yield self._get_images_info()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 1015, in run
value = future.result()
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/tornado/gen.py", line 285, in wrapper
yielded = next(result)
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/handlers/home_handler.py", line 184, in _get_images_info
container_manager = self.application.container_manager
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/traitlets/traitlets.py", line 529, in __get__
return self.get(obj, cls)
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/traitlets/traitlets.py", line 508, in get
value = self._validate(obj, dynamic_default())
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/application.py", line 81, in _container_manager_default
docker_config=self.file_config.docker_config()
File "/mnt/hgfs/VM_shared/SimPhoNy/simphony-remote/remoteappmanager/file_config.py", line 155, in docker_config
assert_hostname=True,
File "/home/kit/.virtualenv/simremote/lib/python3.4/site-packages/docker/tls.py", line 47, in __init__
'Path to a certificate and key files must be provided'
docker.errors.TLSParameterError: Path to a certificate and key files must be provided through the client_config param. TLS configurations should map the Docker CLI client configurations. See https://docs.docker.com/engine/articles/https/ for API details.
```
| simphony/simphony-remote | diff --git a/tests/test_file_config.py b/tests/test_file_config.py
index 22ce879..484d1b9 100644
--- a/tests/test_file_config.py
+++ b/tests/test_file_config.py
@@ -151,3 +151,15 @@ class TestFileConfig(TempMixin, unittest.TestCase):
config = FileConfig(tls=True)
self.assertNotEqual(config.tls_key, '')
self.assertNotEqual(config.tls_cert, '')
+
+ def test_file_parsing_not_overriding_bug_131(self):
+ docker_config = textwrap.dedent('''
+ tls = True
+ ''')
+ with open(self.config_file, 'w') as fhandle:
+ print(docker_config, file=fhandle)
+
+ config = FileConfig()
+ config.parse_config(self.config_file)
+ self.assertNotEqual(config.tls_key, '')
+ self.assertNotEqual(config.tls_cert, '')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_git_commit_hash"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"sphinx",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc",
"apt-get install -y docker.io"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
babel==2.17.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
docutils==0.21.2
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@af0203df6cb3232a28b50fa1023baf73e234aa6d#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- babel==2.17.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- docutils==0.21.2
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/test_file_config.py::TestFileConfig::test_file_parsing_not_overriding_bug_131"
]
| []
| [
"tests/test_file_config.py::TestFileConfig::test_initialization_on_local_docker_machine",
"tests/test_file_config.py::TestFileConfig::test_initialization_on_nonlocal_docker_machine",
"tests/test_file_config.py::TestFileConfig::test_initialization_with_default_accounting",
"tests/test_file_config.py::TestFileConfig::test_initialization_with_good_accounting",
"tests/test_file_config.py::TestFileConfig::test_overriding",
"tests/test_file_config.py::TestFileConfig::test_tls_init",
"tests/test_file_config.py::TestFileConfig::test_tls_no_verify"
]
| []
| BSD 3-Clause "New" or "Revised" License | 653 | [
"remoteappmanager/file_config.py"
]
| [
"remoteappmanager/file_config.py"
]
|
|
mkdocs__mkdocs-996 | c0c4e44cafcc3537b3cb46f5541297b47f707825 | 2016-07-20 17:13:34 | e7d8879d2b53d9e50bdfcf1cf29c48dc3f6bc87f | diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md
index 426ef3c8..b8d9926b 100644
--- a/docs/about/release-notes.md
+++ b/docs/about/release-notes.md
@@ -64,9 +64,9 @@ created and third-party templates:
#### Increased Template Customization. (#607)
The built-in themes have been updated by having each of their many parts wrapped
-in template blocks which allow each individual block to be easily overriden
+in template blocks which allow each individual block to be easily overridden
using the `theme_dir` config setting. Without any new settings, you can use a
-differant analytics service, replace the default search function, or alter the
+different analytics service, replace the default search function, or alter the
behavior of the navigation, among other things. See the relevant
[documentation][blocks] for more details.
@@ -112,16 +112,26 @@ It is important to note that this method for building the pages is for developme
of content only, since the navigation and other links do not get updated on other
pages.
+#### Stricter Directory Validation
+
+Previously, a warning was issued if the `site_dir` was a child directory of the
+`docs_dir`. This now raises an error. Additionally, an error is now raised if
+the `docs_dir` is set to the directory which contains your config file rather
+than a child directory. You will need to rearrange you directory structure to
+better conform with the documented [layout].
+
+[layout]: ../user-guide/writing-your-docs/#file-layout
+
### Other Changes and Additions to Version 0.16.0
* Bugfix: Support `gh-deploy` command on Windows with Python 3 (#722)
-* Bugfix: Include .woff2 font files in Pyhton package build (#894)
+* Bugfix: Include .woff2 font files in Python package build (#894)
* Various updates and improvements to Documentation Home Page/Tutorial (#870)
* Bugfix: Support livereload for config file changes (#735)
* Bugfix: Non-media template files are no longer copied with media files (#807)
-* Add a flag (-e/--theme-dir) to specifiy theme directory with the commands
+* Add a flag (-e/--theme-dir) to specify theme directory with the commands
`mkdocs build` and `mkdocs serve` (#832)
-* Fixed issues with Unicode filenames under Windows and Python 2. (#833)
+* Fixed issues with Unicode file names under Windows and Python 2. (#833)
* Improved the styling of in-line code in the MkDocs theme. (#718)
* Bugfix: convert variables to JSON when being passed to JavaScript (#850)
* Updated the ReadTheDocs theme to match the upstream font sizes and colours
diff --git a/mkdocs/config/config_options.py b/mkdocs/config/config_options.py
index b3cb9389..70f610c4 100644
--- a/mkdocs/config/config_options.py
+++ b/mkdocs/config/config_options.py
@@ -229,6 +229,15 @@ class Dir(Type):
return os.path.abspath(value)
+ def post_validation(self, config, key_name):
+
+ # Validate that the dir is not the parent dir of the config file.
+ if os.path.dirname(config['config_file_path']) == config[key_name]:
+ raise ValidationError(
+ ("The '{0}' should not be the parent directory of the config "
+ "file. Use a child directory instead so that the config file "
+ "is a sibling of the config file.").format(key_name))
+
class SiteDir(Dir):
"""
@@ -239,6 +248,8 @@ class SiteDir(Dir):
def post_validation(self, config, key_name):
+ super(SiteDir, self).post_validation(config, key_name)
+
# Validate that the docs_dir and site_dir don't contain the
# other as this will lead to copying back and forth on each
# and eventually make a deep nested mess.
@@ -250,7 +261,7 @@ class SiteDir(Dir):
"(site_dir: '{0}', docs_dir: '{1}')"
).format(config['site_dir'], config['docs_dir']))
elif (config['site_dir'] + os.sep).startswith(config['docs_dir'] + os.sep):
- self.warnings.append(
+ raise ValidationError(
("The 'site_dir' should not be within the 'docs_dir' as this "
"leads to the build directory being copied into itself and "
"duplicate nested files in the 'site_dir'."
| Ignore the site_dir contents if it is in the docs_dir
We have been telling people to not do this, but it comes up again and again. So I wonder if it is a usecase we should support? I don't really like it. Perhaps we just need to error more strongly and tell users rather than causing obscure problems. | mkdocs/mkdocs | diff --git a/mkdocs/tests/build_tests.py b/mkdocs/tests/build_tests.py
index f6215df2..aead1c85 100644
--- a/mkdocs/tests/build_tests.py
+++ b/mkdocs/tests/build_tests.py
@@ -26,6 +26,8 @@ def load_config(cfg=None):
cfg = cfg or {}
if 'site_name' not in cfg:
cfg['site_name'] = 'Example'
+ if 'config_file_path' not in cfg:
+ cfg['config_file_path'] = os.path.join(os.path.abspath('.'), 'mkdocs.yml')
conf = config.Config(schema=config.DEFAULT_SCHEMA)
conf.load_dict(cfg)
diff --git a/mkdocs/tests/config/config_options_tests.py b/mkdocs/tests/config/config_options_tests.py
index c492d797..de9396bd 100644
--- a/mkdocs/tests/config/config_options_tests.py
+++ b/mkdocs/tests/config/config_options_tests.py
@@ -186,6 +186,20 @@ class DirTest(unittest.TestCase):
self.assertRaises(config_options.ValidationError,
option.validate, [])
+ def test_doc_dir_is_config_dir(self):
+
+ test_config = {
+ 'config_file_path': os.path.join(os.path.abspath('.'), 'mkdocs.yml'),
+ 'docs_dir': '.'
+ }
+
+ docs_dir = config_options.Dir()
+
+ test_config['docs_dir'] = docs_dir.validate(test_config['docs_dir'])
+
+ self.assertRaises(config_options.ValidationError,
+ docs_dir.post_validation, test_config, 'docs_dir')
+
class SiteDirTest(unittest.TestCase):
@@ -207,12 +221,13 @@ class SiteDirTest(unittest.TestCase):
)
for test_config in test_configs:
+ test_config['config_file_path'] = j(os.path.abspath('..'), 'mkdocs.yml')
test_config['docs_dir'] = docs_dir.validate(test_config['docs_dir'])
test_config['site_dir'] = option.validate(test_config['site_dir'])
self.assertRaises(config_options.ValidationError,
- option.post_validation, test_config, 'key')
+ option.post_validation, test_config, 'site_dir')
def test_site_dir_in_docs_dir(self):
@@ -225,6 +240,7 @@ class SiteDirTest(unittest.TestCase):
)
for test_config in test_configs:
+ test_config['config_file_path'] = j(os.path.abspath('..'), 'mkdocs.yml')
docs_dir = config_options.Dir()
option = config_options.SiteDir()
@@ -232,11 +248,8 @@ class SiteDirTest(unittest.TestCase):
test_config['docs_dir'] = docs_dir.validate(test_config['docs_dir'])
test_config['site_dir'] = option.validate(test_config['site_dir'])
- option.post_validation(test_config, 'key')
- self.assertEqual(len(option.warnings), 1)
- self.assertEqual(
- option.warnings[0][:50],
- "The 'site_dir' should not be within the 'docs_dir'")
+ self.assertRaises(config_options.ValidationError,
+ option.post_validation, test_config, 'site_dir')
class ThemeTest(unittest.TestCase):
diff --git a/mkdocs/tests/config/config_tests.py b/mkdocs/tests/config/config_tests.py
index 04e51de3..497cfa6f 100644
--- a/mkdocs/tests/config/config_tests.py
+++ b/mkdocs/tests/config/config_tests.py
@@ -135,7 +135,8 @@ class ConfigTests(unittest.TestCase):
conf = config.Config(schema=config.DEFAULT_SCHEMA)
conf.load_dict({
'site_name': 'Example',
- 'docs_dir': tmp_dir
+ 'docs_dir': tmp_dir,
+ 'config_file_path': os.path.join(os.path.abspath('.'), 'mkdocs.yml')
})
conf.validate()
self.assertEqual(['index.md', 'about.md'], conf['pages'])
@@ -159,7 +160,8 @@ class ConfigTests(unittest.TestCase):
conf = config.Config(schema=config.DEFAULT_SCHEMA)
conf.load_dict({
'site_name': 'Example',
- 'docs_dir': tmp_dir
+ 'docs_dir': tmp_dir,
+ 'config_file_path': os.path.join(os.path.abspath('.'), 'mkdocs.yml')
})
conf.validate()
self.assertEqual([
@@ -197,6 +199,7 @@ class ConfigTests(unittest.TestCase):
conf = {
'site_name': 'Example',
+ 'config_file_path': j(os.path.abspath('..'), 'mkdocs.yml')
}
for test_config in test_configs:
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 2
} | 0.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/project.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
livereload==2.7.1
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@c0c4e44cafcc3537b3cb46f5541297b47f707825#egg=mkdocs
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
PyYAML==6.0.2
tomli==2.2.1
tornado==6.4.2
typing_extensions==4.13.0
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- livereload==2.7.1
- markdown==3.7
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- pyyaml==6.0.2
- tomli==2.2.1
- tornado==6.4.2
- typing-extensions==4.13.0
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/config/config_options_tests.py::DirTest::test_doc_dir_is_config_dir",
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_site_dir_in_docs_dir"
]
| [
"mkdocs/tests/build_tests.py::BuildTests::test_absolute_link",
"mkdocs/tests/build_tests.py::BuildTests::test_anchor_only_link",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_asbolute_media",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_link",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_link_differing_directory",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_link_with_anchor",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_media",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_markdown",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_multiple_internal_links",
"mkdocs/tests/build_tests.py::BuildTests::test_copy_theme_files",
"mkdocs/tests/build_tests.py::BuildTests::test_copying_media",
"mkdocs/tests/build_tests.py::BuildTests::test_dont_convert_code_block_urls",
"mkdocs/tests/build_tests.py::BuildTests::test_empty_document",
"mkdocs/tests/build_tests.py::BuildTests::test_extension_config",
"mkdocs/tests/build_tests.py::BuildTests::test_extra_context",
"mkdocs/tests/build_tests.py::BuildTests::test_ignore_email_links",
"mkdocs/tests/build_tests.py::BuildTests::test_ignore_external_link",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_custom_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_duplicate_custom_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_fenced_code_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_table_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_not_use_directory_urls",
"mkdocs/tests/build_tests.py::BuildTests::test_strict_mode_invalid",
"mkdocs/tests/build_tests.py::BuildTests::test_strict_mode_valid",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_config_option",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_default_pages",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_default_pages_nested"
]
| [
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_default",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_empty",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_replace_default",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_required",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_required_no_default",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_length",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_multiple_types",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_single_type",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_invalid",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_invalid_url",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_valid_url",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_bitbucket",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_custom",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_github",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_bitbucket",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_custom",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_github",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_file",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_incorrect_type_attribute_error",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_incorrect_type_type_error",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_missing_dir",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_missing_dir_but_required",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_valid_dir",
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_doc_dir_in_site_dir",
"mkdocs/tests/config/config_options_tests.py::ThemeTest::test_theme",
"mkdocs/tests/config/config_options_tests.py::ThemeTest::test_theme_invalid",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_empty",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_invalid",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_talk",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_invalid_config",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_invalid_type",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided_dict",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided_empty",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_invalid_pages",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_many_pages",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_one_page",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::PrivateTest::test_defined",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_builtins",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_builtins_config",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_configkey",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_duplicates",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_config_item",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_config_option",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_dict_item",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_list_dicts",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_mixed_list",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_none",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_not_list",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_simple_list",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_doc_dir_in_site_dir",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_empty_config",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_invalid_config",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_missing_config_file",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_missing_site_name",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_nonexistant_config",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_theme"
]
| []
| BSD 2-Clause "Simplified" License | 654 | [
"docs/about/release-notes.md",
"mkdocs/config/config_options.py"
]
| [
"docs/about/release-notes.md",
"mkdocs/config/config_options.py"
]
|
|
simphony__simphony-remote-141 | 3ceb572ce98207102f14d1b11491fc2edd85aaac | 2016-07-21 11:17:02 | 34705b9892b4c781d27e0a30e7f29019450e6b1c | diff --git a/remoteappmanager/db/orm.py b/remoteappmanager/db/orm.py
index 7111d0b..f877d5a 100644
--- a/remoteappmanager/db/orm.py
+++ b/remoteappmanager/db/orm.py
@@ -1,5 +1,6 @@
import contextlib
import hashlib
+import os
from sqlalchemy import (
Column, Integer, Boolean, Unicode, ForeignKey, create_engine, Enum,
@@ -157,6 +158,21 @@ class AppAccounting(ABCAccounting):
def __init__(self, url, **kwargs):
self.db = Database(url, **kwargs)
+ self.check_database_readable()
+
+ def check_database_readable(self):
+ ''' Raise IOError if the database url points to a sqlite database
+ that is not readable
+
+ TODO: may extend for validating databases in other dialects?
+ '''
+ db_url = self.db.url
+
+ if db_url.startswith('sqlite:///'):
+ file_path = os.path.abspath(db_url[10:])
+ if not os.access(file_path, os.R_OK):
+ raise IOError(
+ 'Sqlite database {} is not readable'.format(file_path))
def get_user_by_name(self, user_name):
""" Return an orm.User given a user name. Return None
| Sqlite database file created if it does not exist
Similar to #113, when `remoteappmanager` is started with its default accounting setting and that the default sqlite database does not exist, an empty file `remoteappmanager.db` would be created.
| simphony/simphony-remote | diff --git a/tests/db/test_interfaces.py b/tests/db/test_interfaces.py
index 02ced52..4ed20d0 100644
--- a/tests/db/test_interfaces.py
+++ b/tests/db/test_interfaces.py
@@ -8,7 +8,7 @@ from collections import namedtuple
from remoteappmanager.db.interfaces import (
ABCApplication, ABCApplicationPolicy, ABCAccounting)
-from .abc_test_interfaces import ABCTestDatabaseInterface
+from tests.db.abc_test_interfaces import ABCTestDatabaseInterface
User = namedtuple('User', ('name',))
diff --git a/tests/db/test_orm.py b/tests/db/test_orm.py
index 2869b94..6c1052c 100644
--- a/tests/db/test_orm.py
+++ b/tests/db/test_orm.py
@@ -199,3 +199,12 @@ class TestOrmAppAccounting(TempMixin, ABCTestDatabaseInterface,
self.assertEqual(actual_app, expected_config[0][0])
self.assertEqual(actual_policy, expected_config[0][1])
+
+ def test_no_file_creation_if_sqlite_database_not_exist(self):
+ temp_file_path = os.path.join(self.tempdir, 'some.db')
+
+ with self.assertRaises(IOError):
+ AppAccounting(
+ url="sqlite:///"+temp_file_path)
+
+ self.assertFalse(os.path.exists(temp_file_path))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_issue_reference",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
escapism==1.0.1
exceptiongroup==1.2.2
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
numpy==2.0.2
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
pandas==2.2.3
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
pytz==2025.2
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@3ceb572ce98207102f14d1b11491fc2edd85aaac#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
tzdata==2025.2
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- escapism==1.0.1
- exceptiongroup==1.2.2
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- numpy==2.0.2
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- pandas==2.2.3
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pytz==2025.2
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- tzdata==2025.2
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/db/test_orm.py::TestOrmAppAccounting::test_no_file_creation_if_sqlite_database_not_exist"
]
| [
"tests/db/test_orm.py::TestOrm::test_apps_for_user",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_apps_for_user",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_apps_for_user_across_sessions",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_apps_for_user_mapping_id_rest_compliant"
]
| [
"tests/db/test_interfaces.py::TestDatabaseInterface::test_get_apps_for_user",
"tests/db/test_interfaces.py::TestDatabaseInterface::test_get_apps_for_user_mapping_id_rest_compliant",
"tests/db/test_interfaces.py::TestDatabaseInterface::test_get_user_by_name",
"tests/db/test_orm.py::TestOrm::test_database_init_and_session",
"tests/db/test_orm.py::TestOrm::test_orm_objects",
"tests/db/test_orm.py::TestOrmAppAccounting::test_get_user_by_name"
]
| []
| BSD 3-Clause "New" or "Revised" License | 655 | [
"remoteappmanager/db/orm.py"
]
| [
"remoteappmanager/db/orm.py"
]
|
|
dask__dask-1397 | b4d3dba54fa488f60a808b9f7629aea4c156176e | 2016-07-21 20:09:01 | 278386a344dcb5183930449f56ff745eb3f980e0 | diff --git a/dask/array/core.py b/dask/array/core.py
index 64405e95c..e4dfaf8d3 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -847,7 +847,7 @@ class Array(Base):
>>> da.ones((10, 10), chunks=(5, 5), dtype='i4')
dask.array<..., shape=(10, 10), dtype=int32, chunksize=(5, 5)>
"""
- chunksize = str(tuple(c[0] for c in self.chunks))
+ chunksize = str(tuple(c[0] if c else 0 for c in self.chunks))
name = self.name if len(self.name) < 10 else self.name[:7] + '...'
return ("dask.array<%s, shape=%s, dtype=%s, chunksize=%s>" %
(name, self.shape, self._dtype, chunksize))
@@ -3030,7 +3030,7 @@ def concatenate3(arrays):
if not ndim:
return arrays
if not arrays:
- return np.empty(())
+ return np.empty(0)
chunks = chunks_from_arrays(arrays)
shape = tuple(map(sum, chunks))
diff --git a/dask/array/linalg.py b/dask/array/linalg.py
index 341263f77..d8a5d362f 100644
--- a/dask/array/linalg.py
+++ b/dask/array/linalg.py
@@ -741,7 +741,7 @@ def lstsq(a, b):
q, r = qr(a)
x = solve_triangular(r, q.T.dot(b))
residuals = b - a.dot(x)
- residuals = (residuals ** 2).sum()
+ residuals = (residuals ** 2).sum(keepdims=True)
token = tokenize(a, b)
diff --git a/dask/array/reductions.py b/dask/array/reductions.py
index dcde63c06..28c7cb4af 100644
--- a/dask/array/reductions.py
+++ b/dask/array/reductions.py
@@ -29,9 +29,9 @@ def reduction(x, chunk, aggregate, axis=None, keepdims=None, dtype=None,
axis = (axis,)
axis = tuple(i if i >= 0 else x.ndim + i for i in axis)
- if dtype and 'dtype' in getargspec(chunk).args:
+ if dtype is not None and 'dtype' in getargspec(chunk).args:
chunk = partial(chunk, dtype=dtype)
- if dtype and 'dtype' in getargspec(aggregate).args:
+ if dtype is not None and 'dtype' in getargspec(aggregate).args:
aggregate = partial(aggregate, dtype=dtype)
# Map chunk across all blocks
diff --git a/dask/array/utils.py b/dask/array/utils.py
index 79f5b7436..f7b896c30 100644
--- a/dask/array/utils.py
+++ b/dask/array/utils.py
@@ -1,3 +1,4 @@
+from distutils.version import LooseVersion
import difflib
import os
import numpy as np
@@ -5,15 +6,47 @@ import numpy as np
from .core import Array
from ..async import get_sync
+if LooseVersion(np.__version__) >= '1.10.0':
+ allclose = np.allclose
+else:
+ def allclose(a, b, **kwargs):
+ if kwargs.pop('equal_nan', False):
+ a_nans = np.isnan(a)
+ b_nans = np.isnan(b)
+ if not (a_nans == b_nans).all():
+ return False
+ a = a[~a_nans]
+ b = b[~b_nans]
+ return np.allclose(a, b, **kwargs)
+
+
+def _not_empty(x):
+ return x.shape and 0 not in x.shape
+
+
+def _maybe_check_dtype(a, dtype=None):
+ # Only check dtype matches for non-empty
+ if _not_empty(a):
+ assert a.dtype == dtype
+
+
def assert_eq(a, b, **kwargs):
if isinstance(a, Array):
adt = a._dtype
a = a.compute(get=get_sync)
+ if adt is not None:
+ _maybe_check_dtype(a, adt)
+ else:
+ adt = getattr(a, 'dtype', None)
else:
adt = getattr(a, 'dtype', None)
if isinstance(b, Array):
bdt = b._dtype
b = b.compute(get=get_sync)
+ if bdt is not None:
+ _maybe_check_dtype(b, bdt)
+ else:
+ bdt = getattr(b, 'dtype', None)
else:
bdt = getattr(b, 'dtype', None)
@@ -23,7 +56,10 @@ def assert_eq(a, b, **kwargs):
os.linesep.join(diff))
try:
- assert np.allclose(a, b, **kwargs)
+ if _not_empty(a) and _not_empty(b):
+ # Treat all empty arrays as equivalent
+ assert a.shape == b.shape
+ assert allclose(a, b, **kwargs)
return
except TypeError:
pass
| dask.array's mean converts float32 to float64 at compute time
Originally reported by @robintw in pydata/xarray#913:
```
import dask.array as da
import numpy as np
x = da.from_array(np.random.rand(10, 10).astype(np.float32), (5, 5)).mean(axis=0)
print(x.dtype) # float32
print(x.compute().dtype) # float64
```
Tested on dask 0.10.1. | dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index e1288b5ae..3db7be7e7 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -804,7 +804,10 @@ def test_repr():
assert str(d._dtype) in repr(d)
d = da.ones((4000, 4), chunks=(4, 2))
assert len(str(d)) < 1000
-
+ # Empty array
+ d = da.Array({}, 'd', ((), (3, 4)), dtype='i8')
+ assert str(d.shape) in repr(d)
+ assert str(d._dtype) in repr(d)
def test_slicing_with_ellipsis():
x = np.arange(256).reshape((4, 4, 4, 4))
diff --git a/dask/array/tests/test_reductions.py b/dask/array/tests/test_reductions.py
index 7da0b98e2..bee68e1d7 100644
--- a/dask/array/tests/test_reductions.py
+++ b/dask/array/tests/test_reductions.py
@@ -4,6 +4,7 @@ import pytest
pytest.importorskip('numpy')
import dask.array as da
+from dask.array.utils import assert_eq as _assert_eq
from dask.core import get_deps
from dask.context import set_options
@@ -16,15 +17,8 @@ except ImportError: # pragma: no cover
nanprod = npcompat.nanprod
-def eq(a, b):
- if isinstance(a, da.Array):
- a = a.compute()
- if isinstance(b, da.Array):
- b = b.compute()
- if isinstance(a, (np.generic, np.ndarray)):
- return np.all(np.isclose(a, b, equal_nan=True))
- else:
- return a == b
+def assert_eq(a, b):
+ _assert_eq(a, b, equal_nan=True)
def same_keys(a, b):
@@ -37,21 +31,21 @@ def same_keys(a, b):
def reduction_1d_test(da_func, darr, np_func, narr, use_dtype=True, split_every=True):
- assert eq(da_func(darr), np_func(narr))
- assert eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
+ assert_eq(da_func(darr), np_func(narr))
+ assert_eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
assert same_keys(da_func(darr), da_func(darr))
assert same_keys(da_func(darr, keepdims=True), da_func(darr, keepdims=True))
if use_dtype:
- assert eq(da_func(darr, dtype='f8'), np_func(narr, dtype='f8'))
- assert eq(da_func(darr, dtype='i8'), np_func(narr, dtype='i8'))
+ assert_eq(da_func(darr, dtype='f8'), np_func(narr, dtype='f8'))
+ assert_eq(da_func(darr, dtype='i8'), np_func(narr, dtype='i8'))
assert same_keys(da_func(darr, dtype='i8'), da_func(darr, dtype='i8'))
if split_every:
a1 = da_func(darr, split_every=2)
a2 = da_func(darr, split_every={0: 2})
assert same_keys(a1, a2)
- assert eq(a1, np_func(narr))
- assert eq(a2, np_func(narr))
- assert eq(da_func(darr, keepdims=True, split_every=2),
+ assert_eq(a1, np_func(narr))
+ assert_eq(a2, np_func(narr))
+ assert_eq(da_func(darr, keepdims=True, split_every=2),
np_func(narr, keepdims=True))
@@ -81,34 +75,34 @@ def test_reductions_1D(dtype):
def reduction_2d_test(da_func, darr, np_func, narr, use_dtype=True,
split_every=True):
- assert eq(da_func(darr), np_func(narr))
- assert eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
- assert eq(da_func(darr, axis=0), np_func(narr, axis=0))
- assert eq(da_func(darr, axis=1), np_func(narr, axis=1))
- assert eq(da_func(darr, axis=1, keepdims=True),
+ assert_eq(da_func(darr), np_func(narr))
+ assert_eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
+ assert_eq(da_func(darr, axis=0), np_func(narr, axis=0))
+ assert_eq(da_func(darr, axis=1), np_func(narr, axis=1))
+ assert_eq(da_func(darr, axis=1, keepdims=True),
np_func(narr, axis=1, keepdims=True))
- assert eq(da_func(darr, axis=(1, 0)), np_func(narr, axis=(1, 0)))
+ assert_eq(da_func(darr, axis=(1, 0)), np_func(narr, axis=(1, 0)))
assert same_keys(da_func(darr, axis=1), da_func(darr, axis=1))
assert same_keys(da_func(darr, axis=(1, 0)), da_func(darr, axis=(1, 0)))
if use_dtype:
- assert eq(da_func(darr, dtype='f8'), np_func(narr, dtype='f8'))
- assert eq(da_func(darr, dtype='i8'), np_func(narr, dtype='i8'))
+ assert_eq(da_func(darr, dtype='f8'), np_func(narr, dtype='f8'))
+ assert_eq(da_func(darr, dtype='i8'), np_func(narr, dtype='i8'))
if split_every:
a1 = da_func(darr, split_every=4)
a2 = da_func(darr, split_every={0: 2, 1: 2})
assert same_keys(a1, a2)
- assert eq(a1, np_func(narr))
- assert eq(a2, np_func(narr))
- assert eq(da_func(darr, keepdims=True, split_every=4),
+ assert_eq(a1, np_func(narr))
+ assert_eq(a2, np_func(narr))
+ assert_eq(da_func(darr, keepdims=True, split_every=4),
np_func(narr, keepdims=True))
- assert eq(da_func(darr, axis=0, split_every=2), np_func(narr, axis=0))
- assert eq(da_func(darr, axis=0, keepdims=True, split_every=2),
+ assert_eq(da_func(darr, axis=0, split_every=2), np_func(narr, axis=0))
+ assert_eq(da_func(darr, axis=0, keepdims=True, split_every=2),
np_func(narr, axis=0, keepdims=True))
- assert eq(da_func(darr, axis=1, split_every=2), np_func(narr, axis=1))
- assert eq(da_func(darr, axis=1, keepdims=True, split_every=2),
+ assert_eq(da_func(darr, axis=1, split_every=2), np_func(narr, axis=1))
+ assert_eq(da_func(darr, axis=1, keepdims=True, split_every=2),
np_func(narr, axis=1, keepdims=True))
@@ -146,24 +140,24 @@ def test_arg_reductions(dfunc, func):
x = np.random.random((10, 10, 10))
a = da.from_array(x, chunks=(3, 4, 5))
- assert eq(dfunc(a), func(x))
- assert eq(dfunc(a, 0), func(x, 0))
- assert eq(dfunc(a, 1), func(x, 1))
- assert eq(dfunc(a, 2), func(x, 2))
+ assert_eq(dfunc(a), func(x))
+ assert_eq(dfunc(a, 0), func(x, 0))
+ assert_eq(dfunc(a, 1), func(x, 1))
+ assert_eq(dfunc(a, 2), func(x, 2))
with set_options(split_every=2):
- assert eq(dfunc(a), func(x))
- assert eq(dfunc(a, 0), func(x, 0))
- assert eq(dfunc(a, 1), func(x, 1))
- assert eq(dfunc(a, 2), func(x, 2))
+ assert_eq(dfunc(a), func(x))
+ assert_eq(dfunc(a, 0), func(x, 0))
+ assert_eq(dfunc(a, 1), func(x, 1))
+ assert_eq(dfunc(a, 2), func(x, 2))
pytest.raises(ValueError, lambda: dfunc(a, 3))
pytest.raises(TypeError, lambda: dfunc(a, (0, 1)))
x2 = np.arange(10)
a2 = da.from_array(x2, chunks=3)
- assert eq(dfunc(a2), func(x2))
- assert eq(dfunc(a2, 0), func(x2, 0))
- assert eq(dfunc(a2, 0, split_every=2), func(x2, 0))
+ assert_eq(dfunc(a2), func(x2))
+ assert_eq(dfunc(a2, 0), func(x2, 0))
+ assert_eq(dfunc(a2, 0, split_every=2), func(x2, 0))
@pytest.mark.parametrize(['dfunc', 'func'],
@@ -172,8 +166,8 @@ def test_nanarg_reductions(dfunc, func):
x = np.random.random((10, 10, 10))
x[5] = np.nan
a = da.from_array(x, chunks=(3, 4, 5))
- assert eq(dfunc(a), func(x))
- assert eq(dfunc(a, 0), func(x, 0))
+ assert_eq(dfunc(a), func(x))
+ assert_eq(dfunc(a, 0), func(x, 0))
with pytest.raises(ValueError):
dfunc(a, 1).compute()
@@ -212,18 +206,18 @@ def test_reductions_2D_nans():
reduction_2d_test(da.nanmin, a, np.nanmin, x, False, False)
reduction_2d_test(da.nanmax, a, np.nanmax, x, False, False)
- assert eq(da.argmax(a), np.argmax(x))
- assert eq(da.argmin(a), np.argmin(x))
- assert eq(da.nanargmax(a), np.nanargmax(x))
- assert eq(da.nanargmin(a), np.nanargmin(x))
- assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
- assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
- assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
- assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
- assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1))
- assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1))
- assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1))
- assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1))
+ assert_eq(da.argmax(a), np.argmax(x))
+ assert_eq(da.argmin(a), np.argmin(x))
+ assert_eq(da.nanargmax(a), np.nanargmax(x))
+ assert_eq(da.nanargmin(a), np.nanargmin(x))
+ assert_eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
+ assert_eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
+ assert_eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
+ assert_eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
+ assert_eq(da.argmax(a, axis=1), np.argmax(x, axis=1))
+ assert_eq(da.argmin(a, axis=1), np.argmin(x, axis=1))
+ assert_eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1))
+ assert_eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1))
def test_moment():
@@ -234,30 +228,30 @@ def test_moment():
# Poorly conditioned
x = np.array([1., 2., 3.]*10).reshape((3, 10)) + 1e8
a = da.from_array(x, chunks=5)
- assert eq(a.moment(2), moment(x, 2))
- assert eq(a.moment(3), moment(x, 3))
- assert eq(a.moment(4), moment(x, 4))
+ assert_eq(a.moment(2), moment(x, 2))
+ assert_eq(a.moment(3), moment(x, 3))
+ assert_eq(a.moment(4), moment(x, 4))
x = np.arange(1, 122).reshape((11, 11)).astype('f8')
a = da.from_array(x, chunks=(4, 4))
- assert eq(a.moment(4, axis=1), moment(x, 4, axis=1))
- assert eq(a.moment(4, axis=(1, 0)), moment(x, 4, axis=(1, 0)))
+ assert_eq(a.moment(4, axis=1), moment(x, 4, axis=1))
+ assert_eq(a.moment(4, axis=(1, 0)), moment(x, 4, axis=(1, 0)))
# Tree reduction
- assert eq(a.moment(order=4, split_every=4), moment(x, 4))
- assert eq(a.moment(order=4, axis=0, split_every=4), moment(x, 4, axis=0))
- assert eq(a.moment(order=4, axis=1, split_every=4), moment(x, 4, axis=1))
+ assert_eq(a.moment(order=4, split_every=4), moment(x, 4))
+ assert_eq(a.moment(order=4, axis=0, split_every=4), moment(x, 4, axis=0))
+ assert_eq(a.moment(order=4, axis=1, split_every=4), moment(x, 4, axis=1))
def test_reductions_with_negative_axes():
x = np.random.random((4, 4, 4))
a = da.from_array(x, chunks=2)
- assert eq(a.argmin(axis=-1), x.argmin(axis=-1))
- assert eq(a.argmin(axis=-1, split_every=2), x.argmin(axis=-1))
+ assert_eq(a.argmin(axis=-1), x.argmin(axis=-1))
+ assert_eq(a.argmin(axis=-1, split_every=2), x.argmin(axis=-1))
- assert eq(a.sum(axis=-1), x.sum(axis=-1))
- assert eq(a.sum(axis=(0, -1)), x.sum(axis=(0, -1)))
+ assert_eq(a.sum(axis=-1), x.sum(axis=-1))
+ assert_eq(a.sum(axis=(0, -1)), x.sum(axis=(0, -1)))
def test_nan():
@@ -266,16 +260,16 @@ def test_nan():
[9, 10, 11, 12]])
d = da.from_array(x, chunks=(2, 2))
- assert eq(np.nansum(x), da.nansum(d))
- assert eq(np.nansum(x, axis=0), da.nansum(d, axis=0))
- assert eq(np.nanmean(x, axis=1), da.nanmean(d, axis=1))
- assert eq(np.nanmin(x, axis=1), da.nanmin(d, axis=1))
- assert eq(np.nanmax(x, axis=(0, 1)), da.nanmax(d, axis=(0, 1)))
- assert eq(np.nanvar(x), da.nanvar(d))
- assert eq(np.nanstd(x, axis=0), da.nanstd(d, axis=0))
- assert eq(np.nanargmin(x, axis=0), da.nanargmin(d, axis=0))
- assert eq(np.nanargmax(x, axis=0), da.nanargmax(d, axis=0))
- assert eq(nanprod(x), da.nanprod(d))
+ assert_eq(np.nansum(x), da.nansum(d))
+ assert_eq(np.nansum(x, axis=0), da.nansum(d, axis=0))
+ assert_eq(np.nanmean(x, axis=1), da.nanmean(d, axis=1))
+ assert_eq(np.nanmin(x, axis=1), da.nanmin(d, axis=1))
+ assert_eq(np.nanmax(x, axis=(0, 1)), da.nanmax(d, axis=(0, 1)))
+ assert_eq(np.nanvar(x), da.nanvar(d))
+ assert_eq(np.nanstd(x, axis=0), da.nanstd(d, axis=0))
+ assert_eq(np.nanargmin(x, axis=0), da.nanargmin(d, axis=0))
+ assert_eq(np.nanargmax(x, axis=0), da.nanargmax(d, axis=0))
+ assert_eq(nanprod(x), da.nanprod(d))
def test_0d_array():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 4
} | 1.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y graphviz liblzma-dev"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work
aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work
aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work
async-timeout==3.0.1
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work
bcolz==1.2.1
bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work
boto3==1.20.24
botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work
brotlipy==0.7.0
certifi==2021.5.30
cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work
chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work
click==8.0.3
cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work
contextvars==2.4
cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work
cytoolz==0.11.0
-e git+https://github.com/dask/dask.git@b4d3dba54fa488f60a808b9f7629aea4c156176e#egg=dask
decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work
distributed==1.12.1
fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work
h5py==2.10.0
HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work
idna @ file:///tmp/build/80754af9/idna_1637925883363/work
idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work
immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work
importlib-metadata==4.8.3
iniconfig==1.1.1
ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work
ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work
jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work
Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work
jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work
locket==0.2.1
MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work
mock @ file:///tmp/build/80754af9/mock_1607622725907/work
msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work
msgpack-python==0.5.6
multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work
numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work
numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work
olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
parso==0.7.0
partd @ file:///opt/conda/conda-bld/partd_1647245470509/work
pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work
pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work
Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work
pluggy==1.0.0
prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work
psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work
ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
py==1.11.0
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work
pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work
pytest==7.0.1
python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work
pytz==2021.3
PyYAML==5.4.1
s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work
s3transfer==0.5.2
scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work
tables==3.6.1
tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work
tomli==1.2.3
toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work
tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work
traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work
wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work
wrapt==1.12.1
yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work
zict==2.0.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiobotocore=2.1.0=pyhd3eb1b0_0
- aiohttp=3.7.4.post0=py36h7f8727e_2
- aioitertools=0.7.1=pyhd3eb1b0_0
- async-timeout=3.0.1=py36h06a4308_0
- attrs=21.4.0=pyhd3eb1b0_0
- backcall=0.2.0=pyhd3eb1b0_0
- bcolz=1.2.1=py36h04863e7_0
- blas=1.0=openblas
- blosc=1.21.3=h6a678d5_0
- bokeh=2.3.2=py36h06a4308_0
- botocore=1.23.24=pyhd3eb1b0_0
- brotlipy=0.7.0=py36h27cfd23_1003
- bzip2=1.0.8=h5eee18b_6
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- cffi=1.14.6=py36h400218f_0
- chardet=4.0.0=py36h06a4308_1003
- click=8.0.3=pyhd3eb1b0_0
- cloudpickle=2.0.0=pyhd3eb1b0_0
- contextvars=2.4=py_0
- cryptography=35.0.0=py36hd23ed53_0
- cytoolz=0.11.0=py36h7b6447c_0
- decorator=5.1.1=pyhd3eb1b0_0
- freetype=2.12.1=h4a9f257_0
- fsspec=2022.1.0=pyhd3eb1b0_0
- giflib=5.2.2=h5eee18b_0
- h5py=2.10.0=py36h7918eee_0
- hdf5=1.10.4=hb1b8bf9_0
- heapdict=1.0.1=pyhd3eb1b0_0
- idna=3.3=pyhd3eb1b0_0
- idna_ssl=1.1.0=py36h06a4308_0
- immutables=0.16=py36h7f8727e_0
- ipython=7.16.1=py36h5ca1d4c_0
- ipython_genutils=0.2.0=pyhd3eb1b0_1
- jedi=0.17.2=py36h06a4308_1
- jinja2=3.0.3=pyhd3eb1b0_0
- jmespath=0.10.0=pyhd3eb1b0_0
- jpeg=9e=h5eee18b_3
- lcms2=2.16=hb9589c4_0
- ld_impl_linux-64=2.40=h12ee557_0
- lerc=4.0.0=h6a678d5_0
- libdeflate=1.22=h5eee18b_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=7.5.0=ha8ba4b0_17
- libgfortran4=7.5.0=ha8ba4b0_17
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.18=hf726d26_0
- libpng=1.6.39=h5eee18b_0
- libstdcxx-ng=11.2.0=h1234567_1
- libtiff=4.5.1=hffd6297_1
- libwebp=1.2.4=h11a3e52_1
- libwebp-base=1.2.4=h5eee18b_1
- locket=0.2.1=py36h06a4308_1
- lz4-c=1.9.4=h6a678d5_1
- lzo=2.10=h7b6447c_2
- markupsafe=2.0.1=py36h27cfd23_0
- mock=4.0.3=pyhd3eb1b0_0
- multidict=5.1.0=py36h27cfd23_2
- ncurses=6.4=h6a678d5_0
- numexpr=2.7.3=py36h4be448d_1
- numpy=1.19.2=py36h6163131_0
- numpy-base=1.19.2=py36h75fe3a5_0
- olefile=0.46=pyhd3eb1b0_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pandas=1.1.5=py36ha9443f7_0
- parso=0.7.0=py_0
- partd=1.2.0=pyhd3eb1b0_1
- pexpect=4.8.0=pyhd3eb1b0_3
- pickleshare=0.7.5=pyhd3eb1b0_1003
- pillow=8.3.1=py36h5aabda8_0
- pip=21.2.2=py36h06a4308_0
- prompt-toolkit=3.0.20=pyhd3eb1b0_0
- psutil=5.8.0=py36h27cfd23_1
- ptyprocess=0.7.0=pyhd3eb1b0_2
- pycparser=2.21=pyhd3eb1b0_0
- pygments=2.11.2=pyhd3eb1b0_0
- pyopenssl=22.0.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pysocks=1.7.1=py36h06a4308_0
- pytables=3.6.1=py36h71ec239_0
- python=3.6.13=h12debd9_1
- python-dateutil=2.8.2=pyhd3eb1b0_0
- pytz=2021.3=pyhd3eb1b0_0
- pyyaml=5.4.1=py36h27cfd23_1
- readline=8.2=h5eee18b_0
- s3fs=2022.1.0=pyhd3eb1b0_0
- scipy=1.5.2=py36habc2bb6_0
- setuptools=58.0.4=py36h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sortedcontainers=2.4.0=pyhd3eb1b0_0
- sqlite=3.45.3=h5eee18b_0
- tblib=1.7.0=pyhd3eb1b0_0
- tk=8.6.14=h39e8969_0
- toolz=0.11.2=pyhd3eb1b0_0
- tornado=6.1=py36h27cfd23_0
- traitlets=4.3.3=py36h06a4308_0
- typing-extensions=4.1.1=hd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- urllib3=1.26.8=pyhd3eb1b0_0
- wcwidth=0.2.5=pyhd3eb1b0_0
- wheel=0.37.1=pyhd3eb1b0_0
- wrapt=1.12.1=py36h7b6447c_1
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- yarl=1.6.3=py36h27cfd23_0
- zict=2.0.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- boto3==1.20.24
- distributed==1.12.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- msgpack-python==0.5.6
- pluggy==1.0.0
- py==1.11.0
- pytest==7.0.1
- s3transfer==0.5.2
- tomli==1.2.3
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_reductions.py::test_reductions_2D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_2D[i4]"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_unravel",
"dask/array/tests/test_array_core.py::test_reshape",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_to_hdf5",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_array_slicing_results_in_ndarray",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_h5py_newaxis",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_h5py_tokenize",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_atop_kwargs",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_reductions.py::test_reductions_1D[f4]",
"dask/array/tests/test_reductions.py::test_reductions_1D[i4]",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmin-argmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[argmax-argmax]",
"dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmax-nanargmax]",
"dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmin-nanargmin]",
"dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmax-nanargmax]",
"dask/array/tests/test_reductions.py::test_reductions_2D_nans",
"dask/array/tests/test_reductions.py::test_moment",
"dask/array/tests/test_reductions.py::test_reductions_with_negative_axes",
"dask/array/tests/test_reductions.py::test_nan",
"dask/array/tests/test_reductions.py::test_0d_array",
"dask/array/tests/test_reductions.py::test_reduction_on_scalar",
"dask/array/tests/test_reductions.py::test_tree_reduce_depth",
"dask/array/tests/test_reductions.py::test_tree_reduce_set_options",
"dask/array/tests/test_reductions.py::test_reduction_names"
]
| []
| BSD 3-Clause "New" or "Revised" License | 656 | [
"dask/array/core.py",
"dask/array/utils.py",
"dask/array/linalg.py",
"dask/array/reductions.py"
]
| [
"dask/array/core.py",
"dask/array/utils.py",
"dask/array/linalg.py",
"dask/array/reductions.py"
]
|
|
Axelrod-Python__Axelrod-671 | 46ad8990affd6b9a792c84af4cc0987670cd515a | 2016-07-22 14:11:42 | 06a2887f51a79bfacb95aff4481c69e72c1a1366 | diff --git a/axelrod/result_set.py b/axelrod/result_set.py
index 6ec59ae6..0a8b4fb2 100644
--- a/axelrod/result_set.py
+++ b/axelrod/result_set.py
@@ -464,28 +464,8 @@ class ResultSet(object):
Where pij is the mean difference of the
scores per turn between player i and j in repetition m.
"""
- plist = list(range(self.nplayers))
- payoff_diffs_means = [[0 for opponent in plist] for player in plist]
-
- for player in plist:
- for opponent in plist:
- diffs = []
- for index_pair, repetitions in self.interactions.items():
- if (player, opponent) == index_pair:
- for interaction in repetitions:
- scores = iu.compute_final_score_per_turn(interaction,
- self.game)
- diffs.append(scores[0] - scores[1])
- elif (opponent, player) == index_pair:
- for interaction in repetitions:
- scores = iu.compute_final_score_per_turn(interaction,
- self.game)
- diffs.append(scores[1] - scores[0])
- if diffs:
- payoff_diffs_means[player][opponent] = mean(diffs)
- else:
- payoff_diffs_means[player][opponent] = 0
-
+ payoff_diffs_means = [[mean(diff) for diff in player]
+ for player in self.score_diffs]
return payoff_diffs_means
@update_progress_bar
| Error in payoff_diffs_means?
I think there's a bug for self interactions for the payoff_diffs_means (this only affects stochastic strategies):
```
>>> import axelrod as axl
>>> from numpy import mean
>>> axl.seed(0)
>>> players = [s() for s in axl.demo_strategies]
>>> tournament = axl.Tournament(players, repetitions=2, turns=5)
>>> results = tournament.play()
>>> results.score_diffs
[[[0.0, 0.0],
[-5.0, -5.0],
[0.0, 0.0],
[0.0, 0.0],
[-3.0, -1.9999999999999998]],
[[5.0, 5.0], [0.0, 0.0], [1.0, 1.0], [1.0, 1.0], [1.0, 1.0]],
[[0.0, 0.0], [-1.0, -1.0], [0.0, 0.0], [0.0, 0.0], [0.0, -1.0]],
[[0.0, 0.0], [-1.0, -1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]],
[[3.0, 1.9999999999999998], [-1.0, -1.0], [0.0, 1.0], [0.0, 0.0], [2.0, 0.0]]]
```
If you look at the last element we see that over the two repetitions, the Random strategy scored 2 and 0 against itself (both positive numbers).
```
>>> results.payoff_diffs_means
[[0.0, -5.0, 0.0, 0.0, -2.5],
[5.0, 0.0, 1.0, 1.0, 1.0],
[0.0, -1.0, 0.0, 0.0, -0.5],
[0.0, -1.0, 0.0, 0.0, 0.0],
[2.5, -1.0, 0.5, 0.0, -1.0]]
```
That last mean is `-1.0` which is just the opposite mean (this is due to how the self interactions are handled and the fact that `build_payoff_diffs_means` rebuilds the payoff diff means from the interactions. A more direct calculation gives:
```
>>> [[mean(diff) for diff in player] for player in results.score_diffs]
[[0.0, -5.0, 0.0, 0.0, -2.5],
[5.0, 0.0, 1.0, 1.0, 1.0],
[0.0, -1.0, 0.0, 0.0, -0.5],
[0.0, -1.0, 0.0, 0.0, 0.0],
[2.5, -1.0, 0.5, 0.0, 1.0]]
```
which is the expected result. Just about to push a PR fix that replaces `build_payoff_diffs_means` with this. | Axelrod-Python/Axelrod | diff --git a/axelrod/tests/unit/test_resultset.py b/axelrod/tests/unit/test_resultset.py
index 5a81687b..dccd3e8f 100644
--- a/axelrod/tests/unit/test_resultset.py
+++ b/axelrod/tests/unit/test_resultset.py
@@ -364,6 +364,14 @@ class TestResultSet(unittest.TestCase):
for j, rate in enumerate(rs.eigenmoses_rating):
self.assertAlmostEqual(rate, self.expected_eigenmoses_rating[j])
+ def test_self_interaction_for_random_strategies(self):
+ # Based on https://github.com/Axelrod-Python/Axelrod/issues/670
+ axelrod.seed(0)
+ players = [s() for s in axelrod.demo_strategies]
+ tournament = axelrod.Tournament(players, repetitions=2, turns=5)
+ results = tournament.play()
+ self.assertEqual(results.payoff_diffs_means[-1][-1], 1.0)
+
class TestResultSetFromFile(unittest.TestCase):
tmp_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"numpy>=1.16.0",
"pandas>=1.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y libfreetype6-dev libpng-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
-e git+https://github.com/Axelrod-Python/Axelrod.git@46ad8990affd6b9a792c84af4cc0987670cd515a#egg=Axelrod
cycler==0.12.1
exceptiongroup==1.2.2
hypothesis==6.130.5
iniconfig==2.1.0
kiwisolver==1.4.7
matplotlib==3.3.4
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pillow==11.1.0
pluggy==1.5.0
pyparsing==2.1.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
sortedcontainers==2.4.0
tomli==2.2.1
tqdm==3.4.0
tzdata==2025.2
| name: Axelrod
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- cycler==0.12.1
- exceptiongroup==1.2.2
- hypothesis==6.130.5
- iniconfig==2.1.0
- kiwisolver==1.4.7
- matplotlib==3.3.4
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pillow==11.1.0
- pluggy==1.5.0
- pyparsing==2.1.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==2.2.1
- tqdm==3.4.0
- tzdata==2025.2
prefix: /opt/conda/envs/Axelrod
| [
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_self_interaction_for_random_strategies",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_self_interaction_for_random_strategies",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_self_interaction_for_random_strategies",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_self_interaction_for_random_strategies"
]
| []
| [
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_cooperating_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_eigenjesus_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_eigenmoses_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_good_partner_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_good_partner_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_init",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_init_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_match_lengths",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_normalised_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_normalised_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_null_results_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoff_diffs_means",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoff_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoff_stddevs",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_payoffs",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_ranked_names",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_ranking",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_score_diffs",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_scores_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_vengeful_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_wins",
"axelrod/tests/unit/test_resultset.py::TestResultSet::test_with_progress_bar",
"axelrod/tests/unit/test_resultset.py::TestResultSetFromFile::test_init",
"axelrod/tests/unit/test_resultset.py::TestResultSetFromFile::test_init_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetFromFile::test_progres_bar",
"axelrod/tests/unit/test_resultset.py::TestDecorator::test_update_progress_bar",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_cooperating_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_eigenjesus_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_eigenmoses_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_good_partner_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_good_partner_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_init",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_init_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_match_lengths",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_normalised_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_normalised_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_null_results_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_payoff_diffs_means",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_payoff_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_payoff_stddevs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_payoffs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_ranked_names",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_ranking",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_score_diffs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_scores_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_vengeful_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_wins",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructure::test_with_progress_bar",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_cooperating_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_eigenjesus_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_eigenmoses_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_good_partner_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_good_partner_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_init",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_init_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_match_lengths",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_normalised_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_normalised_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_null_results_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_payoff_diffs_means",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_payoff_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_payoff_stddevs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_payoffs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_ranked_names",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_ranking",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_score_diffs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_scores_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_vengeful_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_wins",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureTwo::test_with_progress_bar",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_cooperating_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_eigenjesus_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_eigenmoses_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_good_partner_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_good_partner_rating",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_init",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_init_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_match_lengths",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_normalised_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_normalised_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_null_results_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_payoff_diffs_means",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_payoff_matrix",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_payoff_stddevs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_payoffs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_ranked_names",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_ranking",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_score_diffs",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_scores",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_scores_with_different_game",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_vengeful_cooperation",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_wins",
"axelrod/tests/unit/test_resultset.py::TestResultSetSpatialStructureThree::test_with_progress_bar"
]
| []
| MIT License | 657 | [
"axelrod/result_set.py"
]
| [
"axelrod/result_set.py"
]
|
|
scikit-build__scikit-build-105 | abaaeee43e0456ef9da7d4878f0310c569bd6525 | 2016-07-22 19:35:31 | abaaeee43e0456ef9da7d4878f0310c569bd6525 | diff --git a/skbuild/cmaker.py b/skbuild/cmaker.py
index 9030c1e..2c7d7f5 100644
--- a/skbuild/cmaker.py
+++ b/skbuild/cmaker.py
@@ -9,6 +9,8 @@ import shlex
import sys
import sysconfig
+from subprocess import CalledProcessError
+
from .platform_specifics import get_platform
from .exceptions import SKBuildError
@@ -62,10 +64,11 @@ def _touch_init(folder):
class CMaker(object):
def __init__(self, **defines):
- if platform.system() != 'Windows':
- rtn = subprocess.call(['which', 'cmake'])
- if rtn != 0:
- sys.exit('CMake is not installed, aborting build.')
+ # verify that CMake is installed
+ try:
+ subprocess.check_call(['cmake', '--version'])
+ except (OSError, CalledProcessError):
+ raise SKBuildError('CMake is not installed, aborting build.')
self.platform = get_platform()
@@ -93,8 +96,9 @@ class CMaker(object):
generator_id = self.platform.get_best_generator(generator_id)
if generator_id is None:
- sys.exit("Could not get working generator for your system."
- " Aborting build.")
+ raise SKBuildError(
+ "Could not get working generator for your system."
+ " Aborting build.")
if not os.path.exists(CMAKE_BUILD_DIR):
os.makedirs(CMAKE_BUILD_DIR)
@@ -137,11 +141,20 @@ class CMaker(object):
# changes dir to cmake_build and calls cmake's configure step
# to generate makefile
- rtn = subprocess.check_call(cmd, cwd=CMAKE_BUILD_DIR)
+ rtn = subprocess.call(cmd, cwd=CMAKE_BUILD_DIR)
if rtn != 0:
- raise RuntimeError("Could not successfully configure "
- "your project. Please see CMake's "
- "output for more information.")
+ raise SKBuildError(
+ "An error occurred while configuring with CMake.\n"
+ " Command:\n"
+ " {}\n"
+ " Source directory:\n"
+ " {}\n"
+ " Working directory:\n"
+ " {}\n"
+ "Please see CMake's output for more information.".format(
+ self._formatArgsForDisplay(cmd),
+ os.path.abspath(cwd),
+ os.path.abspath(CMAKE_BUILD_DIR)))
CMaker.check_for_bad_installs()
@@ -335,7 +348,6 @@ class CMaker(object):
if bad_installs:
raise SKBuildError("\n".join((
- "",
" CMake-installed files must be within the project root.",
" Project Root:",
" " + install_dir,
@@ -349,7 +361,7 @@ class CMaker(object):
"""
clargs, config = pop_arg('--config', clargs, config)
if not os.path.exists(CMAKE_BUILD_DIR):
- raise RuntimeError(("CMake build folder ({}) does not exist. "
+ raise SKBuildError(("CMake build folder ({}) does not exist. "
"Did you forget to run configure before "
"make?").format(CMAKE_BUILD_DIR))
@@ -361,8 +373,20 @@ class CMaker(object):
shlex.split(os.environ.get("SKBUILD_BUILD_OPTIONS", "")))
)
- rtn = subprocess.check_call(cmd, cwd=CMAKE_BUILD_DIR)
- return rtn
+ rtn = subprocess.call(cmd, cwd=CMAKE_BUILD_DIR)
+ if rtn != 0:
+ raise SKBuildError(
+ "An error occurred while building with CMake.\n"
+ " Command:\n"
+ " {}\n"
+ " Source directory:\n"
+ " {}\n"
+ " Working directory:\n"
+ " {}\n"
+ "Please see CMake's output for more information.".format(
+ self._formatArgsForDisplay(cmd),
+ os.path.abspath(source_dir),
+ os.path.abspath(CMAKE_BUILD_DIR)))
def install(self):
"""Returns a list of tuples of (install location, file list) to install
@@ -377,3 +401,14 @@ class CMaker(object):
return [_remove_cwd_prefix(path) for path in manifest]
return []
+
+ @staticmethod
+ def _formatArgsForDisplay(args):
+ """Format a list of arguments appropriately for display. When formatting
+ a command and its arguments, the user should be able to execute the
+ command by copying and pasting the output directly into a shell.
+
+ Currently, the only formatting is naively surrounding each argument with
+ quotation marks.
+ """
+ return ' '.join("\"{}\"".format(arg) for arg in args)
diff --git a/skbuild/exceptions.py b/skbuild/exceptions.py
index 4a0e074..2b8f8b1 100644
--- a/skbuild/exceptions.py
+++ b/skbuild/exceptions.py
@@ -1,3 +1,6 @@
-class SKBuildError(Exception):
+class SKBuildError(RuntimeError):
+ """Exception raised when an error occurs while configuring or building a
+ project.
+ """
pass
diff --git a/skbuild/setuptools_wrap.py b/skbuild/setuptools_wrap.py
index 0fbd86f..54efdb3 100644
--- a/skbuild/setuptools_wrap.py
+++ b/skbuild/setuptools_wrap.py
@@ -131,12 +131,56 @@ def setup(*args, **kw):
reverse=True
))
- cmkr = cmaker.CMaker()
- cmkr.configure(cmake_args)
- cmkr.make(make_args)
+ try:
+ cmkr = cmaker.CMaker()
+ cmkr.configure(cmake_args)
+ cmkr.make(make_args)
+ except SKBuildError as e:
+ import traceback
+ print("Traceback (most recent call last):")
+ traceback.print_tb(sys.exc_info()[2])
+ print()
+ sys.exit(e)
+
+ _classify_files(cmkr.install(), package_data, package_prefixes, py_modules,
+ scripts, new_scripts, data_files)
+
+ kw['package_data'] = package_data
+ kw['package_dir'] = {
+ package: os.path.join(cmaker.CMAKE_INSTALL_DIR, prefix)
+ for prefix, package in package_prefixes
+ }
+
+ kw['py_modules'] = py_modules
+
+ kw['scripts'] = [
+ os.path.join(cmaker.CMAKE_INSTALL_DIR, script) if mask else script
+ for script, mask in new_scripts.items()
+ ]
+
+ kw['data_files'] = [
+ (parent_dir, list(file_set))
+ for parent_dir, file_set in data_files.items()
+ ]
+
+ # work around https://bugs.python.org/issue1011113
+ # (patches provided, but no updates since 2014)
+ cmdclass = kw.get('cmdclass', {})
+ cmdclass['build'] = cmdclass.get('build', build.build)
+ cmdclass['install'] = cmdclass.get('install', install.install)
+ cmdclass['clean'] = cmdclass.get('clean', clean.clean)
+ cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
+ cmdclass['bdist_wheel'] = cmdclass.get(
+ 'bdist_wheel', bdist_wheel.bdist_wheel)
+ kw['cmdclass'] = cmdclass
+
+ return upstream_setup(*args, **kw)
+
+def _classify_files(install_paths, package_data, package_prefixes, py_modules,
+ scripts, new_scripts, data_files):
install_root = os.path.join(os.getcwd(), cmaker.CMAKE_INSTALL_DIR)
- for path in cmkr.install():
+ for path in install_paths:
found_package = False
found_module = False
found_script = False
@@ -204,34 +248,3 @@ def setup(*args, **kw):
data_files[parent_dir] = file_set
file_set.add(os.path.join(cmaker.CMAKE_INSTALL_DIR, path))
del parent_dir, file_set
-
- kw['package_data'] = package_data
- kw['package_dir'] = {
- package: os.path.join(cmaker.CMAKE_INSTALL_DIR, prefix)
- for prefix, package in package_prefixes
- }
-
- kw['py_modules'] = py_modules
-
- kw['scripts'] = [
- os.path.join(cmaker.CMAKE_INSTALL_DIR, script) if mask else script
- for script, mask in new_scripts.items()
- ]
-
- kw['data_files'] = [
- (parent_dir, list(file_set))
- for parent_dir, file_set in data_files.items()
- ]
-
- # work around https://bugs.python.org/issue1011113
- # (patches provided, but no updates since 2014)
- cmdclass = kw.get('cmdclass', {})
- cmdclass['build'] = cmdclass.get('build', build.build)
- cmdclass['install'] = cmdclass.get('install', install.install)
- cmdclass['clean'] = cmdclass.get('clean', clean.clean)
- cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
- cmdclass['bdist_wheel'] = cmdclass.get(
- 'bdist_wheel', bdist_wheel.bdist_wheel)
- kw['cmdclass'] = cmdclass
-
- return upstream_setup(*args, **kw)
| Improve cmaker exception
When there is a problem building python module, report "human-friendly" error | scikit-build/scikit-build | diff --git a/tests/test_outside_project_root.py b/tests/test_outside_project_root.py
index 9500a4d..d67baa4 100644
--- a/tests/test_outside_project_root.py
+++ b/tests/test_outside_project_root.py
@@ -5,7 +5,8 @@
----------------------------------
Tries to build the `fail-outside-project-root` sample project. Ensures that the
-attempt fails with an SKBuildError exception.
+attempt fails with a SystemExit exception that has an SKBuildError exception as
+its value.
"""
from skbuild.exceptions import SKBuildError
@@ -23,10 +24,10 @@ def test_outside_project_root_fails():
def should_fail():
pass
- exception_thrown = False
+ failed = False
try:
should_fail()
- except SKBuildError:
- exception_thrown = True
+ except SystemExit as e:
+ failed = isinstance(e.code, SKBuildError)
- assert exception_thrown
+ assert failed
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8",
"tox"
],
"pre_install": [
"apt-get update",
"apt-get install -y build-essential"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
-e git+https://github.com/scikit-build/scikit-build.git@abaaeee43e0456ef9da7d4878f0310c569bd6525#egg=scikit_build
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: scikit-build
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- setuptools==22.0.5
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.16.2
- wheel==0.29.0
- zipp==3.6.0
prefix: /opt/conda/envs/scikit-build
| [
"tests/test_outside_project_root.py::test_outside_project_root_fails"
]
| []
| []
| []
| MIT License | 658 | [
"skbuild/setuptools_wrap.py",
"skbuild/exceptions.py",
"skbuild/cmaker.py"
]
| [
"skbuild/setuptools_wrap.py",
"skbuild/exceptions.py",
"skbuild/cmaker.py"
]
|
|
falconry__falcon-851 | a9f1813bbd85ae58dd3d81aaea53a6db58032c3b | 2016-07-23 20:26:05 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage][cc-pull] is 100% (diff: 100%)
> Merging [#851][cc-pull] into [master][cc-base-branch] will not change coverage
```diff
@@ master #851 diff @@
====================================
Files 29 29
Lines 1848 1846 -2
Methods 0 0
Messages 0 0
Branches 307 305 -2
====================================
- Hits 1848 1846 -2
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [a9f1813...8e1cb96][cc-compare]
[cc-base-branch]: https://codecov.io/gh/falconry/falcon/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/falconry/falcon/compare/a9f1813bbd85ae58dd3d81aaea53a6db58032c3b...8e1cb96cb4f92f3abf961e3ac78c0a71edcc833f?src=pr
[cc-pull]: https://codecov.io/gh/falconry/falcon/pull/851?src=pr | diff --git a/falcon/http_error.py b/falcon/http_error.py
index 1e9143e..6b7cd1e 100644
--- a/falcon/http_error.py
+++ b/falcon/http_error.py
@@ -40,8 +40,7 @@ class HTTPError(Exception):
returns ``True``, but child classes may override it
in order to return ``False`` when an empty HTTP body is desired.
See also the ``falcon.http_error.NoRepresentation`` mixin.
- title (str): Error title to send to the client. Will be ``None`` if
- the error should result in an HTTP response with an empty body.
+ title (str): Error title to send to the client.
description (str): Description of the error to send to the client.
headers (dict): Extra headers to add to the response.
link (str): An href that the client can provide to the user for
@@ -53,7 +52,8 @@ class HTTPError(Exception):
status (str): HTTP status code and text, such as "400 Bad Request"
Keyword Args:
- title (str): Human-friendly error title (default ``None``).
+ title (str): Human-friendly error title. If not provided, defaults
+ to the HTTP status line as determined by the ``status`` argument.
description (str): Human-friendly description of the error, along with
a helpful suggestion or two (default ``None``).
headers (dict or list): A ``dict`` of header names and values
@@ -97,7 +97,13 @@ class HTTPError(Exception):
def __init__(self, status, title=None, description=None, headers=None,
href=None, href_text=None, code=None):
self.status = status
- self.title = title
+
+ # TODO(kgriffs): HTTP/2 does away with the "reason phrase". Eventually
+ # we'll probably switch over to making everything code-based to more
+ # easily support HTTP/2. When that happens, should we continue to
+ # include the reason phrase in the title?
+ self.title = title or status
+
self.description = description
self.headers = headers
self.code = code
@@ -133,8 +139,7 @@ class HTTPError(Exception):
obj = obj_type()
- if self.title is not None:
- obj['title'] = self.title
+ obj['title'] = self.title
if self.description is not None:
obj['description'] = self.description
@@ -171,8 +176,7 @@ class HTTPError(Exception):
error_element = et.Element('error')
- if self.title is not None:
- et.SubElement(error_element, 'title').text = self.title
+ et.SubElement(error_element, 'title').text = self.title
if self.description is not None:
et.SubElement(error_element, 'description').text = self.description
| Error title should default to HTTP status
When the title is set to `None`, `HTTPError` should default to simply reusing the description passed in with the code in the `status` argument, i.e.:
```py
self.title = status_code[4:]
``` | falconry/falcon | diff --git a/tests/test_httperror.py b/tests/test_httperror.py
index 9cb39b4..14f0eef 100644
--- a/tests/test_httperror.py
+++ b/tests/test_httperror.py
@@ -259,15 +259,15 @@ class TestHTTPError(testing.TestBase):
def test_no_description_json(self):
body = self.simulate_request('/fail', method='PATCH')
self.assertEqual(self.srmock.status, falcon.HTTP_400)
- self.assertEqual(body, [b'{}'])
+ self.assertEqual(body, [b'{\n "title": "400 Bad Request"\n}'])
def test_no_description_xml(self):
body = self.simulate_request('/fail', method='PATCH',
headers={'Accept': 'application/xml'})
self.assertEqual(self.srmock.status, falcon.HTTP_400)
- expected_xml = (b'<?xml version="1.0" encoding="UTF-8"?>'
- b'<error />')
+ expected_xml = (b'<?xml version="1.0" encoding="UTF-8"?><error>'
+ b'<title>400 Bad Request</title></error>')
self.assertEqual(body, [expected_xml])
@@ -550,6 +550,7 @@ class TestHTTPError(testing.TestBase):
self.assertEqual(self.srmock.status, falcon.HTTP_404)
self.assertNotEqual(response, [])
expected_body = {
+ u'title': u'404 Not Found',
u'description': u'Not Found'
}
self.assertEqual(json.loads(response), expected_body)
@@ -590,6 +591,7 @@ class TestHTTPError(testing.TestBase):
self.assertEqual(self.srmock.status, falcon.HTTP_405)
self.assertNotEqual(response, [])
expected_body = {
+ u'title': u'405 Method Not Allowed',
u'description': u'Not Allowed'
}
self.assertEqual(json.loads(response), expected_body)
@@ -777,3 +779,14 @@ class TestHTTPError(testing.TestBase):
needs_title=False)
self._misc_test(falcon.HTTPInternalServerError, falcon.HTTP_500)
self._misc_test(falcon.HTTPBadGateway, falcon.HTTP_502)
+
+ def test_title_default_message_if_none(self):
+ headers = {
+ 'X-Error-Status': falcon.HTTP_503
+ }
+
+ body = self.simulate_request('/fail', headers=headers, decode='utf-8')
+ body_json = json.loads(body)
+
+ self.assertEqual(self.srmock.status, headers['X-Error-Status'])
+ self.assertEqual(body_json['title'], headers['X-Error-Status'])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"ddt",
"testtools",
"requests",
"pyyaml",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@a9f1813bbd85ae58dd3d81aaea53a6db58032c3b#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_httperror.py::TestHTTPError::test_404_with_body",
"tests/test_httperror.py::TestHTTPError::test_405_with_body",
"tests/test_httperror.py::TestHTTPError::test_no_description_json",
"tests/test_httperror.py::TestHTTPError::test_no_description_xml",
"tests/test_httperror.py::TestHTTPError::test_title_default_message_if_none"
]
| [
"tests/test_httperror.py::TestHTTPError::test_custom_new_error_serializer",
"tests/test_httperror.py::TestHTTPError::test_custom_old_error_serializer"
]
| [
"tests/test_httperror.py::TestHTTPError::test_401",
"tests/test_httperror.py::TestHTTPError::test_404_without_body",
"tests/test_httperror.py::TestHTTPError::test_405_without_body",
"tests/test_httperror.py::TestHTTPError::test_405_without_body_with_extra_headers",
"tests/test_httperror.py::TestHTTPError::test_405_without_body_with_extra_headers_double_check",
"tests/test_httperror.py::TestHTTPError::test_411",
"tests/test_httperror.py::TestHTTPError::test_413",
"tests/test_httperror.py::TestHTTPError::test_416",
"tests/test_httperror.py::TestHTTPError::test_429",
"tests/test_httperror.py::TestHTTPError::test_429_datetime",
"tests/test_httperror.py::TestHTTPError::test_429_no_retry_after",
"tests/test_httperror.py::TestHTTPError::test_503_datetime_retry_after",
"tests/test_httperror.py::TestHTTPError::test_503_integer_retry_after",
"tests/test_httperror.py::TestHTTPError::test_base_class",
"tests/test_httperror.py::TestHTTPError::test_client_does_not_accept_anything",
"tests/test_httperror.py::TestHTTPError::test_client_does_not_accept_json_or_xml",
"tests/test_httperror.py::TestHTTPError::test_custom_old_error_serializer_no_body",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_json",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_1_text_xml",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_2_application_xml",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_3_application_vnd_company_system_project_resource_xml_v_1_1",
"tests/test_httperror.py::TestHTTPError::test_epic_fail_xml_4_application_atom_xml",
"tests/test_httperror.py::TestHTTPError::test_forbidden_1_application_json",
"tests/test_httperror.py::TestHTTPError::test_forbidden_2_application_vnd_company_system_project_resource_json_v_1_1",
"tests/test_httperror.py::TestHTTPError::test_forbidden_3_application_json_patch_json",
"tests/test_httperror.py::TestHTTPError::test_invalid_header",
"tests/test_httperror.py::TestHTTPError::test_invalid_param",
"tests/test_httperror.py::TestHTTPError::test_misc",
"tests/test_httperror.py::TestHTTPError::test_missing_header",
"tests/test_httperror.py::TestHTTPError::test_missing_param",
"tests/test_httperror.py::TestHTTPError::test_temporary_413_datetime_retry_after",
"tests/test_httperror.py::TestHTTPError::test_temporary_413_integer_retry_after",
"tests/test_httperror.py::TestHTTPError::test_unicode_json",
"tests/test_httperror.py::TestHTTPError::test_unicode_xml"
]
| []
| Apache License 2.0 | 659 | [
"falcon/http_error.py"
]
| [
"falcon/http_error.py"
]
|
JonathonReinhart__scuba-68 | 91feb2f3c2ee491723bfd07d9f805eae42740c01 | 2016-07-25 01:55:20 | 7e0e786630258376013b454c75ef74e7e18711b8 | codecov-io: ## [Current coverage][cc-pull] is 98.24% (diff: 100%)
> Merging [#68][cc-pull] into [master][cc-base-branch] will increase coverage by **0.13%**
```diff
@@ master #68 diff @@
==========================================
Files 7 7
Lines 371 399 +28
Methods 0 0
Messages 0 0
Branches 0 0
==========================================
+ Hits 364 392 +28
Misses 7 7
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [7e74ccd...17bfbc7][cc-compare]
[cc-base-branch]: https://codecov.io/gh/JonathonReinhart/scuba/branch/master?src=pr
[cc-compare]: https://codecov.io/gh/JonathonReinhart/scuba/compare/7e74ccde08d9943a71697b6c9210ae483fd0d2fe...17bfbc77adb33ab4ed30098f8d3b468b5c23062f?src=pr
[cc-pull]: https://codecov.io/gh/JonathonReinhart/scuba/pull/68?src=pr | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2785697..fad024c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@ This project adheres to [Semantic Versioning](http://semver.org/).
## [Unreleased]
### Added
- Added support for enhanced aliases (#67)
+- Added support for per-alias image specification (#68)
### Changed
- All ancillary files are bind-mounted via single temp dir
diff --git a/example/per_alias_image/.scuba.yml b/example/per_alias_image/.scuba.yml
new file mode 100644
index 0000000..9b9bae0
--- /dev/null
+++ b/example/per_alias_image/.scuba.yml
@@ -0,0 +1,14 @@
+image: !from_yaml ../common.yml image
+aliases:
+
+ # This one inherits the default, top-level 'image'
+ default:
+ script:
+ - cat /etc/os-release
+
+ # This one specifies a different image to use
+ different:
+ image: alpine
+ script:
+ - cat /etc/os-release
+
diff --git a/example/per_alias_image/run_example.sh b/example/per_alias_image/run_example.sh
new file mode 100755
index 0000000..767ba50
--- /dev/null
+++ b/example/per_alias_image/run_example.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+cd $(dirname $0)
+
+echo -e "\nRunning 'scuba default'"
+scuba default
+
+echo -e "\nRunning 'scuba different'"
+scuba different
+
+echo ""
diff --git a/example/run_all.sh b/example/run_all.sh
index 9cabda1..e4d84f7 100755
--- a/example/run_all.sh
+++ b/example/run_all.sh
@@ -8,3 +8,4 @@ cd $(dirname $0)
./external_yaml_nested/run_example.sh
./scubainit_hooks/run_example.sh
./alias_multiline/run_example.sh
+./per_alias_image/run_example.sh
diff --git a/scuba/__main__.py b/scuba/__main__.py
index a0f4ef3..87c647b 100644
--- a/scuba/__main__.py
+++ b/scuba/__main__.py
@@ -125,6 +125,9 @@ class ScubaDive(object):
writeln(s, ' {0} => {1} {2}'.format(hostpath, contpath, options))
writeln(s, ' user_command: {0}'.format(self.user_command))
+ writeln(s, ' context:')
+ writeln(s, ' script: ' + str(self.context.script))
+ writeln(s, ' image: ' + str(self.context.image))
return s.getvalue()
@@ -231,7 +234,7 @@ class ScubaDive(object):
# Process any aliases
try:
- script = self.config.process_command(self.user_command)
+ context = self.config.process_command(self.user_command)
except ConfigError as cfgerr:
raise ScubaError(str(cfgerr))
@@ -240,14 +243,14 @@ class ScubaDive(object):
default CMD is run. Because we set the entrypiont, scuba must emulate the
default behavior itself.
'''
- if len(script) == 0:
+ if not context.script:
# No user-provided command; we want to run the image's default command
verbose_msg('No user command; getting command from image')
try:
- script = [get_image_command(self.config.image)]
+ context.script = [get_image_command(context.image)]
except DockerError as e:
raise ScubaError(str(e))
- verbose_msg('{0} Cmd: "{1}"'.format(self.config.image, script[0]))
+ verbose_msg('{0} Cmd: "{1}"'.format(context.image, context.script[0]))
# The user command is executed via a generated shell script
with self.open_scubadir_file('command.sh', 'wt') as f:
@@ -255,9 +258,11 @@ class ScubaDive(object):
writeln(f, '#!/bin/sh')
writeln(f, '# Auto-generated from scuba')
writeln(f, 'set -e')
- for cmd in script:
+ for cmd in context.script:
writeln(f, shell_quote_cmd(cmd))
+ self.context = context
+
def open_scubadir_file(self, name, mode):
@@ -329,7 +334,7 @@ class ScubaDive(object):
args += self.options
# Docker image
- args.append(self.config.image)
+ args.append(self.context.image)
# Command to run in container
args += self.docker_cmd
diff --git a/scuba/config.py b/scuba/config.py
index 9a7fd41..07bed29 100644
--- a/scuba/config.py
+++ b/scuba/config.py
@@ -103,6 +103,46 @@ def find_config():
rel = os.path.join(rest, rel)
+def _process_script_node(node, name):
+ '''Process a script-type node
+
+ This handles nodes that follow the *Common script schema*,
+ as outlined in doc/yaml-reference.md.
+ '''
+ if isinstance(node, basestring):
+ # The script is just the text itself
+ return [node]
+
+
+ if isinstance(node, dict):
+ # There must be a "script" key, which must be a list of strings
+ script = node.get('script')
+ if not script:
+ raise ConfigError("{0}: must have a 'script' subkey".format(name))
+
+ if not isinstance(script, list):
+ raise ConfigError("{0}.script: must be a list".format(name))
+
+ return script
+
+ raise ConfigError("{0}: must be string or dict".format(name))
+
+
+class ScubaAlias(object):
+ def __init__(self, name, script, image):
+ self.name = name
+ self.script = script
+ self.image = image
+
+ @classmethod
+ def from_dict(cls, name, node):
+ script = [shlex_split(cmd) for cmd in _process_script_node(node, name)]
+ image = node.get('image') if isinstance(node, dict) else None
+ return cls(name, script, image)
+
+class ScubaContext(object):
+ pass
+
class ScubaConfig(object):
def __init__(self, **data):
required_nodes = ('image',)
@@ -126,51 +166,13 @@ class ScubaConfig(object):
self._load_hooks(data)
- def _process_script(self, node, name):
- '''Process a script-type node
-
- This can handle yaml of either a simple form:
-
- node: this is my script
-
- Or a more complex form (which allows for other sub-nodes):
-
- node:
- script:
- - this is my script
- - it has multiple parts
-
- Other forms are disallowed:
-
- node:
- - this
- - is
- - forbidden
- '''
- if isinstance(node, basestring):
- # The script is just the text itself
- return [node]
-
-
- if isinstance(node, dict):
- # There must be a "script" key, which must be a list of strings
- script = node.get('script')
- if not script:
- raise ConfigError("{0}: must have a 'script' subkey".format(name))
-
- if not isinstance(script, list):
- raise ConfigError("{0}.script: must be a list".format(name))
-
- return script
-
- raise ConfigError("{0}: must be string or dict".format(name))
def _load_aliases(self, data):
self._aliases = {}
for name, node in data.get('aliases', {}).items():
- self._aliases[name] = [shlex_split(cmd) for cmd in self._process_script(node, name)]
+ self._aliases[name] = ScubaAlias.from_dict(name, node)
def _load_hooks(self, data):
@@ -179,7 +181,7 @@ class ScubaConfig(object):
for name in ('user', 'root',):
node = data.get('hooks', {}).get(name)
if node:
- hook = self._process_script(node, name)
+ hook = _process_script_node(node, name)
self._hooks[name] = hook
@@ -202,25 +204,39 @@ class ScubaConfig(object):
Arguments:
command A user command list (e.g. argv)
- Returns: A "script" - a list of command lists
+ Returns: A ScubaContext object with the following attributes:
+ script: a list of command lists
+ image: the docker image name to use
'''
- if not command:
- return command
-
- script = self.aliases.get(command[0])
- if not script:
- return [command]
-
- if len(command) > 1:
- # If an alias is a multiline script, then no additional
- # arguments will be allowed in the scuba invocation.
- if len(script) > 1:
- raise ConfigError('Additional arguments not allowed with multi-line aliases')
-
- command.pop(0)
- return [script[0] + command]
-
- return script
+ result = ScubaContext()
+ result.script = None
+ result.image = self.image
+
+ if command:
+ alias = self.aliases.get(command[0])
+ if not alias:
+ # Command is not an alias; use it as-is.
+ result.script = [command]
+ else:
+ # Using an alias
+ # Does this alias override the image?
+ if alias.image:
+ result.image = alias.image
+
+ if len(alias.script) > 1:
+ # Alias is a multiline script; no additional
+ # arguments are allowed in the scuba invocation.
+ if len(command) > 1:
+ raise ConfigError('Additional arguments not allowed with multi-line aliases')
+ result.script = alias.script
+
+ else:
+ # Alias is a single-line script; perform substituion
+ # and add user arguments.
+ command.pop(0)
+ result.script = [alias.script[0] + command]
+
+ return result
def load_config(path):
| Allow command aliases to be associated with images
This builds upon #60, which allows for aliases to be multiline scripts, and also allows those aliases to run with a different Docker image.
Example `.scuba.yml`:
```yaml
image: default_image_if_not_overridden
aliases:
foo: simple alias string
bar:
image: this overrides the top-level image
script: a simple script
snap:
image: this overrides the top-level image
script:
- a complex script
- with multiple commands
```
By using this schema, we can `!from_yaml` an entire alias definition directly from a [`.gitlab-ci.yml`] file. This allows scuba behave somewhat like a local CI runner.
[`.gitlab-ci.yml`]: http://doc.gitlab.com/ce/ci/yaml/README.html | JonathonReinhart/scuba | diff --git a/tests/test_config.py b/tests/test_config.py
index 3843dad..6be5b4b 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -115,8 +115,8 @@ class TestConfig(TestCase):
config = scuba.config.load_config('.scuba.yml')
assert_equals(config.image, 'busybox')
assert_equals(len(config.aliases), 2)
- assert_seq_equal(config.aliases['foo'], [['bar']])
- assert_seq_equal(config.aliases['snap'], [['crackle', 'pop']])
+ assert_seq_equal(config.aliases['foo'].script, [['bar']])
+ assert_seq_equal(config.aliases['snap'].script, [['crackle', 'pop']])
@@ -188,7 +188,7 @@ class TestConfig(TestCase):
image = 'na',
)
result = cfg.process_command([])
- assert_equal(result, [])
+ assert_equal(result.script, None)
def test_process_command_no_aliases(self):
@@ -197,7 +197,7 @@ class TestConfig(TestCase):
image = 'na',
)
result = cfg.process_command(['cmd', 'arg1', 'arg2'])
- assert_equal(result, [['cmd', 'arg1', 'arg2']])
+ assert_equal(result.script, [['cmd', 'arg1', 'arg2']])
def test_process_command_aliases_unused(self):
'''process_command handles unused aliases'''
@@ -209,7 +209,7 @@ class TestConfig(TestCase):
),
)
result = cfg.process_command(['cmd', 'arg1', 'arg2'])
- assert_equal(result, [['cmd', 'arg1', 'arg2']])
+ assert_equal(result.script, [['cmd', 'arg1', 'arg2']])
def test_process_command_aliases_used_noargs(self):
'''process_command handles aliases with no args'''
@@ -221,7 +221,7 @@ class TestConfig(TestCase):
),
)
result = cfg.process_command(['apple', 'arg1', 'arg2'])
- assert_equal(result, [['banana', 'arg1', 'arg2']])
+ assert_equal(result.script, [['banana', 'arg1', 'arg2']])
def test_process_command_aliases_used_withargs(self):
'''process_command handles aliases with args'''
@@ -233,7 +233,7 @@ class TestConfig(TestCase):
),
)
result = cfg.process_command(['apple', 'arg1', 'arg2'])
- assert_equal(result, [['banana', 'cherry', 'pie is good', 'arg1', 'arg2']])
+ assert_equal(result.script, [['banana', 'cherry', 'pie is good', 'arg1', 'arg2']])
def test_process_command_multiline_aliases_used(self):
'''process_command handles multiline aliases'''
@@ -248,7 +248,7 @@ class TestConfig(TestCase):
),
)
result = cfg.process_command(['apple'])
- assert_equal(result, [
+ assert_equal(result.script, [
['banana', 'cherry', 'pie is good'],
['so', 'is', 'peach'],
])
@@ -267,6 +267,23 @@ class TestConfig(TestCase):
)
assert_raises(scuba.config.ConfigError, cfg.process_command, ['apple', 'ARGS', 'NOT ALLOWED'])
+ def test_process_command_alias_overrides_image(self):
+ '''aliases can override the image'''
+ cfg = scuba.config.ScubaConfig(
+ image = 'default',
+ aliases = dict(
+ apple = dict(
+ script = [
+ 'banana cherry "pie is good"',
+ 'so is peach',
+ ],
+ image = 'overridden',
+ ),
+ ),
+ )
+ result = cfg.process_command(['apple'])
+ assert_equal(result.image, 'overridden')
+
############################################################################
# Hooks
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 4
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==3.7.1
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
-e git+https://github.com/JonathonReinhart/scuba.git@91feb2f3c2ee491723bfd07d9f805eae42740c01#egg=scuba
tomli==2.2.1
| name: scuba
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==3.7.1
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- tomli==2.2.1
prefix: /opt/conda/envs/scuba
| [
"tests/test_config.py::TestConfig::test_load_config_with_aliases",
"tests/test_config.py::TestConfig::test_process_command_alias_overrides_image",
"tests/test_config.py::TestConfig::test_process_command_aliases_unused",
"tests/test_config.py::TestConfig::test_process_command_aliases_used_noargs",
"tests/test_config.py::TestConfig::test_process_command_aliases_used_withargs",
"tests/test_config.py::TestConfig::test_process_command_empty",
"tests/test_config.py::TestConfig::test_process_command_multiline_aliases_used",
"tests/test_config.py::TestConfig::test_process_command_no_aliases"
]
| []
| [
"tests/test_config.py::TestConfig::test_find_config_cur_dir",
"tests/test_config.py::TestConfig::test_find_config_nonexist",
"tests/test_config.py::TestConfig::test_find_config_parent_dir",
"tests/test_config.py::TestConfig::test_find_config_way_up",
"tests/test_config.py::TestConfig::test_hooks_invalid_list",
"tests/test_config.py::TestConfig::test_hooks_invalid_script_type",
"tests/test_config.py::TestConfig::test_hooks_missing_script",
"tests/test_config.py::TestConfig::test_hooks_mixed",
"tests/test_config.py::TestConfig::test_load_config_empty",
"tests/test_config.py::TestConfig::test_load_config_image_from_yaml",
"tests/test_config.py::TestConfig::test_load_config_image_from_yaml_missing_arg",
"tests/test_config.py::TestConfig::test_load_config_image_from_yaml_missing_file",
"tests/test_config.py::TestConfig::test_load_config_image_from_yaml_nested_key_missing",
"tests/test_config.py::TestConfig::test_load_config_image_from_yaml_nested_keys",
"tests/test_config.py::TestConfig::test_load_config_image_from_yaml_unicode_args",
"tests/test_config.py::TestConfig::test_load_config_minimal",
"tests/test_config.py::TestConfig::test_load_unexpected_node",
"tests/test_config.py::TestConfig::test_process_command_multiline_aliases_forbid_user_args"
]
| []
| MIT License | 660 | [
"CHANGELOG.md",
"example/per_alias_image/.scuba.yml",
"scuba/__main__.py",
"scuba/config.py",
"example/run_all.sh",
"example/per_alias_image/run_example.sh"
]
| [
"CHANGELOG.md",
"example/per_alias_image/.scuba.yml",
"scuba/__main__.py",
"scuba/config.py",
"example/run_all.sh",
"example/per_alias_image/run_example.sh"
]
|
zalando-stups__senza-278 | 5e02aa336c41af3199acab430a9cf97440d2aac3 | 2016-07-26 07:31:01 | 35b73f49b8cb58e7892908413bdf2a61cfe3058e | diff --git a/senza/components/elastic_load_balancer.py b/senza/components/elastic_load_balancer.py
index 6140440..347b515 100644
--- a/senza/components/elastic_load_balancer.py
+++ b/senza/components/elastic_load_balancer.py
@@ -23,34 +23,7 @@ def get_load_balancer_name(stack_name: str, stack_version: str):
return '{}-{}'.format(stack_name[:l], stack_version)
-def component_elastic_load_balancer(definition, configuration, args, info, force, account_info):
- lb_name = configuration["Name"]
-
- # domains pointing to the load balancer
- subdomain = ''
- main_zone = None
- for name, domain in configuration.get('Domains', {}).items():
- name = '{}{}'.format(lb_name, name)
- definition["Resources"][name] = {
- "Type": "AWS::Route53::RecordSet",
- "Properties": {
- "Type": "CNAME",
- "TTL": 20,
- "ResourceRecords": [
- {"Fn::GetAtt": [lb_name, "DNSName"]}
- ],
- "Name": "{0}.{1}".format(domain["Subdomain"], domain["Zone"]),
- "HostedZoneName": "{0}".format(domain["Zone"])
- },
- }
-
- if domain["Type"] == "weighted":
- definition["Resources"][name]["Properties"]['Weight'] = 0
- definition["Resources"][name]["Properties"]['SetIdentifier'] = "{0}-{1}".format(info["StackName"],
- info["StackVersion"])
- subdomain = domain['Subdomain']
- main_zone = domain['Zone'] # type: str
-
+def get_listeners(subdomain, main_zone, configuration):
ssl_cert = configuration.get('SSLCertificateId')
if ACMCertificate.arn_is_acm_certificate(ssl_cert):
@@ -95,6 +68,46 @@ def component_elastic_load_balancer(definition, configuration, args, info, force
'SSL certificate for "{}"'.format(name))
else:
fatal_error('Could not find any SSL certificate')
+ return [
+ {
+ "PolicyNames": [],
+ "SSLCertificateId": ssl_cert,
+ "Protocol": "HTTPS",
+ "InstancePort": configuration["HTTPPort"],
+ "LoadBalancerPort": 443
+ }
+ ]
+
+
+def component_elastic_load_balancer(definition, configuration, args, info, force, account_info):
+ lb_name = configuration["Name"]
+
+ # domains pointing to the load balancer
+ subdomain = ''
+ main_zone = None
+ for name, domain in configuration.get('Domains', {}).items():
+ name = '{}{}'.format(lb_name, name)
+ definition["Resources"][name] = {
+ "Type": "AWS::Route53::RecordSet",
+ "Properties": {
+ "Type": "CNAME",
+ "TTL": 20,
+ "ResourceRecords": [
+ {"Fn::GetAtt": [lb_name, "DNSName"]}
+ ],
+ "Name": "{0}.{1}".format(domain["Subdomain"], domain["Zone"]),
+ "HostedZoneName": "{0}".format(domain["Zone"])
+ },
+ }
+
+ if domain["Type"] == "weighted":
+ definition["Resources"][name]["Properties"]['Weight'] = 0
+ definition["Resources"][name]["Properties"]['SetIdentifier'] = "{0}-{1}".format(info["StackName"],
+ info["StackVersion"])
+ subdomain = domain['Subdomain']
+ main_zone = domain['Zone'] # type: str
+
+ listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration)
health_check_protocol = "HTTP"
allowed_health_check_protocols = ("HTTP", "TCP", "UDP", "SSL")
@@ -157,15 +170,7 @@ def component_elastic_load_balancer(definition, configuration, args, info, force
"Timeout": "5",
"Target": health_check_target
},
- "Listeners": [
- {
- "PolicyNames": [],
- "SSLCertificateId": ssl_cert,
- "Protocol": "HTTPS",
- "InstancePort": configuration["HTTPPort"],
- "LoadBalancerPort": 443
- }
- ],
+ "Listeners": listeners,
"ConnectionDrainingPolicy": {
"Enabled": True,
"Timeout": 60
| Can't Deploy ZMON appliance (without SSL Certificate)
Senza is returning an error, when I'm trying to deploy the ZMON appliance:
```
senza --region=eu-central-1 create zmon-appliance-planetexpress-qa.yaml cd14c1 cd14
Generating Cloud Formation template.. EXCEPTION OCCURRED: An error occurred (ValidationError) when calling the GetServerCertificate operation: The specified value for serverCertificateName is invalid. It must contain only alphanumeric characters and/or the following: +=,.@_-
Unknown Error: An error occurred (ValidationError) when calling the GetServerCertificate operation: The specified value for serverCertificateName is invalid. It must contain only alphanumeric characters and/or the following: +=,.@_-.
Please create an issue with the content of /var/folders/cv/08715ldx7qx_76kkn3kmdmdnlc10w4/T/senza-traceback-l_pvs53y
```
Contents of this file are the following:
```
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/senza/manaus/iam.py", line 80, in get_by_name
certificate = certificates[0]
IndexError: list index out of range
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/senza/error_handling.py", line 69, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/site-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 555, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 638, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/usr/local/lib/python3.5/site-packages/senza/cli.py", line 239, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/usr/local/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 67, in component_elastic_load_balancer
certificate = IAMServerCertificate.get_by_name(ssl_cert)
File "/usr/local/lib/python3.5/site-packages/senza/manaus/iam.py", line 82, in get_by_name
raise error
File "/usr/local/lib/python3.5/site-packages/senza/manaus/iam.py", line 71, in get_by_name
response = client.get_server_certificate(ServerCertificateName=name)
File "/usr/local/lib/python3.5/site-packages/botocore/client.py", line 278, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python3.5/site-packages/botocore/client.py", line 572, in _make_api_call
raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (ValidationError) when calling the GetServerCertificate operation: The specified value for serverCertificateName is invalid. It must contain only alphanumeric characters and/or the following: +=,.@_-`
```
The LB-part of zmon-appliance-planetexpress-qa.yaml (autogenerated by Stups' script) looks like this:
```
ZmonApplianceLoadBalancer:
Type: Senza::ElasticLoadBalancer
HTTPPort: 9090
HealthCheckPath: /health
HealthCheckPort: 9090
SecurityGroups:
app-zmon-{{AccountInfo.TeamID}}
Scheme: internal
SSLCertificateId: arn:none # hack to disable SSL
Listeners:
LoadBalancerPort: 9090
Protocol: HTTP
InstancePort: 9090
InstanceProtocol: HTTP
```
(had to remove the '-' signs for Markup to work).
Am I right to assume SSLCertificateId: arn:none is causing the issue?
What can I do? | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 7112492..4b3330f 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -136,6 +136,28 @@ def test_component_load_balancer_idletimeout(monkeypatch):
assert 'HTTPPort' not in result["Resources"]["test_lb"]["Properties"]
+def test_component_load_balancer_http_only(monkeypatch):
+ configuration = {
+ "Name": "test_lb",
+ "SecurityGroups": "",
+ "HTTPPort": "9999",
+ "SSLCertificateId": "arn:none", # should be ignored as we overwrite Listeners
+ "Listeners": [{"Foo": "Bar"}]
+ }
+ info = {'StackName': 'foobar', 'StackVersion': '0.1'}
+ definition = {"Resources": {}}
+
+ args = MagicMock()
+ args.region = "foo"
+
+ mock_string_result = MagicMock()
+ mock_string_result.return_value = "foo"
+ monkeypatch.setattr('senza.components.elastic_load_balancer.resolve_security_groups', mock_string_result)
+
+ result = component_elastic_load_balancer(definition, configuration, args, info, False, MagicMock())
+ assert 'Bar' == result["Resources"]["test_lb"]["Properties"]["Listeners"][0]["Foo"]
+
+
def test_component_load_balancer_namelength(monkeypatch):
configuration = {
"Name": "test_lb",
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@5e02aa336c41af3199acab430a9cf97440d2aac3#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_load_balancer_http_only"
]
| [
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version"
]
| []
| Apache License 2.0 | 661 | [
"senza/components/elastic_load_balancer.py"
]
| [
"senza/components/elastic_load_balancer.py"
]
|
|
goldmann__docker-squash-109 | afd81aefb5230ef8f001fec544e8649984427444 | 2016-07-26 09:11:52 | 1e6ae47e44e69f3094e2a5e75be20a74c1c0a44c | diff --git a/docker_squash/image.py b/docker_squash/image.py
index 9487d44..40fa7fe 100644
--- a/docker_squash/image.py
+++ b/docker_squash/image.py
@@ -492,15 +492,21 @@ class Image(object):
"Skipping '%s' marker file, this file was added earlier for some reason..." % marker.name)
continue
+ # https://github.com/goldmann/docker-squash/issues/108
+ # Some tar archives do have the filenames prefixed with './'
+ # which does not have any effect when we unpack the tar achive,
+ # but when processing tar content - we see this.
+ actual_files = [actual_file, "./%s" % actual_file, "%s/" % actual_file, ".%s/" % actual_file]
+
if files_in_layers:
for files in files_in_layers.values():
- if actual_file in files or "%s/" % actual_file in files:
+ if set(files).intersection(actual_files):
should_be_added_back = True
break
else:
# There are no previous layers, so we need to add it back
# In fact this shouldn't happen since having a marker file
- # where there is no previous layer doesn not make sense.
+ # where there is no previous layer does not make sense.
should_be_added_back = True
if should_be_added_back:
| Marker files is skipped when it shouldn't be
When using d568d17703beed8d4eea04ef3987f591c1082af0 -- a removed file `/lib/systemd/system/getty.target` was added back because the marker file `usr/lib/systemd/system/.wh.getty.target` was not added back.
Build log:
```
2016-07-21 08:32:36,167 - atomic_reactor.plugins.squash - INFO - will squash from base-image: 'sha256:5fbb74308f2db1a6c2fa06e72ede9fdd4ecf34754e8e9280e382b77652b470a3'
2016-07-21 08:32:36,167 - atomic_reactor.plugins.squash - DEBUG - Preparing Docker client...
2016-07-21 08:32:36,257 - atomic_reactor.plugins.squash - DEBUG - Docker client ready
2016-07-21 08:32:36,433 - atomic_reactor.plugins.squash - INFO - docker-squash version 1.0.0dev, Docker 7ffc8ee-unsupported, API 1.22...
2016-07-21 08:32:36,434 - atomic_reactor.plugins.squash - INFO - Using v2 image format
2016-07-21 08:32:36,435 - atomic_reactor.plugins.squash - DEBUG - Using /tmp/docker-squash-rXDNxo as the temporary directory
2016-07-21 08:32:36,619 - atomic_reactor.plugins.squash - INFO - Old image has 37 layers
2016-07-21 08:32:36,619 - atomic_reactor.plugins.squash - DEBUG - Old layers: [u'sha256:5fbb74308f2db1a6c2fa06e72ede9fdd4ecf34754e8e9280e382b77652b470a3', u'sha256:bbee0f5d0ff117a3d79fa01be9736f0857454f2cb84a3509e27a5c8ced5ddde9', u'sha256:832da14f124378d10b5bf503e027f79f87e470a6aae150ed8405011ee3a4e90f', u'sha256:b276d5028ac16bec8160a5efb359bb92bab09c0f57219e5eb1f275dc6ba07ac1', u'sha256:e3d88785934717f96c14268ac609c0051637e10ade16e72f86b033aad5644063', u'sha256:97bb7a6f821846c9f74720782e71d8d58b032708605db2005636206eb1ccb376', u'sha256:dc584227175f896f8a88e606e199139489ecdc9a7937c91bace4d7a1509657d3', u'sha256:280d0437a5fea1bd95efa83c7ee81da1e8796219f40cb7899ff61b072c35f1d0', u'sha256:feabda7193bf435dd18e53fe22e8229de59e47b24934a4348bbd443a2281e5df', u'sha256:e544a788961c41219be547f02bcfd9f783510feda88a8b11718e3d202935d4ff', u'sha256:6d005672424201244af31e9340e65c8f4b2dfe81e37f8c747db78ecbe245508b', u'sha256:118f84e0dd0d7ef09bed5b5e5393184299d0c6602c1f2b1b83dab00700857de9', u'sha256:c09f0dea089b08f8905f2e881d5a29d2319d366dafe9b23ffcb2fa2868a7bee2', u'sha256:bbdaa984deb66c72828ca785bbd0fa2c937217c1e0f7ad0bead3806188d0b2ab', u'sha256:42f45005f9a773018e0da6b0ae68fb1821dd2eb919455c54a729c86fca80a1c0', u'sha256:9f6da8b3ddd79b39d99625aaa93b19f86177ec4b48da9e30c019764d107b120e', u'sha256:ea023ebd14ed0701fd291ee34d78fa3da2197ec456402b85135616745832d0fc', u'sha256:60a5c01275d5332d769a806c1dd5cba4e88f6faa776a013ef4567cc266066d6b', u'sha256:3091849c14c020c13832e3aee982043204971850a7720baaf8d2153c8a31115c', u'sha256:f22e7e897116af57810575e3bfabafe26200ac21499ba63b3adcc962abd77570', u'sha256:8d3c9563a66aafa920e5adab86508d6817d67e6485dab5bf735ee53b37e782ef', u'sha256:b20efd6f3e7f10a55dc8dd2d8501fae01aa0f66cd77b83f1bc0a5d782e9ee56c', u'sha256:6a3365a64966f3c1c7f3d709fe6274fcca4bd6701ae8d7fc9de958cbe29a1af9', u'sha256:819df5700f8fad20f1911ee83b30e240bcc70568757a6df82fee8286be7c3f38', u'sha256:72f2deb0660e678c0a44e3f5b0099fe693e67304619913a8e0744f70a1784b63', 
u'sha256:d16c502709728c73f00cd47fed58ad7533d47b032825c5319c9e0a7cdc233103', u'sha256:0eaad91b9476a5d724e10417df335f9ed5683cd65afb73bc21e8b5ca1d50ff3a', u'sha256:afb45d9f8f31741a4cbc1e5da1dff2735828976f1463394f5f269e2369337902', u'sha256:cfeec6a9ef41f307b29e3349ee7b1ebdf61d0ef7525245f3c9b6998949a257a1', u'sha256:c38bf991060ff332099a45ac442653e6c45664bd03df583d8bd674583a2c39a2', u'sha256:2720289416f1983f6eaf4ebd9efd2ec40561ee5a0ec8d4af22b3190823f25c4c', u'sha256:66baef731ae867db4513fa1e8ad4da2f79aba65359e5b376b61f334f6113263d', u'sha256:3838b05694fd9924171b58784cf91b0ee1b692fe8bf1d70033bf6e00513fa49a', u'sha256:2c8942f23fc3dc2cf400c624ec30deb6401d6a934992cfcb661a8fde6300c1de', u'sha256:9010760b9de5c078759da7490e98047843d9f137ed6951d5d63ff44707b63e19', u'sha256:94deeb9a1f4a6196b799dcc5333560184bf2cdbb6d1f8205201808854a5b837f', u'sha256:8d5939338b94210f4258c7e03bbfa1b318fb5353ece6ed749ae7423aed488af4']
2016-07-21 08:32:36,619 - atomic_reactor.plugins.squash - DEBUG - We detected layer as the argument to squash
2016-07-21 08:32:36,650 - atomic_reactor.plugins.squash - DEBUG - Layer ID to squash from: sha256:5fbb74308f2db1a6c2fa06e72ede9fdd4ecf34754e8e9280e382b77652b470a3
2016-07-21 08:32:36,650 - atomic_reactor.plugins.squash - INFO - Checking if squashing is necessary...
2016-07-21 08:32:36,651 - atomic_reactor.plugins.squash - INFO - Attempting to squash last 36 layers...
2016-07-21 08:32:36,651 - atomic_reactor.plugins.squash - DEBUG - Layers to squash: [u'sha256:bbee0f5d0ff117a3d79fa01be9736f0857454f2cb84a3509e27a5c8ced5ddde9', u'sha256:832da14f124378d10b5bf503e027f79f87e470a6aae150ed8405011ee3a4e90f', u'sha256:b276d5028ac16bec8160a5efb359bb92bab09c0f57219e5eb1f275dc6ba07ac1', u'sha256:e3d88785934717f96c14268ac609c0051637e10ade16e72f86b033aad5644063', u'sha256:97bb7a6f821846c9f74720782e71d8d58b032708605db2005636206eb1ccb376', u'sha256:dc584227175f896f8a88e606e199139489ecdc9a7937c91bace4d7a1509657d3', u'sha256:280d0437a5fea1bd95efa83c7ee81da1e8796219f40cb7899ff61b072c35f1d0', u'sha256:feabda7193bf435dd18e53fe22e8229de59e47b24934a4348bbd443a2281e5df', u'sha256:e544a788961c41219be547f02bcfd9f783510feda88a8b11718e3d202935d4ff', u'sha256:6d005672424201244af31e9340e65c8f4b2dfe81e37f8c747db78ecbe245508b', u'sha256:118f84e0dd0d7ef09bed5b5e5393184299d0c6602c1f2b1b83dab00700857de9', u'sha256:c09f0dea089b08f8905f2e881d5a29d2319d366dafe9b23ffcb2fa2868a7bee2', u'sha256:bbdaa984deb66c72828ca785bbd0fa2c937217c1e0f7ad0bead3806188d0b2ab', u'sha256:42f45005f9a773018e0da6b0ae68fb1821dd2eb919455c54a729c86fca80a1c0', u'sha256:9f6da8b3ddd79b39d99625aaa93b19f86177ec4b48da9e30c019764d107b120e', u'sha256:ea023ebd14ed0701fd291ee34d78fa3da2197ec456402b85135616745832d0fc', u'sha256:60a5c01275d5332d769a806c1dd5cba4e88f6faa776a013ef4567cc266066d6b', u'sha256:3091849c14c020c13832e3aee982043204971850a7720baaf8d2153c8a31115c', u'sha256:f22e7e897116af57810575e3bfabafe26200ac21499ba63b3adcc962abd77570', u'sha256:8d3c9563a66aafa920e5adab86508d6817d67e6485dab5bf735ee53b37e782ef', u'sha256:b20efd6f3e7f10a55dc8dd2d8501fae01aa0f66cd77b83f1bc0a5d782e9ee56c', u'sha256:6a3365a64966f3c1c7f3d709fe6274fcca4bd6701ae8d7fc9de958cbe29a1af9', u'sha256:819df5700f8fad20f1911ee83b30e240bcc70568757a6df82fee8286be7c3f38', u'sha256:72f2deb0660e678c0a44e3f5b0099fe693e67304619913a8e0744f70a1784b63', u'sha256:d16c502709728c73f00cd47fed58ad7533d47b032825c5319c9e0a7cdc233103', 
u'sha256:0eaad91b9476a5d724e10417df335f9ed5683cd65afb73bc21e8b5ca1d50ff3a', u'sha256:afb45d9f8f31741a4cbc1e5da1dff2735828976f1463394f5f269e2369337902', u'sha256:cfeec6a9ef41f307b29e3349ee7b1ebdf61d0ef7525245f3c9b6998949a257a1', u'sha256:c38bf991060ff332099a45ac442653e6c45664bd03df583d8bd674583a2c39a2', u'sha256:2720289416f1983f6eaf4ebd9efd2ec40561ee5a0ec8d4af22b3190823f25c4c', u'sha256:66baef731ae867db4513fa1e8ad4da2f79aba65359e5b376b61f334f6113263d', u'sha256:3838b05694fd9924171b58784cf91b0ee1b692fe8bf1d70033bf6e00513fa49a', u'sha256:2c8942f23fc3dc2cf400c624ec30deb6401d6a934992cfcb661a8fde6300c1de', u'sha256:9010760b9de5c078759da7490e98047843d9f137ed6951d5d63ff44707b63e19', u'sha256:94deeb9a1f4a6196b799dcc5333560184bf2cdbb6d1f8205201808854a5b837f', u'sha256:8d5939338b94210f4258c7e03bbfa1b318fb5353ece6ed749ae7423aed488af4']
2016-07-21 08:32:36,651 - atomic_reactor.plugins.squash - DEBUG - Layers to move: [u'sha256:5fbb74308f2db1a6c2fa06e72ede9fdd4ecf34754e8e9280e382b77652b470a3']
2016-07-21 08:32:36,651 - atomic_reactor.plugins.squash - INFO - Saving image sha256:8d5939338b94210f4258c7e03bbfa1b318fb5353ece6ed749ae7423aed488af4 to /tmp/docker-squash-rXDNxo/old/image.tar file...
2016-07-21 08:32:36,651 - atomic_reactor.plugins.squash - DEBUG - Try #1...
2016-07-21 08:33:35,240 - atomic_reactor.plugins.squash - INFO - Image saved!
2016-07-21 08:33:35,240 - atomic_reactor.plugins.squash - INFO - Unpacking /tmp/docker-squash-rXDNxo/old/image.tar tar file to /tmp/docker-squash-rXDNxo/old directory
2016-07-21 08:33:36,523 - atomic_reactor.plugins.squash - INFO - Archive unpacked!
2016-07-21 08:33:36,524 - atomic_reactor.plugins.squash - DEBUG - Removing exported tar (/tmp/docker-squash-rXDNxo/old/image.tar)...
2016-07-21 08:33:36,674 - atomic_reactor.plugins.squash - INFO - Squashing image 'sha256:8d5939338b94210f4258c7e03bbfa1b318fb5353ece6ed749ae7423aed488af4'...
2016-07-21 08:33:36,675 - atomic_reactor.plugins.squash - DEBUG - Reading '/tmp/docker-squash-rXDNxo/old/manifest.json' JSON file...
2016-07-21 08:33:36,676 - atomic_reactor.plugins.squash - DEBUG - Reading '/tmp/docker-squash-rXDNxo/old/8d5939338b94210f4258c7e03bbfa1b318fb5353ece6ed749ae7423aed488af4.json' JSON file...
2016-07-21 08:33:36,689 - atomic_reactor.plugins.squash - INFO - Starting squashing...
2016-07-21 08:33:36,689 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/37b1cf0c18c9775f6c1b90d2c31912259f18592c5a91bec55f4fc9663548e8bb/layer.tar'...
2016-07-21 08:33:36,691 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/37b1cf0c18c9775f6c1b90d2c31912259f18592c5a91bec55f4fc9663548e8bb/layer.tar' archive...
2016-07-21 08:33:36,691 - atomic_reactor.plugins.squash - DEBUG - Found 'etc/yum.repos.d/.wh.rhgs-docker.repo' marker file
2016-07-21 08:33:36,693 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/yum.repos.d/.wh.rhgs-docker.repo' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:36,693 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/cd17bd725c39babaa6695303ea2bf52ad1aa9ca7af626ddd4715be577e0db700/layer.tar'...
2016-07-21 08:33:36,695 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/cd17bd725c39babaa6695303ea2bf52ad1aa9ca7af626ddd4715be577e0db700/layer.tar' archive...
2016-07-21 08:33:36,696 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/e7a0da1b99477373e8dfc4bf0a6e8b19d6f3c86be46f176251c688e8f4390d52/layer.tar'...
2016-07-21 08:33:36,698 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/e7a0da1b99477373e8dfc4bf0a6e8b19d6f3c86be46f176251c688e8f4390d52/layer.tar' archive...
2016-07-21 08:33:36,698 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:36,699 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/512004235517479565c3b551711f31d2fb0b870ee3107b6c4bf492e49b035e03/layer.tar'...
2016-07-21 08:33:36,700 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/512004235517479565c3b551711f31d2fb0b870ee3107b6c4bf492e49b035e03/layer.tar' archive...
2016-07-21 08:33:36,701 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:36,701 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd' file because it's older than file already added to the archive
2016-07-21 08:33:36,701 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd/system' file because it's older than file already added to the archive
2016-07-21 08:33:36,701 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd/system/multi-user.target.wants' file because it's older than file already added to the archive
2016-07-21 08:33:36,701 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/8c643c4ce3f04c2c54fb28328d69101f15dff20c216cbe1fdc65ca66d47cfaec/layer.tar'...
2016-07-21 08:33:36,703 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/8c643c4ce3f04c2c54fb28328d69101f15dff20c216cbe1fdc65ca66d47cfaec/layer.tar' archive...
2016-07-21 08:33:36,742 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/c5ad8634728331c6361417c356104f9ddaf355dec459bec4ad19dcda076f2e5f/layer.tar'...
2016-07-21 08:33:36,744 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/c5ad8634728331c6361417c356104f9ddaf355dec459bec4ad19dcda076f2e5f/layer.tar' archive...
2016-07-21 08:33:36,744 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var' file because it's older than file already added to the archive
2016-07-21 08:33:36,745 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib' file because it's older than file already added to the archive
2016-07-21 08:33:36,745 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm' file because it's older than file already added to the archive
2016-07-21 08:33:36,745 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Group' file because it's older than file already added to the archive
2016-07-21 08:33:36,745 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Installtid' file because it's older than file already added to the archive
2016-07-21 08:33:36,745 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Name' file because it's older than file already added to the archive
2016-07-21 08:33:36,745 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Packages' file because it's older than file already added to the archive
2016-07-21 08:33:36,746 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Providename' file because it's older than file already added to the archive
2016-07-21 08:33:36,746 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Sha1header' file because it's older than file already added to the archive
2016-07-21 08:33:36,746 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.001' file because it's older than file already added to the archive
2016-07-21 08:33:36,746 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.002' file because it's older than file already added to the archive
2016-07-21 08:33:36,746 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.003' file because it's older than file already added to the archive
2016-07-21 08:33:36,746 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/4016a911b5fa873be6dbf21b92da3ed34cc17d6eef01b113c08cf22d9ec2f594/layer.tar'...
2016-07-21 08:33:36,747 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/4016a911b5fa873be6dbf21b92da3ed34cc17d6eef01b113c08cf22d9ec2f594/layer.tar' archive...
2016-07-21 08:33:36,748 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/95fe99e8d651069505ad86144924048152d11b65738423db087c4152ebdc3308/layer.tar'...
2016-07-21 08:33:36,749 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/95fe99e8d651069505ad86144924048152d11b65738423db087c4152ebdc3308/layer.tar' archive...
2016-07-21 08:33:36,749 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr' file because it's older than file already added to the archive
2016-07-21 08:33:36,749 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/sbin' file because it's older than file already added to the archive
2016-07-21 08:33:36,750 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/cc1c8659cdb6f73f52e533a84a29c9d38b362c563c0ca7bed659ddd93e94c0c5/layer.tar'...
2016-07-21 08:33:36,751 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/cc1c8659cdb6f73f52e533a84a29c9d38b362c563c0ca7bed659ddd93e94c0c5/layer.tar' archive...
2016-07-21 08:33:36,751 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr' file because it's older than file already added to the archive
2016-07-21 08:33:36,751 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/sbin' file because it's older than file already added to the archive
2016-07-21 08:33:36,752 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/sbin/gluster-setup.sh' file because it's older than file already added to the archive
2016-07-21 08:33:36,752 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/e8516839de22e25bd0623c249f00e669015ccf51fb840c7c210260a86d7654a5/layer.tar'...
2016-07-21 08:33:36,753 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/e8516839de22e25bd0623c249f00e669015ccf51fb840c7c210260a86d7654a5/layer.tar' archive...
2016-07-21 08:33:36,753 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:36,754 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd' file because it's older than file already added to the archive
2016-07-21 08:33:36,754 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd/system' file because it's older than file already added to the archive
2016-07-21 08:33:36,754 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/ee5df63d06865eebe86318dcafa447e8623d728e15d00bc3e07129aa86de09d2/layer.tar'...
2016-07-21 08:33:36,765 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/ee5df63d06865eebe86318dcafa447e8623d728e15d00bc3e07129aa86de09d2/layer.tar' archive...
2016-07-21 08:33:36,766 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:36,769 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var' file because it's older than file already added to the archive
2016-07-21 08:33:36,769 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib' file because it's older than file already added to the archive
2016-07-21 08:33:36,781 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/f32375a80ae4bd943cbc3b808e10e699adcadfc0c72bc50da7195a1f80c91f57/layer.tar'...
2016-07-21 08:33:36,783 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/f32375a80ae4bd943cbc3b808e10e699adcadfc0c72bc50da7195a1f80c91f57/layer.tar' archive...
2016-07-21 08:33:36,783 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:36,783 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/9c2b4573275d60a182bdccf958a4be651513c9709aec7249bf7356344500dc8e/layer.tar'...
2016-07-21 08:33:36,784 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/9c2b4573275d60a182bdccf958a4be651513c9709aec7249bf7356344500dc8e/layer.tar' archive...
2016-07-21 08:33:36,785 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:36,786 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/96f5dc33a3b54f675842fd5ab829bf460485547eb1998c50c5fe9b52ba040d76/layer.tar'...
2016-07-21 08:33:36,787 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/96f5dc33a3b54f675842fd5ab829bf460485547eb1998c50c5fe9b52ba040d76/layer.tar' archive...
2016-07-21 08:33:36,787 - atomic_reactor.plugins.squash - DEBUG - Found 'usr/lib/systemd/system/.wh.getty.target' marker file
2016-07-21 08:33:36,787 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr' file because it's older than file already added to the archive
2016-07-21 08:33:36,788 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/systemd/system/.wh.getty.target' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:36,788 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/dbf1ba70edba86c927c15719195c38ef165baaa87f0b9dc9c48ef76d44737dbf/layer.tar'...
2016-07-21 08:33:37,159 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/dbf1ba70edba86c927c15719195c38ef165baaa87f0b9dc9c48ef76d44737dbf/layer.tar' archive...
2016-07-21 08:33:37,161 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:37,181 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/ssh' file because it's older than file already added to the archive
2016-07-21 08:33:37,183 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/ssh/sshd_config' file because it's older than file already added to the archive
2016-07-21 08:33:37,189 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd' file because it's older than file already added to the archive
2016-07-21 08:33:37,189 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd/system' file because it's older than file already added to the archive
2016-07-21 08:33:37,189 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/systemd/system/multi-user.target.wants' file because it's older than file already added to the archive
2016-07-21 08:33:37,191 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr' file because it's older than file already added to the archive
2016-07-21 08:33:37,215 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib' file because it's older than file already added to the archive
2016-07-21 08:33:37,333 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/systemd' file because it's older than file already added to the archive
2016-07-21 08:33:37,334 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/systemd/system' file because it's older than file already added to the archive
2016-07-21 08:33:37,517 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/sbin' file because it's older than file already added to the archive
2016-07-21 08:33:37,625 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var' file because it's older than file already added to the archive
2016-07-21 08:33:37,628 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib' file because it's older than file already added to the archive
2016-07-21 08:33:37,652 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm' file because it's older than file already added to the archive
2016-07-21 08:33:37,657 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Group' file because it's older than file already added to the archive
2016-07-21 08:33:37,658 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Installtid' file because it's older than file already added to the archive
2016-07-21 08:33:37,658 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Name' file because it's older than file already added to the archive
2016-07-21 08:33:37,659 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Packages' file because it's older than file already added to the archive
2016-07-21 08:33:37,659 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Providename' file because it's older than file already added to the archive
2016-07-21 08:33:37,661 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Sha1header' file because it's older than file already added to the archive
2016-07-21 08:33:37,662 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.001' file because it's older than file already added to the archive
2016-07-21 08:33:37,663 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.002' file because it's older than file already added to the archive
2016-07-21 08:33:37,663 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.003' file because it's older than file already added to the archive
2016-07-21 08:33:37,931 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/log' file because it's older than file already added to the archive
2016-07-21 08:33:37,940 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/28995220d92565311a680db8e19a0678ece71608a7bb438779bf802aeb0ecf25/layer.tar'...
2016-07-21 08:33:37,943 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/28995220d92565311a680db8e19a0678ece71608a7bb438779bf802aeb0ecf25/layer.tar' archive...
2016-07-21 08:33:37,943 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:37,944 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/25197a0411b14fd70d3aa675fb01ba89e703603ecf8924aaf46eefcced544027/layer.tar'...
2016-07-21 08:33:38,250 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/25197a0411b14fd70d3aa675fb01ba89e703603ecf8924aaf46eefcced544027/layer.tar' archive...
2016-07-21 08:33:38,252 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/rpmdb-indexes/.wh.conflicts' marker file
2016-07-21 08:33:38,252 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/rpmdb-indexes/.wh.file-requires' marker file
2016-07-21 08:33:38,252 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/rpmdb-indexes/.wh.obsoletes' marker file
2016-07-21 08:33:38,252 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/rpmdb-indexes/.wh.pkgtups-checksums' marker file
2016-07-21 08:33:38,253 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/rpmdb-indexes/.wh.version' marker file
2016-07-21 08:33:38,253 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/b/.wh.af68a0f05a10ae1013a9d128a322d95bd2d2253c-bash-4.2.46-19.el7-x86_64' marker file
2016-07-21 08:33:38,253 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/c/.wh.0d55537940fff88ff97b377bf78c1cb52d99374b-coreutils-8.22-15.el7_2.1-x86_64' marker file
2016-07-21 08:33:38,253 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/d/.wh.dfdb080818dfd2e02836534dd7c19e821e2b1e8c-device-mapper-1.02.107-5.el7_2.4-x86_64' marker file
2016-07-21 08:33:38,254 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/d/.wh.f3565c38beb21b718dce196260cb748e64665dd7-device-mapper-libs-1.02.107-5.el7_2.4-x86_64' marker file
2016-07-21 08:33:38,254 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/l/.wh.c07ac9e951c85c7131a4c486a02a6300b70c3d6a-libxml2-2.9.1-6.el7_2.2-x86_64' marker file
2016-07-21 08:33:38,254 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/l/.wh.c66656ef67456a38385b0177a27456a780cb21eb-libxml2-python-2.9.1-6.el7_2.2-x86_64' marker file
2016-07-21 08:33:38,255 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/r/.wh.b96bc12559ab41298e99719fd6a0564fbf646f69-redhat-release-server-7.2-9.el7-x86_64' marker file
2016-07-21 08:33:38,255 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/s/.wh.42fe7434d9c2e306eacd4dc4bd7713efb431595e-sed-4.2.2-5.el7-x86_64' marker file
2016-07-21 08:33:38,255 - atomic_reactor.plugins.squash - DEBUG - Found 'var/lib/yum/yumdb/t/.wh.a39e4ca78e7a994a840cbd96cff6584d1ed31118-tzdata-2016d-1.el7-noarch' marker file
2016-07-21 08:33:38,255 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:38,259 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/ld.so.cache' file because it's older than file already added to the archive
2016-07-21 08:33:38,271 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/yum.repos.d' file because it's older than file already added to the archive
2016-07-21 08:33:38,273 - atomic_reactor.plugins.squash - DEBUG - Skipping 'root' file because it's older than file already added to the archive
2016-07-21 08:33:38,274 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr' file because it's older than file already added to the archive
2016-07-21 08:33:38,275 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/bin' file because it's older than file already added to the archive
2016-07-21 08:33:38,344 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib' file because it's older than file already added to the archive
2016-07-21 08:33:38,345 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/systemd' file because it's older than file already added to the archive
2016-07-21 08:33:38,345 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/systemd/system' file because it's older than file already added to the archive
2016-07-21 08:33:38,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/udev' file because it's older than file already added to the archive
2016-07-21 08:33:38,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/udev/rules.d' file because it's older than file already added to the archive
2016-07-21 08:33:38,350 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib64' file because it's older than file already added to the archive
2016-07-21 08:33:38,365 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/libexec' file because it's older than file already added to the archive
2016-07-21 08:33:38,367 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/sbin' file because it's older than file already added to the archive
2016-07-21 08:33:38,370 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/share' file because it's older than file already added to the archive
2016-07-21 08:33:39,200 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var' file because it's older than file already added to the archive
2016-07-21 08:33:39,200 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache' file because it's older than file already added to the archive
2016-07-21 08:33:39,201 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache/ldconfig' file because it's older than file already added to the archive
2016-07-21 08:33:39,201 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache/ldconfig/aux-cache' file because it's older than file already added to the archive
2016-07-21 08:33:39,202 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache/yum' file because it's older than file already added to the archive
2016-07-21 08:33:39,202 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache/yum/x86_64' file because it's older than file already added to the archive
2016-07-21 08:33:39,202 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache/yum/x86_64/7Server' file because it's older than file already added to the archive
2016-07-21 08:33:39,311 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/cache/yum/x86_64/7Server/timedhosts' file because it's older than file already added to the archive
2016-07-21 08:33:39,311 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib' file because it's older than file already added to the archive
2016-07-21 08:33:39,313 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm' file because it's older than file already added to the archive
2016-07-21 08:33:39,314 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Basenames' file because it's older than file already added to the archive
2016-07-21 08:33:39,314 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Conflictname' file because it's older than file already added to the archive
2016-07-21 08:33:39,315 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Dirnames' file because it's older than file already added to the archive
2016-07-21 08:33:39,315 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Group' file because it's older than file already added to the archive
2016-07-21 08:33:39,317 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Installtid' file because it's older than file already added to the archive
2016-07-21 08:33:39,318 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Name' file because it's older than file already added to the archive
2016-07-21 08:33:39,319 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Obsoletename' file because it's older than file already added to the archive
2016-07-21 08:33:39,319 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Packages' file because it's older than file already added to the archive
2016-07-21 08:33:39,320 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Providename' file because it's older than file already added to the archive
2016-07-21 08:33:39,321 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Requirename' file because it's older than file already added to the archive
2016-07-21 08:33:39,322 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Sha1header' file because it's older than file already added to the archive
2016-07-21 08:33:39,323 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Sigmd5' file because it's older than file already added to the archive
2016-07-21 08:33:39,324 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.001' file because it's older than file already added to the archive
2016-07-21 08:33:39,324 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.002' file because it's older than file already added to the archive
2016-07-21 08:33:39,325 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.003' file because it's older than file already added to the archive
2016-07-21 08:33:39,325 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum' file because it's older than file already added to the archive
2016-07-21 08:33:39,326 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/history' file because it's older than file already added to the archive
2016-07-21 08:33:39,327 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/history/2016-06-16' file because it's older than file already added to the archive
2016-07-21 08:33:39,331 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/history/history-2016-06-16.sqlite' file because it's older than file already added to the archive
2016-07-21 08:33:39,332 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/history/history-2016-06-16.sqlite-journal' file because it's older than file already added to the archive
2016-07-21 08:33:39,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.conflicts' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.file-requires' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.obsoletes' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.pkgtups-checksums' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,348 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.version' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,349 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb' file because it's older than file already added to the archive
2016-07-21 08:33:39,349 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/b' file because it's older than file already added to the archive
2016-07-21 08:33:39,358 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/b/.wh.af68a0f05a10ae1013a9d128a322d95bd2d2253c-bash-4.2.46-19.el7-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,359 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/c' file because it's older than file already added to the archive
2016-07-21 08:33:39,366 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/c/.wh.0d55537940fff88ff97b377bf78c1cb52d99374b-coreutils-8.22-15.el7_2.1-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,366 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/d' file because it's older than file already added to the archive
2016-07-21 08:33:39,381 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/d/.wh.dfdb080818dfd2e02836534dd7c19e821e2b1e8c-device-mapper-1.02.107-5.el7_2.4-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,381 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/d/.wh.f3565c38beb21b718dce196260cb748e64665dd7-device-mapper-libs-1.02.107-5.el7_2.4-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,381 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/l' file because it's older than file already added to the archive
2016-07-21 08:33:39,387 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/l/.wh.c07ac9e951c85c7131a4c486a02a6300b70c3d6a-libxml2-2.9.1-6.el7_2.2-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,388 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/l/.wh.c66656ef67456a38385b0177a27456a780cb21eb-libxml2-python-2.9.1-6.el7_2.2-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,394 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/r' file because it's older than file already added to the archive
2016-07-21 08:33:39,394 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/r/.wh.b96bc12559ab41298e99719fd6a0564fbf646f69-redhat-release-server-7.2-9.el7-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,402 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/s' file because it's older than file already added to the archive
2016-07-21 08:33:39,403 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/s/.wh.42fe7434d9c2e306eacd4dc4bd7713efb431595e-sed-4.2.2-5.el7-x86_64' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,409 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/t' file because it's older than file already added to the archive
2016-07-21 08:33:39,415 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/t/.wh.a39e4ca78e7a994a840cbd96cff6584d1ed31118-tzdata-2016d-1.el7-noarch' marker file, at the end of squashing we'll see if it's necessary to add it back
2016-07-21 08:33:39,415 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/log' file because it's older than file already added to the archive
2016-07-21 08:33:39,416 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/log/rhsm' file because it's older than file already added to the archive
2016-07-21 08:33:39,416 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/log/rhsm/rhsm.log' file because it's older than file already added to the archive
2016-07-21 08:33:39,417 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/log/yum.log' file because it's older than file already added to the archive
2016-07-21 08:33:39,417 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/e480600995182186ed587a8339e775f2a994ac69e8f9e5605321d7b17a4f183b/layer.tar'...
2016-07-21 08:33:39,421 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/e480600995182186ed587a8339e775f2a994ac69e8f9e5605321d7b17a4f183b/layer.tar' archive...
2016-07-21 08:33:39,422 - atomic_reactor.plugins.squash - DEBUG - Skipping 'run' file because it's older than file already added to the archive
2016-07-21 08:33:39,423 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var' file because it's older than file already added to the archive
2016-07-21 08:33:39,423 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib' file because it's older than file already added to the archive
2016-07-21 08:33:39,424 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm' file because it's older than file already added to the archive
2016-07-21 08:33:39,424 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Group' file because it's older than file already added to the archive
2016-07-21 08:33:39,424 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Installtid' file because it's older than file already added to the archive
2016-07-21 08:33:39,425 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Name' file because it's older than file already added to the archive
2016-07-21 08:33:39,425 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Packages' file because it's older than file already added to the archive
2016-07-21 08:33:39,425 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Providename' file because it's older than file already added to the archive
2016-07-21 08:33:39,426 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/Sha1header' file because it's older than file already added to the archive
2016-07-21 08:33:39,426 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.001' file because it's older than file already added to the archive
2016-07-21 08:33:39,426 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.002' file because it's older than file already added to the archive
2016-07-21 08:33:39,427 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/rpm/__db.003' file because it's older than file already added to the archive
2016-07-21 08:33:39,427 - atomic_reactor.plugins.squash - INFO - Squashing file '/tmp/docker-squash-rXDNxo/old/42134c2195d29c68166fcc5643d8cc00a18f0f431b177815612939ea35f683b0/layer.tar'...
2016-07-21 08:33:39,428 - atomic_reactor.plugins.squash - DEBUG - Searching for marker files in '/tmp/docker-squash-rXDNxo/old/42134c2195d29c68166fcc5643d8cc00a18f0f431b177815612939ea35f683b0/layer.tar' archive...
2016-07-21 08:33:39,428 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc' file because it's older than file already added to the archive
2016-07-21 08:33:39,429 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/yum.repos.d' file because it's older than file already added to the archive
2016-07-21 08:33:39,429 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/yum.repos.d/rhgs-docker.repo' file because it's on the list to skip files
2016-07-21 08:33:40,170 - atomic_reactor.plugins.squash - DEBUG - Generating list of files in layer '94f79d4a6ca60db40b3095e4dd367211092685793d2c6f113b86134ade4fb2ab'...
2016-07-21 08:33:41,803 - atomic_reactor.plugins.squash - DEBUG - Done, found 11085 files
2016-07-21 08:33:41,817 - atomic_reactor.plugins.squash - DEBUG - Marker files to add: ['var/lib/yum/rpmdb-indexes/.wh.conflicts', 'var/lib/yum/yumdb/l/.wh.c07ac9e951c85c7131a4c486a02a6300b70c3d6a-libxml2-2.9.1-6.el7_2.2-x86_64', 'var/lib/yum/yumdb/c/.wh.0d55537940fff88ff97b377bf78c1cb52d99374b-coreutils-8.22-15.el7_2.1-x86_64', 'var/lib/yum/rpmdb-indexes/.wh.pkgtups-checksums', 'var/lib/yum/rpmdb-indexes/.wh.file-requires', 'var/lib/yum/yumdb/t/.wh.a39e4ca78e7a994a840cbd96cff6584d1ed31118-tzdata-2016d-1.el7-noarch', 'var/lib/yum/yumdb/r/.wh.b96bc12559ab41298e99719fd6a0564fbf646f69-redhat-release-server-7.2-9.el7-x86_64', 'var/lib/yum/rpmdb-indexes/.wh.obsoletes', 'var/lib/yum/yumdb/s/.wh.42fe7434d9c2e306eacd4dc4bd7713efb431595e-sed-4.2.2-5.el7-x86_64', 'var/lib/yum/yumdb/l/.wh.c66656ef67456a38385b0177a27456a780cb21eb-libxml2-python-2.9.1-6.el7_2.2-x86_64', 'var/lib/yum/rpmdb-indexes/.wh.version', 'var/lib/yum/yumdb/d/.wh.f3565c38beb21b718dce196260cb748e64665dd7-device-mapper-libs-1.02.107-5.el7_2.4-x86_64', 'etc/yum.repos.d/.wh.rhgs-docker.repo', 'usr/lib/systemd/system/.wh.getty.target', 'var/lib/yum/yumdb/b/.wh.af68a0f05a10ae1013a9d128a322d95bd2d2253c-bash-4.2.46-19.el7-x86_64', 'var/lib/yum/yumdb/d/.wh.dfdb080818dfd2e02836534dd7c19e821e2b1e8c-device-mapper-1.02.107-5.el7_2.4-x86_64']
2016-07-21 08:33:41,821 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.conflicts' marker file...
2016-07-21 08:33:41,822 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/l/.wh.c07ac9e951c85c7131a4c486a02a6300b70c3d6a-libxml2-2.9.1-6.el7_2.2-x86_64' marker file...
2016-07-21 08:33:41,823 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/c/.wh.0d55537940fff88ff97b377bf78c1cb52d99374b-coreutils-8.22-15.el7_2.1-x86_64' marker file...
2016-07-21 08:33:41,824 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.pkgtups-checksums' marker file...
2016-07-21 08:33:41,825 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.file-requires' marker file...
2016-07-21 08:33:41,826 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/t/.wh.a39e4ca78e7a994a840cbd96cff6584d1ed31118-tzdata-2016d-1.el7-noarch' marker file...
2016-07-21 08:33:41,827 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/r/.wh.b96bc12559ab41298e99719fd6a0564fbf646f69-redhat-release-server-7.2-9.el7-x86_64' marker file...
2016-07-21 08:33:41,828 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.obsoletes' marker file...
2016-07-21 08:33:41,829 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/s/.wh.42fe7434d9c2e306eacd4dc4bd7713efb431595e-sed-4.2.2-5.el7-x86_64' marker file...
2016-07-21 08:33:41,830 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/l/.wh.c66656ef67456a38385b0177a27456a780cb21eb-libxml2-python-2.9.1-6.el7_2.2-x86_64' marker file...
2016-07-21 08:33:41,831 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/rpmdb-indexes/.wh.version' marker file...
2016-07-21 08:33:41,832 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/d/.wh.f3565c38beb21b718dce196260cb748e64665dd7-device-mapper-libs-1.02.107-5.el7_2.4-x86_64' marker file...
2016-07-21 08:33:41,834 - atomic_reactor.plugins.squash - DEBUG - Skipping 'etc/yum.repos.d/.wh.rhgs-docker.repo' marker file...
2016-07-21 08:33:41,835 - atomic_reactor.plugins.squash - DEBUG - Skipping 'usr/lib/systemd/system/.wh.getty.target' marker file...
2016-07-21 08:33:41,836 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/b/.wh.af68a0f05a10ae1013a9d128a322d95bd2d2253c-bash-4.2.46-19.el7-x86_64' marker file...
2016-07-21 08:33:41,838 - atomic_reactor.plugins.squash - DEBUG - Skipping 'var/lib/yum/yumdb/d/.wh.dfdb080818dfd2e02836534dd7c19e821e2b1e8c-device-mapper-1.02.107-5.el7_2.4-x86_64' marker file...
2016-07-21 08:33:41,839 - atomic_reactor.plugins.squash - INFO - Squashing finished!
2016-07-21 08:33:45,324 - atomic_reactor.plugins.squash - DEBUG - Moving unmodified layer '94f79d4a6ca60db40b3095e4dd367211092685793d2c6f113b86134ade4fb2ab'...
2016-07-21 08:33:45,326 - atomic_reactor.plugins.squash - INFO - New squashed image ID is 4b406ac86d1ef8b15580141883c49901f20909347446095719eea85e81094cdd
2016-07-21 08:33:45,329 - atomic_reactor.plugins.squash - DEBUG - Generating tar archive for the squashed image...
2016-07-21 08:33:46,195 - atomic_reactor.plugins.squash - DEBUG - Archive generated
2016-07-21 08:33:46,195 - atomic_reactor.plugins.squash - INFO - Image available at '/tmp/tmp5cg_J_/image.tar'
2016-07-21 08:33:46,196 - atomic_reactor.plugins.squash - DEBUG - Generating tar archive for the squashed image...
2016-07-21 08:33:48,727 - atomic_reactor.plugins.squash - DEBUG - Archive generated
2016-07-21 08:33:48,728 - atomic_reactor.plugins.squash - DEBUG - Loading squashed image...
2016-07-21 08:34:09,220 - atomic_reactor.plugins.squash - DEBUG - Image loaded!
2016-07-21 08:34:09,342 - atomic_reactor.plugins.squash - INFO - Image registered in Docker daemon as vrutkovs/buildroot:none-20160721141845
2016-07-21 08:34:09,342 - atomic_reactor.plugins.squash - DEBUG - Cleaning up /tmp/docker-squash-rXDNxo temporary directory
2016-07-21 08:34:09,546 - atomic_reactor.plugins.squash - INFO - Done
``` | goldmann/docker-squash | diff --git a/tests/test_unit_v1_image.py b/tests/test_unit_v1_image.py
index f742365..6316399 100644
--- a/tests/test_unit_v1_image.py
+++ b/tests/test_unit_v1_image.py
@@ -274,6 +274,31 @@ class TestAddMarkers(unittest.TestCase):
self.assertTrue(len(tar.addfile.mock_calls) == 0)
+ # https://github.com/goldmann/docker-squash/issues/108
+ def test_should_add_marker_file_when_tar_has_prefixed_entries(self):
+ tar = mock.Mock()
+ # Files already in tar
+ tar.getnames.return_value = ['./abc', './def']
+
+ marker_1 = mock.Mock()
+ type(marker_1).name = mock.PropertyMock(return_value='.wh.some/file')
+ marker_2 = mock.Mock()
+ type(marker_2).name = mock.PropertyMock(return_value='.wh.file2')
+
+ markers = {marker_1: 'filecontent1', marker_2: 'filecontent2'}
+
+ # List of layers to move (and files in these layers)
+ self.squash._add_markers(markers, tar, {'1234layerdid': ['./some/file', './other/file', './stuff']})
+
+ self.assertEqual(len(tar.addfile.mock_calls), 1)
+ tar_info, marker_file = tar.addfile.call_args[0]
+ self.assertIsInstance(tar_info, tarfile.TarInfo)
+ # We need to add the marker file because we need to
+ # override the already existing file
+ self.assertEqual(marker_file, 'filecontent1')
+ self.assertTrue(tar_info.isfile())
+
+
class TestGeneral(unittest.TestCase):
def setUp(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_git_commit_hash"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docker-py==1.10.6
docker-pycreds==0.4.0
-e git+https://github.com/goldmann/docker-squash.git@afd81aefb5230ef8f001fec544e8649984427444#egg=docker_squash
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
websocket-client==1.8.0
| name: docker-squash
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docker-py==1.10.6
- docker-pycreds==0.4.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- websocket-client==1.8.0
prefix: /opt/conda/envs/docker-squash
| [
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_add_marker_file_when_tar_has_prefixed_entries"
]
| []
| [
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_not_skip_file_not_in_path_to_skip",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_not_skip_the_file_that_name_is_similar_to_skipped_path",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_skip_exact_files",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_skip_files_in_subdirectory",
"tests/test_unit_v1_image.py::TestParseImageName::test_should_parse_name_name_with_proper_tag",
"tests/test_unit_v1_image.py::TestParseImageName::test_should_parse_name_name_without_tag",
"tests/test_unit_v1_image.py::TestPrepareTemporaryDirectory::test_create_tmp_directory_if_not_provided",
"tests/test_unit_v1_image.py::TestPrepareTemporaryDirectory::test_should_raise_if_directory_already_exists",
"tests/test_unit_v1_image.py::TestPrepareTemporaryDirectory::test_should_use_provided_tmp_dir",
"tests/test_unit_v1_image.py::TestPrepareLayersToSquash::test_should_generate_list_of_layers",
"tests/test_unit_v1_image.py::TestPrepareLayersToSquash::test_should_not_fail_with_empty_list_of_layers",
"tests/test_unit_v1_image.py::TestPrepareLayersToSquash::test_should_return_all_layers_if_from_layer_is_not_found",
"tests/test_unit_v1_image.py::TestGenerateV1ImageId::test_should_generate_id",
"tests/test_unit_v1_image.py::TestGenerateV1ImageId::test_should_generate_id_that_is_not_integer_shen_shortened",
"tests/test_unit_v1_image.py::TestGenerateRepositoriesJSON::test_generate_json",
"tests/test_unit_v1_image.py::TestGenerateRepositoriesJSON::test_handle_empty_image_id",
"tests/test_unit_v1_image.py::TestGenerateRepositoriesJSON::test_should_not_generate_repositories_if_name_and_tag_is_missing",
"tests/test_unit_v1_image.py::TestMarkerFiles::test_should_find_all_marker_files",
"tests/test_unit_v1_image.py::TestMarkerFiles::test_should_return_empty_dict_when_no_files_are_in_the_tar",
"tests/test_unit_v1_image.py::TestMarkerFiles::test_should_return_empty_dict_when_no_marker_files_are_found",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_add_all_marker_files_to_empty_tar",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_not_add_any_marker_files",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_not_fail_with_empty_list_of_markers_to_add",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_skip_a_marker_file_if_file_is_in_unsquashed_layers",
"tests/test_unit_v1_image.py::TestGeneral::test_exit_if_no_output_path_provided_and_loading_is_disabled_too",
"tests/test_unit_v1_image.py::TestGeneral::test_handle_case_when_no_image_is_provided"
]
| []
| MIT License | 662 | [
"docker_squash/image.py"
]
| [
"docker_squash/image.py"
]
|
|
zalando-stups__senza-280 | 46c3172d27a4e02375f71a3aee408e73c668b5e0 | 2016-07-26 14:24:47 | 35b73f49b8cb58e7892908413bdf2a61cfe3058e | diff --git a/senza/manaus/acm.py b/senza/manaus/acm.py
index 0c16faa..ad918e8 100644
--- a/senza/manaus/acm.py
+++ b/senza/manaus/acm.py
@@ -80,16 +80,16 @@ class ACMCertificate:
arn = certificate['CertificateArn']
subject_alternative_name = certificate['SubjectAlternativeNames']
domain_validation_options = certificate['DomainValidationOptions']
- serial = certificate['Serial']
subject = certificate['Subject']
- issuer = certificate['Issuer']
created_at = certificate['CreatedAt']
- issued_at = certificate['IssuedAt']
status = certificate['Status']
- not_before = certificate['NotBefore']
- not_after = certificate['NotAfter']
signature_algorithm = certificate['SignatureAlgorithm']
in_use_by = certificate['InUseBy']
+ serial = certificate.get('Serial')
+ issuer = certificate.get('Issuer')
+ issued_at = certificate.get('IssuedAt')
+ not_before = certificate.get('NotBefore')
+ not_after = certificate.get('NotAfter')
revoked_at = certificate.get('RevokedAt')
revocation_reason = certificate.get('RevocationReason')
| ACM Cert lookup fails with KeyError
```
Generating Cloud Formation template.. EXCEPTION OCCURRED: 'Serial'
Unknown Error: 'Serial'.
Please create an issue with the content of /tmp/senza-traceback-078eseqg
$ cat /tmp/senza-traceback-078eseqg
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/senza/error_handling.py", line 69, in __call__
self.function(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.5/dist-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.5/dist-packages/senza/cli.py", line 555, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/usr/local/lib/python3.5/dist-packages/senza/cli.py", line 638, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/usr/local/lib/python3.5/dist-packages/senza/cli.py", line 239, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/usr/local/lib/python3.5/dist-packages/senza/components/weighted_dns_elastic_load_balancer.py", line 29, in component_weighted_dns_elastic_load_balancer
return component_elastic_load_balancer(definition, configuration, args, info, force, account_info)
File "/usr/local/lib/python3.5/dist-packages/senza/components/elastic_load_balancer.py", line 110, in component_elastic_load_balancer
listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration)
File "/usr/local/lib/python3.5/dist-packages/senza/components/elastic_load_balancer.py", line 48, in get_listeners
reverse=True)
File "/usr/local/lib/python3.5/dist-packages/senza/manaus/acm.py", line 173, in get_certificates
certificate = ACMCertificate.get_by_arn(arn)
File "/usr/local/lib/python3.5/dist-packages/senza/manaus/acm.py", line 110, in get_by_arn
return cls.from_boto_dict(certificate)
File "/usr/local/lib/python3.5/dist-packages/senza/manaus/acm.py", line 83, in from_boto_dict
serial = certificate['Serial']
KeyError: 'Serial'
```
The cert has status "'VALIDATION_TIMED_OUT" in the error case. | zalando-stups/senza | diff --git a/tests/test_manaus/test_acm.py b/tests/test_manaus/test_acm.py
index 13691ed..51e12d4 100644
--- a/tests/test_manaus/test_acm.py
+++ b/tests/test_manaus/test_acm.py
@@ -85,6 +85,24 @@ CERT2 = {'CertificateArn': 'arn:aws:acm:eu-west-1:cert2',
'*.senza.aws.example.net',
'*.app.example.net']}
+CERT_VALIDATION_TIMED_OUT = {
+ 'KeyAlgorithm': 'RSA-2048',
+ 'DomainName': 'alpha.example.org',
+ 'InUseBy': [],
+ 'CreatedAt': datetime(2016, 7, 11, 15, 15, 30),
+ 'SubjectAlternativeNames': ['alpha.example.org'],
+ 'SignatureAlgorithm': 'SHA256WITHRSA',
+ 'Status': 'VALIDATION_TIMED_OUT',
+ 'DomainValidationOptions': [{'DomainName': 'alpha.example.org',
+ 'ValidationEmails': ['[email protected]',
+ '[email protected]',
+ '[email protected]',
+ '[email protected]',
+ '[email protected]'],
+ 'ValidationDomain': 'alpha.example.org'}],
+ 'CertificateArn': 'arn:aws:acm:eu-central-1:123123:certificate/f8a0fa1a-381b-44b6-ab10-1b94ba1480a1',
+ 'Subject': 'CN=alpha.example.org'}
+
def test_certificate_valid():
certificate1 = ACMCertificate.from_boto_dict(CERT1)
@@ -108,6 +126,9 @@ def test_certificate_valid():
assert not certificate1_revoked.is_valid(when=datetime(2013, 4, 2, 10, 11, 12,
tzinfo=timezone.utc))
+ cert_invalid = ACMCertificate.from_boto_dict(CERT_VALIDATION_TIMED_OUT)
+ assert not cert_invalid.is_valid()
+
def test_certificate_comparison():
cert2 = CERT1.copy()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@46c3172d27a4e02375f71a3aee408e73c668b5e0#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_acm.py::test_certificate_valid"
]
| []
| [
"tests/test_manaus/test_acm.py::test_certificate_comparison",
"tests/test_manaus/test_acm.py::test_certificate_get_by_arn",
"tests/test_manaus/test_acm.py::test_certificate_matches",
"tests/test_manaus/test_acm.py::test_get_certificates",
"tests/test_manaus/test_acm.py::test_arn_is_acm_certificate"
]
| []
| Apache License 2.0 | 663 | [
"senza/manaus/acm.py"
]
| [
"senza/manaus/acm.py"
]
|
|
tornadoweb__tornado-1782 | 4c95212cadfe8de91cc3f1d2e7644b8255c0d6b1 | 2016-07-26 18:46:20 | ecd8968c5135b810cd607b5902dda2cd32122b39 | AaronOpfer: Looks like my `__del__` method has some problems, I'll fix them.
AaronOpfer: It looks like implementing the `__del__` method broke various unit tests which expected no consequences of letting half-finished coroutines get garbage collected. These test cases pass but log errors about uncaught future exceptions, failing the overall test. I fixed one instance of this in `queues_test.py` locally but there seems to be many more instances where this can occur.
I can continue to clean up instances like these but first I'd like a Tornado developer to tell me if I'm on the right path with this or if I should just leave the `__del__` change out and quit while I'm ahead.
bdarnell: > It is difficult to imagine a situation where Runners being garbage collected before the future has a result being a desired behavior, though.
It doesn't make sense for the `Runner` to be collected *before* the `Future`, but they could be collected at the same time. I think that's what's happening in the tests. I think this is legitimate and we want to allow it. Besides, after the first change to use the `WeakKeyDictionary`, what other scenarios would this destructor be guarding against? I'd just get rid of it.
I think I'd make the `WeakKeyDictionary` a global. I'm not seeing any benefit to giving each coroutine function its own dict (which could possibly go out of scope if the decorated function does.
I'm concerned about the performance impact of a `WeakKeyDictionary` - will it increase GC overhead? I'd like to see some benchmarks showing no significant impact before merging this.
AaronOpfer: I agree with your points. I'll remove the `__del__` method and make the `WeakKeyDictionary` a global.
I agree that we should test the performance. Does Tornado have any existing performance benchmarking suites we can use for testing performance after my changes?
bdarnell: We don't have great benchmarks. What we do have is in `demos/benchmark/benchmark.py` (and a few other scripts, but benchmark.py is the main one). It should be enough to show the impact of this change. Run it with `--keepalive` since that removes some of the networking overhead.
demos/benchmark/benchmark.py --keepalive --quiet --num_runs=5|grep "Requests per second"
AaronOpfer: I ran the performance benchmarks and I saw no differences between my implementation and the previous implementation. The tests were highly variable, I ran each multiple times and they would vary between 1100 req/sec and 1600 req/sec.
AaronOpfer: I believe this changeset addresses all of your concerns @bdarnell , let me know if you'd like to see any other adjustments. | diff --git a/.travis.yml b/.travis.yml
index 7d6b79a5..16f61830 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -30,13 +30,9 @@ install:
- curl-config --version; pip freeze
script:
- # Run the tests once from the source directory to detect issues
- # involving relative __file__ paths; see
- # https://github.com/tornadoweb/tornado/issues/1780
- - unset TORNADO_EXTENSION && python -m tornado.test
- # For all other test variants, get out of the source directory before
- # running tests to ensure that we get the installed speedups module
- # instead of the source directory which doesn't have it.
+ # Get out of the source directory before running tests to avoid PYTHONPATH
+ # confusion. This is necessary to ensure that the speedups module can
+ # be found in the installation directory.
- cd maint
# Copy the coveragerc down so coverage.py can find it.
- cp ../.coveragerc .
diff --git a/demos/chat/static/chat.js b/demos/chat/static/chat.js
index 151a5880..0054c710 100644
--- a/demos/chat/static/chat.js
+++ b/demos/chat/static/chat.js
@@ -16,16 +16,15 @@ $(document).ready(function() {
if (!window.console) window.console = {};
if (!window.console.log) window.console.log = function() {};
- $("#messageform").on("submit", function() {
+ $("#messageform").live("submit", function() {
newMessage($(this));
return false;
});
- $("#messageform").on("keypress", function(e) {
+ $("#messageform").live("keypress", function(e) {
if (e.keyCode == 13) {
newMessage($(this));
return false;
}
- return true;
});
$("#message").select();
updater.poll();
@@ -57,13 +56,13 @@ jQuery.postJSON = function(url, args, callback) {
success: function(response) {
if (callback) callback(eval("(" + response + ")"));
}, error: function(response) {
- console.log("ERROR:", response);
+ console.log("ERROR:", response)
}});
};
jQuery.fn.formToDict = function() {
var fields = this.serializeArray();
- var json = {};
+ var json = {}
for (var i = 0; i < fields.length; i++) {
json[fields[i].name] = fields[i].value;
}
diff --git a/demos/chat/templates/index.html b/demos/chat/templates/index.html
index 58433b44..8916c350 100644
--- a/demos/chat/templates/index.html
+++ b/demos/chat/templates/index.html
@@ -16,7 +16,7 @@
<form action="/a/message/new" method="post" id="messageform">
<table>
<tr>
- <td><input type="text" name="body" id="message" style="width:500px"></td>
+ <td><input name="body" id="message" style="width:500px"></td>
<td style="padding-left:5px">
<input type="submit" value="{{ _("Post") }}">
<input type="hidden" name="next" value="{{ request.path }}">
@@ -27,7 +27,7 @@
</form>
</div>
</div>
- <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js" type="text/javascript"></script>
+ <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.3/jquery.min.js" type="text/javascript"></script>
<script src="{{ static_url("chat.js") }}" type="text/javascript"></script>
</body>
</html>
diff --git a/demos/tcpecho/README.md b/demos/tcpecho/README.md
deleted file mode 100644
index 60d0b70c..00000000
--- a/demos/tcpecho/README.md
+++ /dev/null
@@ -1,30 +0,0 @@
-TCP echo demo
-=============
-
-This demo shows how to use Tornado's asynchronous TCP client and
-server by implementing `handle_stream` as a coroutine.
-
-To run the server:
-
-```
-$ python server.py
-```
-
-The client will send the message given with the `--message` option
-(which defaults to "ping"), wait for a response, then quit. To run:
-
-```
-$ python client.py --message="your message here"
-```
-
-Alternatively, you can interactively send messages to the echo server
-with a telnet client. For example:
-
-```
-$ telnet localhost 9888
-Trying ::1...
-Connected to localhost.
-Escape character is '^]'.
-ping
-ping
-```
diff --git a/demos/tcpecho/client.py b/demos/tcpecho/client.py
deleted file mode 100644
index a369fa47..00000000
--- a/demos/tcpecho/client.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from __future__ import print_function
-from tornado.ioloop import IOLoop
-from tornado import gen
-from tornado.tcpclient import TCPClient
-from tornado.options import options, define
-
-define("host", default="localhost", help="TCP server host")
-define("port", default=9888, help="TCP port to connect to")
-define("message", default="ping", help="Message to send")
-
-
[email protected]
-def send_message():
- stream = yield TCPClient().connect(options.host, options.port)
- yield stream.write((options.message + "\n").encode())
- print("Sent to server:", options.message)
- reply = yield stream.read_until(b"\n")
- print("Response from server:", reply.decode().strip())
-
-
-if __name__ == "__main__":
- options.parse_command_line()
- IOLoop.current().run_sync(send_message)
diff --git a/demos/tcpecho/server.py b/demos/tcpecho/server.py
deleted file mode 100644
index bc0b054a..00000000
--- a/demos/tcpecho/server.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import logging
-from tornado.ioloop import IOLoop
-from tornado import gen
-from tornado.iostream import StreamClosedError
-from tornado.tcpserver import TCPServer
-from tornado.options import options, define
-
-define("port", default=9888, help="TCP port to listen on")
-logger = logging.getLogger(__name__)
-
-
-class EchoServer(TCPServer):
- @gen.coroutine
- def handle_stream(self, stream, address):
- while True:
- try:
- data = yield stream.read_until(b"\n")
- logger.info("Received bytes: %s", data)
- if not data.endswith(b"\n"):
- data = data + b"\n"
- yield stream.write(data)
- except StreamClosedError:
- logger.warning("Lost client at host %s", address[0])
- break
- except Exception as e:
- print(e)
-
-
-if __name__ == "__main__":
- options.parse_command_line()
- server = EchoServer()
- server.listen(options.port)
- logger.info("Listening on TCP port %d", options.port)
- IOLoop.current().start()
diff --git a/demos/websocket/static/chat.js b/demos/websocket/static/chat.js
index 4e7ec049..b4bb18a9 100644
--- a/demos/websocket/static/chat.js
+++ b/demos/websocket/static/chat.js
@@ -16,11 +16,11 @@ $(document).ready(function() {
if (!window.console) window.console = {};
if (!window.console.log) window.console.log = function() {};
- $("#messageform").on("submit", function() {
+ $("#messageform").live("submit", function() {
newMessage($(this));
return false;
});
- $("#messageform").on("keypress", function(e) {
+ $("#messageform").live("keypress", function(e) {
if (e.keyCode == 13) {
newMessage($(this));
return false;
diff --git a/demos/websocket/templates/index.html b/demos/websocket/templates/index.html
index 91a45363..cbf2b23d 100644
--- a/demos/websocket/templates/index.html
+++ b/demos/websocket/templates/index.html
@@ -27,7 +27,7 @@
</form>
</div>
</div>
- <script src="http://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js" type="text/javascript"></script>
+ <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.3/jquery.min.js" type="text/javascript"></script>
<script src="{{ static_url("chat.js") }}" type="text/javascript"></script>
</body>
</html>
diff --git a/docs/releases/v4.1.0.rst b/docs/releases/v4.1.0.rst
index 29ad1914..c38511b8 100644
--- a/docs/releases/v4.1.0.rst
+++ b/docs/releases/v4.1.0.rst
@@ -24,7 +24,7 @@ Backwards-compatibility notes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* `.HTTPServer` now calls ``start_request`` with the correct
- arguments. This change is backwards-incompatible, affecting any
+ arguments. This change is backwards-incompatible, afffecting any
application which implemented `.HTTPServerConnectionDelegate` by
following the example of `.Application` instead of the documented
method signatures.
diff --git a/tornado/gen.py b/tornado/gen.py
index b308ca7d..73f9ba10 100644
--- a/tornado/gen.py
+++ b/tornado/gen.py
@@ -83,6 +83,7 @@ import os
import sys
import textwrap
import types
+import weakref
from tornado.concurrent import Future, TracebackFuture, is_future, chain_future
from tornado.ioloop import IOLoop
@@ -244,6 +245,24 @@ def coroutine(func, replace_callback=True):
"""
return _make_coroutine_wrapper(func, replace_callback=True)
+# Ties lifetime of runners to their result futures. Github Issue #1769
+# Generators, like any object in Python, must be strong referenced
+# in order to not be cleaned up by the garbage collector. When using
+# coroutines, the Runner object is what strong-refs the inner
+# generator. However, the only item that strong-reffed the Runner
+# was the last Future that the inner generator yielded (via the
+# Future's internal done_callback list). Usually this is enough, but
+# it is also possible for this Future to not have any strong references
+# other than other objects referenced by the Runner object (usually
+# when using other callback patterns and/or weakrefs). In this
+# situation, if a garbage collection ran, a cycle would be detected and
+# Runner objects could be destroyed along with their inner generators
+# and everything in their local scope.
+# This map provides strong references to Runner objects as long as
+# their result future objects also have strong references (typically
+# from the parent coroutine's Runner). This keeps the coroutine's
+# Runner alive.
+_futures_to_runners = weakref.WeakKeyDictionary()
def _make_coroutine_wrapper(func, replace_callback):
"""The inner workings of ``@gen.coroutine`` and ``@gen.engine``.
@@ -294,7 +313,7 @@ def _make_coroutine_wrapper(func, replace_callback):
except Exception:
future.set_exc_info(sys.exc_info())
else:
- Runner(result, future, yielded)
+ _futures_to_runners[future] = Runner(result, future, yielded)
try:
return future
finally:
diff --git a/tornado/platform/asyncio.py b/tornado/platform/asyncio.py
index 9556da61..3fd67dbd 100644
--- a/tornado/platform/asyncio.py
+++ b/tornado/platform/asyncio.py
@@ -14,9 +14,9 @@ loops.
.. note::
- Tornado requires the `~asyncio.AbstractEventLoop.add_reader` family of
- methods, so it is not compatible with the `~asyncio.ProactorEventLoop` on
- Windows. Use the `~asyncio.SelectorEventLoop` instead.
+ Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods,
+ so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows.
+ Use the `~asyncio.SelectorEventLoop` instead.
"""
from __future__ import absolute_import, division, print_function, with_statement
diff --git a/tornado/tcpserver.py b/tornado/tcpserver.py
index 54837f7a..0839d392 100644
--- a/tornado/tcpserver.py
+++ b/tornado/tcpserver.py
@@ -39,21 +39,7 @@ class TCPServer(object):
r"""A non-blocking, single-threaded TCP server.
To use `TCPServer`, define a subclass which overrides the `handle_stream`
- method. For example, a simple echo server could be defined like this::
-
- from tornado.tcpserver import TCPServer
- from tornado.iostream import StreamClosedError
- from tornado import gen
-
- class EchoServer(TCPServer):
- @gen.coroutine
- def handle_stream(self, stream, address):
- while True:
- try:
- data = yield stream.read_until(b"\n")
- yield stream.write(data)
- except StreamClosedError:
- break
+ method.
To make this server serve SSL traffic, send the ``ssl_options`` keyword
argument with an `ssl.SSLContext` object. For compatibility with older
diff --git a/tornado/web.py b/tornado/web.py
index 96b204eb..f54c4d03 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -527,7 +527,7 @@ class RequestHandler(object):
Additional keyword arguments are set on the Cookie.Morsel
directly.
- See https://docs.python.org/2/library/cookie.html#Cookie.Morsel
+ See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
| Runner objects can get GCed unexpectedly
Hi,
This is probably a little unusual and I am not sure I understand the whole object graph, but I believe I have reduced my production issue to this test case.
If a coroutine is waiting on a future, and the only hard references to that future are entirely inside the body of the inner generator of the coroutine, a garbage collection run can sweep away the in-progress coroutine and its runner object, leave the coroutine's future unresolved forever, and in some cases leave the application in a stuck state.
Ran on Python 3.4 with Tornado 4.3.
```python
from tornado.ioloop import IOLoop
from tornado.gen import Future, coroutine
import weakref
import gc
wr_fut = None
def count_runners():
runners = len([
x for x in gc.get_objects()
if x.__class__.__name__ == "Runner"
])
print("There are",runners,"Runners")
def resolve_fut():
count_runners()
gc.collect(2)
count_runners()
fut = wr_fut()
if not fut:
print('failure! The app is going to hang forever now')
else:
fut.set_result(1)
@coroutine
def main():
global wr_fut
my_fut = Future()
wr_fut = weakref.ref(my_fut)
IOLoop.current().add_callback(resolve_fut)
yield my_fut
print('success!')
IOLoop.current().run_sync(main)
```
This program hangs forever despite using run_sync. That's because the future returned by the coroutine in main() still exists, it just no longer has a Runner object to actually make the future do anything useful. This is _very_ surprising behavior. I would expect the generator/coroutine/Runner/frame objects to be alive for as long as {{run_sync}} is waiting.
Also, note that explicit call to ```gc.collect(2)``` ; this bug actually requires the garbage collector to run for the bug to trigger, which made this bug *very* difficult to track down.
I have managed hack tornado into making it so that the Runner cannot die while its Future is alive which causes my test case to work:
```python
# gen.py line 1241 tornado 4.3
except (StopIteration, Return) as e:
future.set_result(_value_from_stopiteration(e))
except Exception:
future.set_exc_info(sys.exc_info())
else:
# this is the original code below:
#Runner(result, future, yielded)
# this hack makes the Runner not die and my test case work
future._the_runner = Runner(result, future, yielded)
try:
return future
```
So the real-world scenario: I have a proprietary networking library that is using weakrefs for event callbacks and a wrapper library that converts these events into futures. Because I have gone out of my way to avoid creating garbage collection cycles (so that my network client can be GCed when it is no longer in use, unlike a lot of async clients available to tornado), it appears I have discovered this issue.
What is the most appropriate bugfix for this? I personally feel that Runner objects should have their lifecycle tied directly to the future they're driving so that Runners cannot die while their future is alive. However, an argument could also be made that Runner should have a `__del__` method that marks the future as canceled (or `.set_exception(RuntimeError("GCed"))`) instead so that developers can more clearly see that they hit this failure mode.
Please don't tell me the answer is "don't use weakrefs"! I believe it may be possible to create a test case that presents the same problem without using weakrefs but it would have to abuse certain behaviors of sockets, filenos and IOLoop's fd handlers to more closely emulate the actual workings of my network library. | tornadoweb/tornado | diff --git a/tornado/test/gen_test.py b/tornado/test/gen_test.py
index 4c873f4b..bfaba567 100644
--- a/tornado/test/gen_test.py
+++ b/tornado/test/gen_test.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import, division, print_function, with_statement
+import gc
import contextlib
import datetime
import functools
@@ -25,7 +26,6 @@ try:
except ImportError:
futures = None
-
class GenEngineTest(AsyncTestCase):
def setUp(self):
super(GenEngineTest, self).setUp()
@@ -1368,5 +1368,28 @@ class WaitIteratorTest(AsyncTestCase):
gen.WaitIterator(gen.sleep(0)).next())
+class RunnerGCTest(AsyncTestCase):
+ """Github issue 1769: Runner objects can get GCed unexpectedly"""
+ @gen_test
+ def test_gc(self):
+ """Runners shouldn't GC if future is alive"""
+ # Create the weakref
+ weakref_scope = [None]
+ def callback():
+ gc.collect(2)
+ weakref_scope[0]().set_result(123)
+
+ @gen.coroutine
+ def tester():
+ fut = Future()
+ weakref_scope[0] = weakref.ref(fut)
+ self.io_loop.add_callback(callback)
+ yield fut
+
+ yield gen.with_timeout(
+ datetime.timedelta(seconds=0.2),
+ tester()
+ )
+
if __name__ == '__main__':
unittest.main()
diff --git a/tornado/test/options_test.py b/tornado/test/options_test.py
index c050cb64..f7b215c5 100644
--- a/tornado/test/options_test.py
+++ b/tornado/test/options_test.py
@@ -36,7 +36,7 @@ class OptionsTest(unittest.TestCase):
options.define("port", default=80)
options.define("username", default='foo')
options.define("my_path")
- config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ config_path = os.path.join(os.path.dirname(__file__),
"options_test.cfg")
options.parse_config_file(config_path)
self.assertEqual(options.port, 443)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 10
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"futures",
"mock",
"monotonic",
"trollius",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
futures==2.2.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@4c95212cadfe8de91cc3f1d2e7644b8255c0d6b1#egg=tornado
trollius==2.1.post2
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- futures==2.2.0
- mock==5.2.0
- monotonic==1.6
- six==1.17.0
- trollius==2.1.post2
prefix: /opt/conda/envs/tornado
| [
"tornado/test/gen_test.py::RunnerGCTest::test_gc"
]
| []
| [
"tornado/test/gen_test.py::GenEngineTest::test_arguments",
"tornado/test/gen_test.py::GenEngineTest::test_async_raise_return",
"tornado/test/gen_test.py::GenEngineTest::test_async_raise_return_value",
"tornado/test/gen_test.py::GenEngineTest::test_async_raise_return_value_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_bogus_yield",
"tornado/test/gen_test.py::GenEngineTest::test_bogus_yield_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_exception_in_task_phase1",
"tornado/test/gen_test.py::GenEngineTest::test_exception_in_task_phase2",
"tornado/test/gen_test.py::GenEngineTest::test_exception_in_yield",
"tornado/test/gen_test.py::GenEngineTest::test_exception_phase1",
"tornado/test/gen_test.py::GenEngineTest::test_exception_phase2",
"tornado/test/gen_test.py::GenEngineTest::test_future",
"tornado/test/gen_test.py::GenEngineTest::test_inline_cb",
"tornado/test/gen_test.py::GenEngineTest::test_ioloop_cb",
"tornado/test/gen_test.py::GenEngineTest::test_key_mismatch",
"tornado/test/gen_test.py::GenEngineTest::test_key_mismatch_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_key_reuse",
"tornado/test/gen_test.py::GenEngineTest::test_key_reuse_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_leaked_callback",
"tornado/test/gen_test.py::GenEngineTest::test_leaked_callback_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_multi",
"tornado/test/gen_test.py::GenEngineTest::test_multi_dict",
"tornado/test/gen_test.py::GenEngineTest::test_multi_dict_future",
"tornado/test/gen_test.py::GenEngineTest::test_multi_empty",
"tornado/test/gen_test.py::GenEngineTest::test_multi_exceptions",
"tornado/test/gen_test.py::GenEngineTest::test_multi_future",
"tornado/test/gen_test.py::GenEngineTest::test_multi_future_delayed",
"tornado/test/gen_test.py::GenEngineTest::test_multi_future_dict_delayed",
"tornado/test/gen_test.py::GenEngineTest::test_multi_future_duplicate",
"tornado/test/gen_test.py::GenEngineTest::test_multi_future_exceptions",
"tornado/test/gen_test.py::GenEngineTest::test_multi_mixed_types",
"tornado/test/gen_test.py::GenEngineTest::test_multi_performance",
"tornado/test/gen_test.py::GenEngineTest::test_multi_yieldpoint_delayed",
"tornado/test/gen_test.py::GenEngineTest::test_multi_yieldpoint_dict_delayed",
"tornado/test/gen_test.py::GenEngineTest::test_no_yield",
"tornado/test/gen_test.py::GenEngineTest::test_orphaned_callback",
"tornado/test/gen_test.py::GenEngineTest::test_parallel_callback",
"tornado/test/gen_test.py::GenEngineTest::test_raise_after_stop",
"tornado/test/gen_test.py::GenEngineTest::test_resume_after_exception_in_yield",
"tornado/test/gen_test.py::GenEngineTest::test_return_value",
"tornado/test/gen_test.py::GenEngineTest::test_return_value_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_reuse",
"tornado/test/gen_test.py::GenEngineTest::test_stack_context_leak",
"tornado/test/gen_test.py::GenEngineTest::test_stack_context_leak_exception",
"tornado/test/gen_test.py::GenEngineTest::test_sync_raise_return",
"tornado/test/gen_test.py::GenEngineTest::test_sync_raise_return_value",
"tornado/test/gen_test.py::GenEngineTest::test_sync_raise_return_value_tuple",
"tornado/test/gen_test.py::GenEngineTest::test_task",
"tornado/test/gen_test.py::GenEngineTest::test_task_refcounting",
"tornado/test/gen_test.py::GenEngineTest::test_task_transfer_stack_context",
"tornado/test/gen_test.py::GenEngineTest::test_wait_all",
"tornado/test/gen_test.py::GenEngineTest::test_wait_transfer_stack_context",
"tornado/test/gen_test.py::GenEngineTest::test_with_arg",
"tornado/test/gen_test.py::GenEngineTest::test_with_arg_tuple",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_await",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_await_mixed_multi_native_future",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_await_mixed_multi_native_yieldpoint",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_early_return",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_gen_return",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_raise",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_return",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_return_no_value",
"tornado/test/gen_test.py::GenCoroutineTest::test_async_with_timeout",
"tornado/test/gen_test.py::GenCoroutineTest::test_moment",
"tornado/test/gen_test.py::GenCoroutineTest::test_pass_callback",
"tornado/test/gen_test.py::GenCoroutineTest::test_py3_leak_exception_context",
"tornado/test/gen_test.py::GenCoroutineTest::test_replace_context_exception",
"tornado/test/gen_test.py::GenCoroutineTest::test_replace_yieldpoint_exception",
"tornado/test/gen_test.py::GenCoroutineTest::test_sleep",
"tornado/test/gen_test.py::GenCoroutineTest::test_swallow_context_exception",
"tornado/test/gen_test.py::GenCoroutineTest::test_swallow_yieldpoint_exception",
"tornado/test/gen_test.py::GenCoroutineTest::test_sync_gen_return",
"tornado/test/gen_test.py::GenCoroutineTest::test_sync_raise",
"tornado/test/gen_test.py::GenCoroutineTest::test_sync_return",
"tornado/test/gen_test.py::GenCoroutineTest::test_sync_return_no_value",
"tornado/test/gen_test.py::GenWebTest::test_async_prepare_error_handler",
"tornado/test/gen_test.py::GenWebTest::test_coroutine_exception_handler",
"tornado/test/gen_test.py::GenWebTest::test_coroutine_sequence_handler",
"tornado/test/gen_test.py::GenWebTest::test_coroutine_unfinished_sequence_handler",
"tornado/test/gen_test.py::GenWebTest::test_exception_handler",
"tornado/test/gen_test.py::GenWebTest::test_native_coroutine_handler",
"tornado/test/gen_test.py::GenWebTest::test_sequence_handler",
"tornado/test/gen_test.py::GenWebTest::test_task_handler",
"tornado/test/gen_test.py::GenWebTest::test_undecorated_coroutines",
"tornado/test/gen_test.py::GenWebTest::test_yield_exception_handler",
"tornado/test/gen_test.py::WithTimeoutTest::test_already_resolved",
"tornado/test/gen_test.py::WithTimeoutTest::test_completed_concurrent_future",
"tornado/test/gen_test.py::WithTimeoutTest::test_completes_before_timeout",
"tornado/test/gen_test.py::WithTimeoutTest::test_fails_before_timeout",
"tornado/test/gen_test.py::WithTimeoutTest::test_timeout",
"tornado/test/gen_test.py::WithTimeoutTest::test_timeout_concurrent_future",
"tornado/test/gen_test.py::WaitIteratorTest::test_already_done",
"tornado/test/gen_test.py::WaitIteratorTest::test_empty_iterator",
"tornado/test/gen_test.py::WaitIteratorTest::test_iterator",
"tornado/test/gen_test.py::WaitIteratorTest::test_iterator_async_await",
"tornado/test/gen_test.py::WaitIteratorTest::test_no_ref",
"tornado/test/options_test.py::OptionsTest::test_as_dict",
"tornado/test/options_test.py::OptionsTest::test_dash_underscore_cli",
"tornado/test/options_test.py::OptionsTest::test_dash_underscore_file",
"tornado/test/options_test.py::OptionsTest::test_dash_underscore_introspection",
"tornado/test/options_test.py::OptionsTest::test_error_redefine",
"tornado/test/options_test.py::OptionsTest::test_getitem",
"tornado/test/options_test.py::OptionsTest::test_group_dict",
"tornado/test/options_test.py::OptionsTest::test_help",
"tornado/test/options_test.py::OptionsTest::test_items",
"tornado/test/options_test.py::OptionsTest::test_iter",
"tornado/test/options_test.py::OptionsTest::test_mock_patch",
"tornado/test/options_test.py::OptionsTest::test_multiple_int",
"tornado/test/options_test.py::OptionsTest::test_multiple_string",
"tornado/test/options_test.py::OptionsTest::test_parse_callbacks",
"tornado/test/options_test.py::OptionsTest::test_parse_command_line",
"tornado/test/options_test.py::OptionsTest::test_parse_config_file",
"tornado/test/options_test.py::OptionsTest::test_setattr",
"tornado/test/options_test.py::OptionsTest::test_setattr_type_check",
"tornado/test/options_test.py::OptionsTest::test_setattr_with_callback",
"tornado/test/options_test.py::OptionsTest::test_setitem",
"tornado/test/options_test.py::OptionsTest::test_subcommand",
"tornado/test/options_test.py::OptionsTest::test_types"
]
| []
| Apache License 2.0 | 664 | [
"demos/websocket/static/chat.js",
"tornado/web.py",
"tornado/gen.py",
"tornado/platform/asyncio.py",
"demos/chat/static/chat.js",
"tornado/tcpserver.py",
"demos/tcpecho/README.md",
".travis.yml",
"demos/chat/templates/index.html",
"docs/releases/v4.1.0.rst",
"demos/tcpecho/server.py",
"demos/websocket/templates/index.html",
"demos/tcpecho/client.py"
]
| [
"demos/websocket/static/chat.js",
"tornado/web.py",
"tornado/gen.py",
"tornado/platform/asyncio.py",
"demos/chat/static/chat.js",
"tornado/tcpserver.py",
"demos/tcpecho/README.md",
".travis.yml",
"demos/chat/templates/index.html",
"docs/releases/v4.1.0.rst",
"demos/tcpecho/server.py",
"demos/websocket/templates/index.html",
"demos/tcpecho/client.py"
]
|
opentok__Opentok-Python-SDK-89 | 613e5368748ec31a18c4f47c26683bb3bcdbbcf1 | 2016-07-26 19:59:19 | 8116ca27ae7ae4e464d306ac79eaf9453c6b7c96 | diff --git a/opentok/opentok.py b/opentok/opentok.py
index d3f855f..61899fd 100644
--- a/opentok/opentok.py
+++ b/opentok/opentok.py
@@ -275,7 +275,7 @@ class OpenTok(object):
"""For internal use."""
return {
'User-Agent': 'OpenTok-Python-SDK/' + __version__ + ' ' + platform.python_version(),
- 'X-TB-OPENTOK-AUTH': self._create_jwt_auth_header()
+ 'X-OPENTOK-AUTH': self._create_jwt_auth_header()
}
def archive_headers(self):
@@ -451,7 +451,8 @@ class OpenTok(object):
payload = {
'ist': 'project',
'iss': self.api_key,
- 'exp': int(time.time()) + (60*5), # 5 minutes
+ 'iat': int(time.time()), # current time in unix time (seconds)
+ 'exp': int(time.time()) + (60*3), # 3 minutes in the future (seconds)
'jti': '{:f}'.format(random.random())
}
| add an "iat" claim to opentok-auth tokens
something we overlooked before, but we're now trying to add into the OpenTok-Auth JWTs: https://tools.ietf.org/html/rfc7519#section-4.1.6. | opentok/Opentok-Python-SDK | diff --git a/tests/test_archive.py b/tests/test_archive.py
index b4a5995..7307f62 100644
--- a/tests/test_archive.py
+++ b/tests/test_archive.py
@@ -57,7 +57,7 @@ class OpenTokArchiveTest(unittest.TestCase):
archive.stop()
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(archive).to.be.an(Archive)
@@ -102,7 +102,7 @@ class OpenTokArchiveTest(unittest.TestCase):
archive.delete()
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
# TODO: test that the object is invalidated
diff --git a/tests/test_archive_api.py b/tests/test_archive_api.py
index f8f7ba9..e4aed1d 100644
--- a/tests/test_archive_api.py
+++ b/tests/test_archive_api.py
@@ -42,7 +42,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.start_archive(self.session_id)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
# non-deterministic json encoding. have to decode to test it properly
@@ -93,7 +93,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.start_archive(self.session_id, name=u('ARCHIVE NAME'))
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
# non-deterministic json encoding. have to decode to test it properly
@@ -142,7 +142,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.start_archive(self.session_id, name=u('ARCHIVE NAME'), has_video=False)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
# non-deterministic json encoding. have to decode to test it properly
@@ -193,7 +193,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.start_archive(self.session_id, name=u('ARCHIVE NAME'), output_mode=OutputModes.individual)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
# non-deterministic json encoding. have to decode to test it properly
@@ -245,7 +245,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.start_archive(self.session_id, name=u('ARCHIVE NAME'), output_mode=OutputModes.composed)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
# non-deterministic json encoding. have to decode to test it properly
@@ -298,7 +298,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.stop_archive(archive_id)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(archive).to.be.an(Archive)
@@ -325,7 +325,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
self.opentok.delete_archive(archive_id)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
@@ -354,7 +354,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive = self.opentok.get_archive(archive_id)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(archive).to.be.an(Archive)
@@ -469,7 +469,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive_list = self.opentok.get_archives()
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(archive_list).to.be.an(ArchiveList)
@@ -529,7 +529,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive_list = self.opentok.get_archives(offset=3)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(httpretty.last_request()).to.have.property("querystring").being.equal({
@@ -579,7 +579,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive_list = self.opentok.get_archives(count=2)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(httpretty.last_request()).to.have.property("querystring").being.equal({
@@ -655,7 +655,7 @@ class OpenTokArchiveApiTest(unittest.TestCase):
archive_list = self.opentok.get_archives(count=4, offset=2)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
expect(httpretty.last_request().headers[u('content-type')]).to.equal(u('application/json'))
expect(httpretty.last_request()).to.have.property("querystring").being.equal({
diff --git a/tests/test_session_creation.py b/tests/test_session_creation.py
index 804e348..e25c717 100644
--- a/tests/test_session_creation.py
+++ b/tests/test_session_creation.py
@@ -23,7 +23,7 @@ class OpenTokSessionCreationTest(unittest.TestCase):
session = self.opentok.create_session()
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
body = parse_qs(httpretty.last_request().body)
expect(body).to.have.key(b('p2p.preference')).being.equal([b('enabled')])
@@ -42,7 +42,7 @@ class OpenTokSessionCreationTest(unittest.TestCase):
session = self.opentok.create_session(media_mode=MediaModes.routed)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
body = parse_qs(httpretty.last_request().body)
expect(body).to.have.key(b('p2p.preference')).being.equal([b('disabled')])
@@ -61,7 +61,7 @@ class OpenTokSessionCreationTest(unittest.TestCase):
session = self.opentok.create_session(location='12.34.56.78')
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
# ordering of keys is non-deterministic, must parse the body to see if it is correct
body = parse_qs(httpretty.last_request().body)
@@ -81,7 +81,7 @@ class OpenTokSessionCreationTest(unittest.TestCase):
session = self.opentok.create_session(location='12.34.56.78', media_mode=MediaModes.routed)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
# ordering of keys is non-deterministic, must parse the body to see if it is correct
body = parse_qs(httpretty.last_request().body)
@@ -101,7 +101,7 @@ class OpenTokSessionCreationTest(unittest.TestCase):
session = self.opentok.create_session(media_mode=MediaModes.routed, archive_mode=ArchiveModes.manual)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
body = parse_qs(httpretty.last_request().body)
expect(body).to.have.key(b('p2p.preference')).being.equal([b('disabled')])
@@ -120,7 +120,7 @@ class OpenTokSessionCreationTest(unittest.TestCase):
session = self.opentok.create_session(media_mode=MediaModes.routed, archive_mode=ArchiveModes.always)
- validate_jwt_header(self, httpretty.last_request().headers[u('x-tb-opentok-auth')])
+ validate_jwt_header(self, httpretty.last_request().headers[u('x-opentok-auth')])
expect(httpretty.last_request().headers[u('user-agent')]).to.contain(u('OpenTok-Python-SDK/')+__version__)
body = parse_qs(httpretty.last_request().body)
expect(body).to.have.key(b('p2p.preference')).being.equal([b('disabled')])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 2.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"httpretty",
"sure",
"wheel",
"pytest"
],
"pre_install": null,
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
ecdsa==0.19.1
httpretty==1.1.4
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==5.2.0
nose==1.3.7
-e git+https://github.com/opentok/Opentok-Python-SDK.git@613e5368748ec31a18c4f47c26683bb3bcdbbcf1#egg=opentok
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyasn1==0.4.8
pyparsing==3.1.4
pytest==7.0.1
python-jose==3.4.0
pytz==2025.2
requests==2.27.1
rsa==4.9
six==1.17.0
sure==2.0.1
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: Opentok-Python-SDK
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- ecdsa==0.19.1
- httpretty==1.1.4
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==5.2.0
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.4.8
- pyparsing==3.1.4
- pytest==7.0.1
- python-jose==3.4.0
- pytz==2025.2
- requests==2.27.1
- rsa==4.9
- six==1.17.0
- sure==2.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/Opentok-Python-SDK
| [
"tests/test_archive.py::OpenTokArchiveTest::test_delete_archive",
"tests/test_archive.py::OpenTokArchiveTest::test_stop_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_delete_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_archives",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_archives_with_count",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_archives_with_offset",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_archives_with_offset_and_count",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_start_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_start_archive_with_name",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_start_composed_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_start_individual_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_start_voice_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_stop_archive",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_create_always_archive_mode_session",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_create_default_session",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_create_manual_archive_mode_session",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_create_routed_session",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_create_routed_session_with_location_hint",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_create_session_with_location_hint"
]
| []
| [
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_archive_with_unknown_properties",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_expired_archive",
"tests/test_archive_api.py::OpenTokArchiveApiTest::test_find_paused_archive",
"tests/test_session_creation.py::OpenTokSessionCreationTest::test_complains_about_always_archive_mode_and_relayed_session"
]
| []
| MIT License | 665 | [
"opentok/opentok.py"
]
| [
"opentok/opentok.py"
]
|
|
zalando-stups__senza-282 | 56b109cbf40fe05f508580ad2fce9d07e60075e6 | 2016-07-27 07:31:57 | 35b73f49b8cb58e7892908413bdf2a61cfe3058e | diff --git a/senza/components/elastic_load_balancer.py b/senza/components/elastic_load_balancer.py
index 347b515..5644c13 100644
--- a/senza/components/elastic_load_balancer.py
+++ b/senza/components/elastic_load_balancer.py
@@ -2,6 +2,7 @@ import click
from clickclick import fatal_error
from senza.aws import resolve_security_groups
+from ..cli import AccountArguments, TemplateArguments
from ..manaus import ClientError
from ..manaus.iam import IAM, IAMServerCertificate
from ..manaus.acm import ACM, ACMCertificate
@@ -23,13 +24,14 @@ def get_load_balancer_name(stack_name: str, stack_version: str):
return '{}-{}'.format(stack_name[:l], stack_version)
-def get_listeners(subdomain, main_zone, configuration):
+def get_listeners(subdomain, main_zone, configuration,
+ account_info: AccountArguments):
ssl_cert = configuration.get('SSLCertificateId')
if ACMCertificate.arn_is_acm_certificate(ssl_cert):
# check if certificate really exists
try:
- ACMCertificate.get_by_arn(ssl_cert)
+ ACMCertificate.get_by_arn(account_info.Region, ssl_cert)
except ClientError as e:
error_msg = e.response['Error']['Message']
fatal_error(error_msg)
@@ -44,7 +46,8 @@ def get_listeners(subdomain, main_zone, configuration):
iam_pattern = main_zone.lower().rstrip('.').replace('.', '-')
name = '{sub}.{zone}'.format(sub=subdomain,
zone=main_zone.rstrip('.'))
- acm_certificates = sorted(ACM.get_certificates(domain_name=name),
+ acm = ACM(account_info.Region)
+ acm_certificates = sorted(acm.get_certificates(domain_name=name),
reverse=True)
else:
iam_pattern = ''
@@ -79,9 +82,13 @@ def get_listeners(subdomain, main_zone, configuration):
]
-def component_elastic_load_balancer(definition, configuration, args, info, force, account_info):
+def component_elastic_load_balancer(definition,
+ configuration: dict,
+ args: TemplateArguments,
+ info: dict,
+ force,
+ account_info: AccountArguments):
lb_name = configuration["Name"]
-
# domains pointing to the load balancer
subdomain = ''
main_zone = None
@@ -107,7 +114,7 @@ def component_elastic_load_balancer(definition, configuration, args, info, force
subdomain = domain['Subdomain']
main_zone = domain['Zone'] # type: str
- listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration)
+ listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration, account_info)
health_check_protocol = "HTTP"
allowed_health_check_protocols = ("HTTP", "TCP", "UDP", "SSL")
diff --git a/senza/manaus/acm.py b/senza/manaus/acm.py
index ad918e8..d28fe26 100644
--- a/senza/manaus/acm.py
+++ b/senza/manaus/acm.py
@@ -101,11 +101,11 @@ class ACMCertificate:
revoked_at, revocation_reason)
@classmethod
- def get_by_arn(cls, arn: str) -> "ACMCertificate":
+ def get_by_arn(cls, region: str, arn: str) -> "ACMCertificate":
"""
Gets a ACMCertificate based on ARN alone
"""
- client = boto3.client('acm')
+ client = boto3.client('acm', region)
certificate = client.describe_certificate(CertificateArn=arn)['Certificate']
return cls.from_boto_dict(certificate)
@@ -156,21 +156,26 @@ class ACM:
See http://boto3.readthedocs.io/en/latest/reference/services/acm.html
"""
- @staticmethod
- def get_certificates(valid_only: bool=True,
+ def __init__(self, region=str):
+ self.region = region
+
+ def get_certificates(self,
+ *,
+ valid_only: bool=True,
domain_name: Optional[str]=None) -> Iterator[ACMCertificate]:
"""
Gets certificates from ACM. By default it returns all valid certificates
+ :param region: AWS region
:param valid_only: Return only valid certificates
:param domain_name: Return only certificates that match the domain
"""
# TODO implement pagination
- client = boto3.client('acm')
+ client = boto3.client('acm', self.region)
certificates = client.list_certificates()['CertificateSummaryList']
for summary in certificates:
arn = summary['CertificateArn']
- certificate = ACMCertificate.get_by_arn(arn)
+ certificate = ACMCertificate.get_by_arn(self.region, arn)
if valid_only and not certificate.is_valid():
pass
elif domain_name is not None and not certificate.matches(domain_name):
| NoRegionError: You must specify a region.
When trying to run `senza create` I am getting this error:
```
$ senza create --region eu-central-1 --force ../hello-flask/hello.yaml v56 ImageVersion=bus56 --stacktrace-visible
Generating Cloud Formation template.. EXCEPTION OCCURRED: You must specify a region.
Traceback (most recent call last):
File "/Users/rcaricio/.virtualenvs/lizzy-init/bin/senza", line 11, in <module>
sys.exit(main())
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/cli.py", line 1492, in main
HandleExceptions(cli)()
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/error_handling.py", line 99, in __call__
self.die_unknown_error(e)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/error_handling.py", line 57, in die_unknown_error
raise e
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/error_handling.py", line 69, in __call__
self.function(*args, **kwargs)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/click/core.py", line 1060, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/cli.py", line 555, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/cli.py", line 638, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/cli.py", line 239, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/components/weighted_dns_elastic_load_balancer.py", line 29, in component_weighted_dns_elastic_load_balancer
return component_elastic_load_balancer(definition, configuration, args, info, force, account_info)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 110, in component_elastic_load_balancer
listeners = configuration.get('Listeners') or get_listeners(subdomain, main_zone, configuration)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 48, in get_listeners
reverse=True)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/senza/manaus/acm.py", line 169, in get_certificates
client = boto3.client('acm')
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/boto3/__init__.py", line 79, in client
return _get_default_session().client(*args, **kwargs)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/boto3/session.py", line 250, in client
aws_session_token=aws_session_token, config=config)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/botocore/session.py", line 818, in create_client
client_config=config, api_version=api_version)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/botocore/client.py", line 69, in create_client
verify, credentials, scoped_config, client_config, endpoint_bridge)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/botocore/client.py", line 199, in _get_client_args
service_name, region_name, endpoint_url, is_secure)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/botocore/client.py", line 322, in resolve
service_name, region_name)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/botocore/regions.py", line 122, in construct_endpoint
partition, service_name, region_name)
File "/Users/rcaricio/.virtualenvs/lizzy-init/lib/python3.5/site-packages/botocore/regions.py", line 135, in _endpoint_for_partition
raise NoRegionError()
botocore.exceptions.NoRegionError: You must specify a region.
```
Senza version `1.0.91`
Does not happen with older versions of Senza though.
| zalando-stups/senza | diff --git a/tests/test_manaus/test_acm.py b/tests/test_manaus/test_acm.py
index 51e12d4..f022ff2 100644
--- a/tests/test_manaus/test_acm.py
+++ b/tests/test_manaus/test_acm.py
@@ -148,7 +148,8 @@ def test_certificate_get_by_arn(monkeypatch):
m_client.describe_certificate.return_value = {'Certificate': CERT1}
monkeypatch.setattr('boto3.client', m_client)
- certificate1 = ACMCertificate.get_by_arn('arn:aws:acm:eu-west-1:cert')
+ certificate1 = ACMCertificate.get_by_arn('dummy-region',
+ 'arn:aws:acm:eu-west-1:cert')
assert certificate1.domain_name == '*.senza.example.com'
assert certificate1.is_valid(when=datetime(2016, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
@@ -183,7 +184,7 @@ def test_get_certificates(monkeypatch):
tzinfo=timezone.utc)
monkeypatch.setattr('senza.manaus.acm.datetime', m_datetime)
- acm = ACM()
+ acm = ACM('dummy-region')
certificates_default = list(acm.get_certificates())
assert len(certificates_default) == 1 # Cert2 is excluded because it's REVOKED
assert certificates_default[0].arn == 'arn:aws:acm:eu-west-1:cert1'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@56b109cbf40fe05f508580ad2fce9d07e60075e6#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_acm.py::test_certificate_get_by_arn",
"tests/test_manaus/test_acm.py::test_get_certificates"
]
| []
| [
"tests/test_manaus/test_acm.py::test_certificate_valid",
"tests/test_manaus/test_acm.py::test_certificate_comparison",
"tests/test_manaus/test_acm.py::test_certificate_matches",
"tests/test_manaus/test_acm.py::test_arn_is_acm_certificate"
]
| []
| Apache License 2.0 | 666 | [
"senza/components/elastic_load_balancer.py",
"senza/manaus/acm.py"
]
| [
"senza/components/elastic_load_balancer.py",
"senza/manaus/acm.py"
]
|
|
goldmann__docker-squash-110 | 15bda09f82d4031fcf3e493d18133ffbbd09fdbd | 2016-07-27 08:36:36 | 1e6ae47e44e69f3094e2a5e75be20a74c1c0a44c | diff --git a/docker_squash/cli.py b/docker_squash/cli.py
index 462b872..cff0773 100644
--- a/docker_squash/cli.py
+++ b/docker_squash/cli.py
@@ -42,7 +42,9 @@ class CLI(object):
parser.add_argument(
'-f', '--from-layer', help='ID of the layer or image ID or image name. If not specified will squash all layers in the image')
parser.add_argument(
- '-t', '--tag', help="Specify the tag to be used for the new image. By default it'll be set to 'image' argument")
+ '-t', '--tag', help="Specify the tag to be used for the new image. by default it'll be set to 'image' argument")
+ parser.add_argument(
+ '-c', '--cleanup', action='store_true', help="Remove source image from Docker after squashing")
parser.add_argument(
'--tmp-dir', help='Temporary directory to be used')
parser.add_argument(
@@ -59,7 +61,7 @@ class CLI(object):
try:
squash.Squash(log=self.log, image=args.image,
- from_layer=args.from_layer, tag=args.tag, output_path=args.output_path, tmp_dir=args.tmp_dir, development=args.development).run()
+ from_layer=args.from_layer, tag=args.tag, output_path=args.output_path, tmp_dir=args.tmp_dir, development=args.development, cleanup=args.cleanup).run()
except KeyboardInterrupt:
self.log.error("Program interrupted by user, exiting...")
sys.exit(1)
diff --git a/docker_squash/squash.py b/docker_squash/squash.py
index d6d21e0..e81960a 100644
--- a/docker_squash/squash.py
+++ b/docker_squash/squash.py
@@ -14,7 +14,7 @@ from docker_squash.version import version
class Squash(object):
def __init__(self, log, image, docker=None, from_layer=None, tag=None, tmp_dir=None,
- output_path=None, load_image=True, development=False):
+ output_path=None, load_image=True, development=False, cleanup=False):
self.log = log
self.docker = docker
self.image = image
@@ -24,6 +24,7 @@ class Squash(object):
self.output_path = output_path
self.load_image = load_image
self.development = development
+ self.cleanup = cleanup
if not docker:
self.docker = common.docker_client(self.log)
@@ -65,12 +66,22 @@ class Squash(object):
raise
+ def _cleanup(self):
+ try:
+ image_id = self.docker.inspect_image(self.image)['Id']
+ except:
+ self.log.warn("Could not get the image ID for %s image, skipping cleanup after squashing" % self.image)
+ return
+
+ self.log.info("Removing old %s image..." % self.image)
+ self.docker.remove_image(image_id, force=False, noprune=False)
+ self.log.info("Image removed!")
+
def squash(self, image):
# Do the actual squashing
new_image_id = image.squash()
- self.log.info(
- "New squashed image ID is %s" % new_image_id)
+ self.log.info("New squashed image ID is %s" % new_image_id)
if self.output_path:
# Move the tar archive to the specified path
@@ -80,8 +91,16 @@ class Squash(object):
# Load squashed image into Docker
image.load_squashed_image()
+ # Clean up all temporary files
image.cleanup()
+ # Remove the source image - this is the only possible way
+ # to remove orphaned layers from Docker daemon at the build time.
+ # We cannot use here a tag name because it could be used as the target,
+ # squashed image tag - we need to use the image ID.
+ if self.cleanup:
+ self._cleanup()
+
self.log.info("Done")
return new_image_id
diff --git a/docker_squash/v2_image.py b/docker_squash/v2_image.py
index c2fa1ac..4df00d7 100644
--- a/docker_squash/v2_image.py
+++ b/docker_squash/v2_image.py
@@ -280,10 +280,10 @@ class V2Image(Image):
if self.layer_paths_to_move:
config['parent'] = self.layer_paths_to_move[-1]
else:
- del config['parent']
+ config.pop("parent", None)
# Update 'id' - it should be the path to the layer
config['id'] = layer_path_id
- del config['container']
+ config.pop("container", None)
return config
def _generate_image_metadata(self):
@@ -294,7 +294,7 @@ class V2Image(Image):
metadata['created'] = self.date
# Remove unnecessary or old fields
- del metadata['container']
+ metadata.pop("container", None)
# Remove squashed layers from history
metadata['history'] = metadata['history'][:len(self.layers_to_move)]
| Remove squashed layers from Docker daemon
In most cases we're not interested in layers that were squashed. In case where the input and target image has the same tag - all squashed layers will be "orphaned" leaving them in Docker daemon and wasting space. We should probably include a `--cleanup` switch or something similar to remove layers without tag that were "orphaned" during the squashing process. | goldmann/docker-squash | diff --git a/tests/test_unit_squash.py b/tests/test_unit_squash.py
new file mode 100644
index 0000000..c136761
--- /dev/null
+++ b/tests/test_unit_squash.py
@@ -0,0 +1,43 @@
+import unittest
+import mock
+import six
+import logging
+
+from docker_squash.squash import Squash
+from docker_squash.image import Image
+from docker_squash.errors import SquashError
+
+
+class TestSquash(unittest.TestCase):
+
+ def setUp(self):
+ self.log = mock.Mock()
+ self.docker_client = mock.Mock()
+ self.docker_client.version.return_value = {'GitCommit': "commit/9.9.9", 'ApiVersion': "9.99"}
+
+ def test_handle_case_when_no_image_is_provided(self):
+ squash = Squash(self.log, None, self.docker_client)
+ with self.assertRaises(SquashError) as cm:
+ squash.run()
+ self.assertEquals(
+ str(cm.exception), "Image is not provided")
+
+ def test_exit_if_no_output_path_provided_and_loading_is_disabled_too(self):
+ squash = Squash(self.log, 'image', self.docker_client, load_image=False, output_path=None)
+ squash.run()
+ self.log.warn.assert_called_with("No output path specified and loading into Docker is not selected either; squashed image would not accessible, proceeding with squashing doesn't make sense")
+
+ @mock.patch('docker_squash.squash.V2Image')
+ def test_should_not_cleanup_after_squashing(self, v2_image):
+ squash = Squash(self.log, 'image', self.docker_client, load_image=True)
+ squash.run()
+
+ v2_image.cleanup.assert_not_called()
+
+ @mock.patch('docker_squash.squash.V2Image')
+ def test_should_cleanup_after_squashing(self, v2_image):
+ self.docker_client.inspect_image.return_value = {'Id': "abcdefgh"}
+ squash = Squash(self.log, 'image', self.docker_client, load_image=True, cleanup=True)
+ squash.run()
+
+ self.docker_client.remove_image.assert_called_with('abcdefgh', force=False, noprune=False)
diff --git a/tests/test_unit_v1_image.py b/tests/test_unit_v1_image.py
index 6316399..07081ab 100644
--- a/tests/test_unit_v1_image.py
+++ b/tests/test_unit_v1_image.py
@@ -299,24 +299,5 @@ class TestAddMarkers(unittest.TestCase):
self.assertTrue(tar_info.isfile())
-class TestGeneral(unittest.TestCase):
-
- def setUp(self):
- self.log = mock.Mock()
- self.docker_client = mock.Mock()
- self.docker_client.version.return_value = {'GitCommit': "commit/9.9.9", 'ApiVersion': "9.99"}
-
- def test_handle_case_when_no_image_is_provided(self):
- squash = Squash(self.log, None, self.docker_client)
- with self.assertRaises(SquashError) as cm:
- squash.run()
- self.assertEquals(
- str(cm.exception), "Image is not provided")
-
- def test_exit_if_no_output_path_provided_and_loading_is_disabled_too(self):
- squash = Squash(self.log, 'image', self.docker_client, load_image=False, output_path=None)
- squash.run()
- self.log.warn.assert_called_with("No output path specified and loading into Docker is not selected either; squashed image would not accessible, proceeding with squashing doesn't make sense")
-
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 3
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docker-py==1.10.6
docker-pycreds==0.4.0
-e git+https://github.com/goldmann/docker-squash.git@15bda09f82d4031fcf3e493d18133ffbbd09fdbd#egg=docker_squash
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
websocket-client==1.8.0
| name: docker-squash
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docker-py==1.10.6
- docker-pycreds==0.4.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- websocket-client==1.8.0
prefix: /opt/conda/envs/docker-squash
| [
"tests/test_unit_squash.py::TestSquash::test_should_cleanup_after_squashing"
]
| []
| [
"tests/test_unit_squash.py::TestSquash::test_exit_if_no_output_path_provided_and_loading_is_disabled_too",
"tests/test_unit_squash.py::TestSquash::test_handle_case_when_no_image_is_provided",
"tests/test_unit_squash.py::TestSquash::test_should_not_cleanup_after_squashing",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_not_skip_file_not_in_path_to_skip",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_not_skip_the_file_that_name_is_similar_to_skipped_path",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_skip_exact_files",
"tests/test_unit_v1_image.py::TestSkippingFiles::test_should_skip_files_in_subdirectory",
"tests/test_unit_v1_image.py::TestParseImageName::test_should_parse_name_name_with_proper_tag",
"tests/test_unit_v1_image.py::TestParseImageName::test_should_parse_name_name_without_tag",
"tests/test_unit_v1_image.py::TestPrepareTemporaryDirectory::test_create_tmp_directory_if_not_provided",
"tests/test_unit_v1_image.py::TestPrepareTemporaryDirectory::test_should_raise_if_directory_already_exists",
"tests/test_unit_v1_image.py::TestPrepareTemporaryDirectory::test_should_use_provided_tmp_dir",
"tests/test_unit_v1_image.py::TestPrepareLayersToSquash::test_should_generate_list_of_layers",
"tests/test_unit_v1_image.py::TestPrepareLayersToSquash::test_should_not_fail_with_empty_list_of_layers",
"tests/test_unit_v1_image.py::TestPrepareLayersToSquash::test_should_return_all_layers_if_from_layer_is_not_found",
"tests/test_unit_v1_image.py::TestGenerateV1ImageId::test_should_generate_id",
"tests/test_unit_v1_image.py::TestGenerateV1ImageId::test_should_generate_id_that_is_not_integer_shen_shortened",
"tests/test_unit_v1_image.py::TestGenerateRepositoriesJSON::test_generate_json",
"tests/test_unit_v1_image.py::TestGenerateRepositoriesJSON::test_handle_empty_image_id",
"tests/test_unit_v1_image.py::TestGenerateRepositoriesJSON::test_should_not_generate_repositories_if_name_and_tag_is_missing",
"tests/test_unit_v1_image.py::TestMarkerFiles::test_should_find_all_marker_files",
"tests/test_unit_v1_image.py::TestMarkerFiles::test_should_return_empty_dict_when_no_files_are_in_the_tar",
"tests/test_unit_v1_image.py::TestMarkerFiles::test_should_return_empty_dict_when_no_marker_files_are_found",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_add_all_marker_files_to_empty_tar",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_add_marker_file_when_tar_has_prefixed_entries",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_not_add_any_marker_files",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_not_fail_with_empty_list_of_markers_to_add",
"tests/test_unit_v1_image.py::TestAddMarkers::test_should_skip_a_marker_file_if_file_is_in_unsquashed_layers"
]
| []
| MIT License | 667 | [
"docker_squash/v2_image.py",
"docker_squash/squash.py",
"docker_squash/cli.py"
]
| [
"docker_squash/v2_image.py",
"docker_squash/squash.py",
"docker_squash/cli.py"
]
|
|
docker__docker-py-1143 | 2d3bda84de39a75e560fc79512143d43e5d61226 | 2016-07-28 01:43:06 | a44d65be370c28abd666a299456b83659dd1a1df | diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index 4d218692..1cfc8acc 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -22,8 +22,8 @@ import tarfile
import tempfile
import warnings
from distutils.version import StrictVersion
-from fnmatch import fnmatch
from datetime import datetime
+from fnmatch import fnmatch
import requests
import six
@@ -33,6 +33,10 @@ from .. import errors
from .. import tls
from .types import Ulimit, LogConfig
+if six.PY2:
+ from urllib import splitnport
+else:
+ from urllib.parse import splitnport
DEFAULT_HTTP_HOST = "127.0.0.1"
DEFAULT_UNIX_SOCKET = "http+unix://var/run/docker.sock"
@@ -387,7 +391,6 @@ def parse_repository_tag(repo_name):
# Protocol translation: tcp -> http, unix -> http+unix
def parse_host(addr, is_win32=False, tls=False):
proto = "http+unix"
- host = DEFAULT_HTTP_HOST
port = None
path = ''
@@ -427,32 +430,27 @@ def parse_host(addr, is_win32=False, tls=False):
)
proto = "https" if tls else "http"
- if proto != "http+unix" and ":" in addr:
- host_parts = addr.split(':')
- if len(host_parts) != 2:
- raise errors.DockerException(
- "Invalid bind address format: {0}".format(addr)
- )
- if host_parts[0]:
- host = host_parts[0]
+ if proto in ("http", "https"):
+ address_parts = addr.split('/', 1)
+ host = address_parts[0]
+ if len(address_parts) == 2:
+ path = '/' + address_parts[1]
+ host, port = splitnport(host)
- port = host_parts[1]
- if '/' in port:
- port, path = port.split('/', 1)
- path = '/{0}'.format(path)
- try:
- port = int(port)
- except Exception:
+ if port is None:
raise errors.DockerException(
"Invalid port: {0}".format(addr)
)
- elif proto in ("http", "https") and ':' not in addr:
- raise errors.DockerException(
- "Bind address needs a port: {0}".format(addr))
+ if not host:
+ host = DEFAULT_HTTP_HOST
else:
host = addr
+ if proto in ("http", "https") and port == -1:
+ raise errors.DockerException(
+ "Bind address needs a port: {0}".format(addr))
+
if proto == "http+unix" or proto == 'npipe':
return "{0}://{1}".format(proto, host)
return "{0}://{1}:{2}{3}".format(proto, host, port, path)
| Support IPv6 addresses in DOCKER_HOST
Raised in https://github.com/docker/compose/issues/2879.
See https://github.com/docker/docker/pull/16950 for the Engine implementation. | docker/docker-py | diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 68484fe5..0f7a58c9 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -404,10 +404,18 @@ class ParseHostTest(base.BaseTestCase):
'https://kokia.jp:2375': 'https://kokia.jp:2375',
'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock',
'unix://': 'http+unix://var/run/docker.sock',
+ '12.234.45.127:2375/docker/engine': (
+ 'http://12.234.45.127:2375/docker/engine'
+ ),
'somehost.net:80/service/swarm': (
'http://somehost.net:80/service/swarm'
),
'npipe:////./pipe/docker_engine': 'npipe:////./pipe/docker_engine',
+ '[fd12::82d1]:2375': 'http://[fd12::82d1]:2375',
+ 'https://[fd12:5672::12aa]:1090': 'https://[fd12:5672::12aa]:1090',
+ '[fd12::82d1]:2375/docker/engine': (
+ 'http://[fd12::82d1]:2375/docker/engine'
+ ),
}
for host in invalid_hosts:
@@ -415,7 +423,7 @@ class ParseHostTest(base.BaseTestCase):
parse_host(host, None)
for host, expected in valid_hosts.items():
- self.assertEqual(parse_host(host, None), expected, msg=host)
+ assert parse_host(host, None) == expected
def test_parse_host_empty_value(self):
unix_socket = 'http+unix://var/run/docker.sock'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"coverage",
"pytest-cov",
"flake8"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@2d3bda84de39a75e560fc79512143d43e5d61226#egg=docker_py
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::ParseHostTest::test_parse_host"
]
| []
| [
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_blkio_constraints",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_score_adj",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_alternate_env",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_with_equals_character",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls_tcp_proto",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_leading_dot_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_with_path_traversal",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 668 | [
"docker/utils/utils.py"
]
| [
"docker/utils/utils.py"
]
|
|
google__yapf-286 | b8d20bd2d9d69fda6c1b102c0c07f97931e87132 | 2016-07-28 04:51:31 | b8d20bd2d9d69fda6c1b102c0c07f97931e87132 | diff --git a/CHANGELOG b/CHANGELOG
index d331f8d..fd92065 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -5,8 +5,11 @@
## [0.11.1] UNRELEASED
### Fixed
- Enforce splitting each element in a dictionary if comma terminated.
-- It's okay to split in the middle of a dotted name if the whole expression is
- going to go over the column limit.
+
+### Changed
+- Issue #228: Return exit code 0 on success, regardless of whether files were
+ changed. (Previously, 0 meant success with no files
+ modified, and 2 meant success with at least one file modified.)
## [0.11.0] 2016-07-17
### Added
diff --git a/yapf/__init__.py b/yapf/__init__.py
index dfbb8ee..145d8f4 100644
--- a/yapf/__init__.py
+++ b/yapf/__init__.py
@@ -161,7 +161,7 @@ def main(argv):
lines=lines,
verify=args.verify)
sys.stdout.write(reformatted_source)
- return 2 if changed else 0
+ return 0
files = file_resources.GetCommandLineFiles(args.files, args.recursive,
args.exclude)
@@ -175,7 +175,7 @@ def main(argv):
in_place=args.in_place,
print_diff=args.diff,
verify=args.verify)
- return 2 if changed else 0
+ return 0
def FormatFiles(filenames,
diff --git a/yapf/yapflib/split_penalty.py b/yapf/yapflib/split_penalty.py
index 50796de..4b802c0 100644
--- a/yapf/yapflib/split_penalty.py
+++ b/yapf/yapflib/split_penalty.py
@@ -185,8 +185,8 @@ class _TreePenaltyAssigner(pytree_visitor.PyTreeVisitor):
# atom tr1() tr2
# It may be necessary (though undesirable) to split up a previous
# function call's parentheses to the next line.
- self._SetStronglyConnected(prev_trailer.children[-1])
- self._SetStronglyConnected(cur_trailer.children[0])
+ self._SetUnbreakable(prev_trailer.children[-1])
+ self._SetUnbreakable(cur_trailer.children[0])
prev_trailer_idx = cur_trailer_idx
else:
break
| yapf returns an exit code of 2 on unformatted but valid input.
Hello there and thanks for developing this much appreciated python formatter.
I’m opening this issue as I’m not sure to understand the logic behind yapf's exit code return when applying it on unformatted and valid input. Indeed, yapf returns an exit code of 2 in such case (which implies something went wrong), yet a valid formatted output is returned. I would consider the operation successful, hence expect an exit code of 0.
Consequently, this behavior tends to break scripts where flow execution rely on the format's success.
I saw a related issue: https://github.com/google/yapf/issues/186 but I don't see the need to inform output has been modified compared to input (i.e. formatted) by returning a status code other than 0 as formatting is the purpose of the tool. Otherwise, _--diff_ option seems the right fit if this information is needed (not to mention UNIX tools can also answer this).
Am I missing something or is this behavior indeed odd? Thanks! | google/yapf | diff --git a/yapftests/main_test.py b/yapftests/main_test.py
index e5a5a74..42e6f9b 100644
--- a/yapftests/main_test.py
+++ b/yapftests/main_test.py
@@ -91,7 +91,7 @@ class MainTest(unittest.TestCase):
with patched_input(code):
with captured_output() as (out, err):
ret = yapf.main(['-', '--style=chromium'])
- self.assertEqual(ret, 2)
+ self.assertEqual(ret, 0)
self.assertEqual(out.getvalue(), chromium_code)
def testEchoBadInput(self):
@@ -116,3 +116,4 @@ class MainTest(unittest.TestCase):
self.assertEqual(ret, 0)
version = 'yapf {}\n'.format(yapf.__version__)
self.assertEqual(version, out.getvalue())
+
diff --git a/yapftests/reformatter_test.py b/yapftests/reformatter_test.py
index cb2e6d5..3092344 100644
--- a/yapftests/reformatter_test.py
+++ b/yapftests/reformatter_test.py
@@ -1750,20 +1750,6 @@ class BuganizerFixes(ReformatterTest):
def setUpClass(cls):
style.SetGlobalStyle(style.CreateChromiumStyle())
- def testB30442148(self):
- unformatted_code = textwrap.dedent("""\
- def lulz():
- return (some_long_module_name.SomeLongClassName.
- some_long_attribute_name.some_long_method_name())
- """)
- expected_formatted_code = textwrap.dedent("""\
- def lulz():
- return (some_long_module_name.SomeLongClassName.some_long_attribute_name.
- some_long_method_name())
- """)
- uwlines = _ParseAndUnwrap(unformatted_code)
- self.assertCodeEqual(expected_formatted_code, reformatter.Reformat(uwlines))
-
def testB26868213(self):
unformatted_code = textwrap.dedent("""\
def _():
@@ -1843,10 +1829,9 @@ class BuganizerFixes(ReformatterTest):
""")
expected_formatted_code = textwrap.dedent("""\
def _():
- _xxxxxxxxxxxxxxx(
- aaaaaaaa,
- bbbbbbbbbbbbbb.cccccccccc[dddddddddddddddddddddddddddd.
- eeeeeeeeeeeeeeeeeeeeee.fffffffffffffffffffff])
+ _xxxxxxxxxxxxxxx(aaaaaaaa, bbbbbbbbbbbbbb.cccccccccc[
+ dddddddddddddddddddddddddddd.eeeeeeeeeeeeeeeeeeeeee.
+ fffffffffffffffffffff])
""")
uwlines = _ParseAndUnwrap(unformatted_code)
self.assertCodeEqual(expected_formatted_code, reformatter.Reformat(uwlines))
diff --git a/yapftests/split_penalty_test.py b/yapftests/split_penalty_test.py
index 51f8f3e..b21d227 100644
--- a/yapftests/split_penalty_test.py
+++ b/yapftests/split_penalty_test.py
@@ -221,10 +221,10 @@ class SplitPenaltyTest(unittest.TestCase):
self._CheckPenalties(tree, [
('foo', None),
('.', UNBREAKABLE),
- ('bar', DOTTED_NAME),
- ('.', STRONGLY_CONNECTED),
- ('baz', DOTTED_NAME),
- ('(', STRONGLY_CONNECTED),
+ ('bar', UNBREAKABLE),
+ ('.', UNBREAKABLE),
+ ('baz', UNBREAKABLE),
+ ('(', UNBREAKABLE),
('1', CONTIGUOUS_LIST),
(',', CONTIGUOUS_LIST),
('2', CONTIGUOUS_LIST),
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/google/yapf.git@b8d20bd2d9d69fda6c1b102c0c07f97931e87132#egg=yapf
| name: yapf
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/yapf
| [
"yapftests/main_test.py::MainTest::testEchoInputWithStyle",
"yapftests/reformatter_test.py::BuganizerFixes::testB29093579"
]
| [
"yapftests/split_penalty_test.py::SplitPenaltyTest::testFuncCalls",
"yapftests/split_penalty_test.py::SplitPenaltyTest::testStronglyConnected",
"yapftests/split_penalty_test.py::SplitPenaltyTest::testUnbreakable"
]
| [
"yapftests/main_test.py::RunMainTest::testShouldHandleYapfError",
"yapftests/main_test.py::MainTest::testEchoBadInput",
"yapftests/main_test.py::MainTest::testEchoInput",
"yapftests/main_test.py::MainTest::testHelp",
"yapftests/main_test.py::MainTest::testNoPythonFilesMatched",
"yapftests/main_test.py::MainTest::testVersion",
"yapftests/reformatter_test.py::BasicReformatterTest::testArgsAndKwargsFormatting",
"yapftests/reformatter_test.py::BasicReformatterTest::testBinaryOperators",
"yapftests/reformatter_test.py::BasicReformatterTest::testBlankLinesAtEndOfFile",
"yapftests/reformatter_test.py::BasicReformatterTest::testBlankLinesBeforeDecorators",
"yapftests/reformatter_test.py::BasicReformatterTest::testBlankLinesBeforeFunctionsNotInColumnZero",
"yapftests/reformatter_test.py::BasicReformatterTest::testClosingBracketIndent",
"yapftests/reformatter_test.py::BasicReformatterTest::testClosingBracketsInlinedInCall",
"yapftests/reformatter_test.py::BasicReformatterTest::testCommentBeforeFuncDef",
"yapftests/reformatter_test.py::BasicReformatterTest::testCommentBetweenDecorators",
"yapftests/reformatter_test.py::BasicReformatterTest::testCommentColumnLimitOverflow",
"yapftests/reformatter_test.py::BasicReformatterTest::testComments",
"yapftests/reformatter_test.py::BasicReformatterTest::testCommentsInDataLiteral",
"yapftests/reformatter_test.py::BasicReformatterTest::testCommentsWithContinuationMarkers",
"yapftests/reformatter_test.py::BasicReformatterTest::testComprehensionForAndIf",
"yapftests/reformatter_test.py::BasicReformatterTest::testContiguousList",
"yapftests/reformatter_test.py::BasicReformatterTest::testContinuationIndent",
"yapftests/reformatter_test.py::BasicReformatterTest::testContinuationMarkers",
"yapftests/reformatter_test.py::BasicReformatterTest::testContinuationSpaceRetention",
"yapftests/reformatter_test.py::BasicReformatterTest::testDictSetGenerator",
"yapftests/reformatter_test.py::BasicReformatterTest::testDictionaryMakerFormatting",
"yapftests/reformatter_test.py::BasicReformatterTest::testDocstringAndMultilineComment",
"yapftests/reformatter_test.py::BasicReformatterTest::testDocstrings",
"yapftests/reformatter_test.py::BasicReformatterTest::testDontAddBlankLineAfterMultilineString",
"yapftests/reformatter_test.py::BasicReformatterTest::testDontSplitKeywordValueArguments",
"yapftests/reformatter_test.py::BasicReformatterTest::testEmptyContainers",
"yapftests/reformatter_test.py::BasicReformatterTest::testEndingComment",
"yapftests/reformatter_test.py::BasicReformatterTest::testEndingWhitespaceAfterSimpleStatement",
"yapftests/reformatter_test.py::BasicReformatterTest::testExcessCharacters",
"yapftests/reformatter_test.py::BasicReformatterTest::testExcessLineCountWithDefaultKeywords",
"yapftests/reformatter_test.py::BasicReformatterTest::testExpressionPenalties",
"yapftests/reformatter_test.py::BasicReformatterTest::testFormattingListComprehensions",
"yapftests/reformatter_test.py::BasicReformatterTest::testFunctionCallArguments",
"yapftests/reformatter_test.py::BasicReformatterTest::testFunctionCallContinuationLine",
"yapftests/reformatter_test.py::BasicReformatterTest::testFunctionCallInDict",
"yapftests/reformatter_test.py::BasicReformatterTest::testFunctionCallInNestedDict",
"yapftests/reformatter_test.py::BasicReformatterTest::testI18n",
"yapftests/reformatter_test.py::BasicReformatterTest::testI18nCommentsInDataLiteral",
"yapftests/reformatter_test.py::BasicReformatterTest::testI18nNonFormatting",
"yapftests/reformatter_test.py::BasicReformatterTest::testIfConditionalParens",
"yapftests/reformatter_test.py::BasicReformatterTest::testIfExpressionWithFunctionCall",
"yapftests/reformatter_test.py::BasicReformatterTest::testImportAsList",
"yapftests/reformatter_test.py::BasicReformatterTest::testLineDepthOfSingleLineStatement",
"yapftests/reformatter_test.py::BasicReformatterTest::testLineWrapInForExpression",
"yapftests/reformatter_test.py::BasicReformatterTest::testListComprehension",
"yapftests/reformatter_test.py::BasicReformatterTest::testMatchingParenSplittingMatching",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultilineComment",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultilineCommentReformatted",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultilineDocstringAndMultilineComment",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultilineLambdas",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultilineShebang",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultilineString",
"yapftests/reformatter_test.py::BasicReformatterTest::testMultipleUgliness",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoBreakOutsideOfBracket",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoKeywordArgumentBreakage",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoPenaltySplitting",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoQueueSeletionInMiddleOfLine",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSpaceBetweenUnaryOpAndOpeningParen",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSpacesAroundKeywordDefaultValues",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSpacesBetweenOpeningBracketAndStartingOperator",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSpacesBetweenSubscriptsAndCalls",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSplittingAroundTermOperators",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSplittingBeforeEndingSubscriptBracket",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSplittingOnSingleArgument",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSplittingWhenBinPacking",
"yapftests/reformatter_test.py::BasicReformatterTest::testNoSplittingWithinSubscriptList",
"yapftests/reformatter_test.py::BasicReformatterTest::testNotSplittingAfterSubscript",
"yapftests/reformatter_test.py::BasicReformatterTest::testOpeningAndClosingBrackets",
"yapftests/reformatter_test.py::BasicReformatterTest::testOverColumnLimit",
"yapftests/reformatter_test.py::BasicReformatterTest::testRelativeImportStatements",
"yapftests/reformatter_test.py::BasicReformatterTest::testRelaxArraySubscriptAffinity",
"yapftests/reformatter_test.py::BasicReformatterTest::testSimple",
"yapftests/reformatter_test.py::BasicReformatterTest::testSimpleFunctions",
"yapftests/reformatter_test.py::BasicReformatterTest::testSimpleFunctionsWithTrailingComments",
"yapftests/reformatter_test.py::BasicReformatterTest::testSimpleMultilineCode",
"yapftests/reformatter_test.py::BasicReformatterTest::testSimpleMultilineWithComments",
"yapftests/reformatter_test.py::BasicReformatterTest::testSingleComment",
"yapftests/reformatter_test.py::BasicReformatterTest::testSingleLineFunctions",
"yapftests/reformatter_test.py::BasicReformatterTest::testSingleLineList",
"yapftests/reformatter_test.py::BasicReformatterTest::testSpaceAfterNotOperator",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplitListWithComment",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplitListWithInterspersedComments",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplitListWithTerminatingComma",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplitStringsIfSurroundedByParens",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplittingArgumentsTerminatedByComma",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplittingArraysSensibly",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplittingBeforeFirstElementListArgument",
"yapftests/reformatter_test.py::BasicReformatterTest::testSplittingOneArgumentList",
"yapftests/reformatter_test.py::BasicReformatterTest::testStableDictionaryFormatting",
"yapftests/reformatter_test.py::BasicReformatterTest::testTrailerOnSingleLine",
"yapftests/reformatter_test.py::BasicReformatterTest::testTrailingCommaAndBracket",
"yapftests/reformatter_test.py::BasicReformatterTest::testTupleCommaBeforeLastParen",
"yapftests/reformatter_test.py::BasicReformatterTest::testUnaryNotOperator",
"yapftests/reformatter_test.py::BasicReformatterTest::testUnaryOpInDictionaryValue",
"yapftests/reformatter_test.py::BasicReformatterTest::testUnbreakableNot",
"yapftests/reformatter_test.py::BasicReformatterTest::testUnformattedAfterMultilineString",
"yapftests/reformatter_test.py::BuganizerFixes::testB13900309",
"yapftests/reformatter_test.py::BuganizerFixes::testB14406499",
"yapftests/reformatter_test.py::BuganizerFixes::testB14468247",
"yapftests/reformatter_test.py::BuganizerFixes::testB15438132",
"yapftests/reformatter_test.py::BuganizerFixes::testB15542157",
"yapftests/reformatter_test.py::BuganizerFixes::testB15597568",
"yapftests/reformatter_test.py::BuganizerFixes::testB15697268",
"yapftests/reformatter_test.py::BuganizerFixes::testB15884241",
"yapftests/reformatter_test.py::BuganizerFixes::testB16572361",
"yapftests/reformatter_test.py::BuganizerFixes::testB16783631",
"yapftests/reformatter_test.py::BuganizerFixes::testB17011869",
"yapftests/reformatter_test.py::BuganizerFixes::testB17133019",
"yapftests/reformatter_test.py::BuganizerFixes::testB17489866",
"yapftests/reformatter_test.py::BuganizerFixes::testB17534869",
"yapftests/reformatter_test.py::BuganizerFixes::testB18255697",
"yapftests/reformatter_test.py::BuganizerFixes::testB18256666",
"yapftests/reformatter_test.py::BuganizerFixes::testB18256826",
"yapftests/reformatter_test.py::BuganizerFixes::testB18257115",
"yapftests/reformatter_test.py::BuganizerFixes::testB19073499",
"yapftests/reformatter_test.py::BuganizerFixes::testB19194420",
"yapftests/reformatter_test.py::BuganizerFixes::testB19287512",
"yapftests/reformatter_test.py::BuganizerFixes::testB19353268",
"yapftests/reformatter_test.py::BuganizerFixes::testB19372573",
"yapftests/reformatter_test.py::BuganizerFixes::testB19377034",
"yapftests/reformatter_test.py::BuganizerFixes::testB19547210",
"yapftests/reformatter_test.py::BuganizerFixes::testB19626808",
"yapftests/reformatter_test.py::BuganizerFixes::testB20016122",
"yapftests/reformatter_test.py::BuganizerFixes::testB20073838",
"yapftests/reformatter_test.py::BuganizerFixes::testB20127686",
"yapftests/reformatter_test.py::BuganizerFixes::testB20128830",
"yapftests/reformatter_test.py::BuganizerFixes::testB20551180",
"yapftests/reformatter_test.py::BuganizerFixes::testB20559654",
"yapftests/reformatter_test.py::BuganizerFixes::testB20562732",
"yapftests/reformatter_test.py::BuganizerFixes::testB20605036",
"yapftests/reformatter_test.py::BuganizerFixes::testB20813997",
"yapftests/reformatter_test.py::BuganizerFixes::testB20849933",
"yapftests/reformatter_test.py::BuganizerFixes::testB22527411",
"yapftests/reformatter_test.py::BuganizerFixes::testB23445244",
"yapftests/reformatter_test.py::BuganizerFixes::testB23935890",
"yapftests/reformatter_test.py::BuganizerFixes::testB23943842",
"yapftests/reformatter_test.py::BuganizerFixes::testB23944849",
"yapftests/reformatter_test.py::BuganizerFixes::testB25131481",
"yapftests/reformatter_test.py::BuganizerFixes::testB25136704",
"yapftests/reformatter_test.py::BuganizerFixes::testB25136820",
"yapftests/reformatter_test.py::BuganizerFixes::testB25157123",
"yapftests/reformatter_test.py::BuganizerFixes::testB25165602",
"yapftests/reformatter_test.py::BuganizerFixes::testB25324261",
"yapftests/reformatter_test.py::BuganizerFixes::testB25505359",
"yapftests/reformatter_test.py::BuganizerFixes::testB26382315",
"yapftests/reformatter_test.py::BuganizerFixes::testB26868213",
"yapftests/reformatter_test.py::BuganizerFixes::testB27266946",
"yapftests/reformatter_test.py::BuganizerFixes::testB27590179",
"yapftests/reformatter_test.py::BuganizerFixes::testB27616132",
"yapftests/reformatter_test.py::BuganizerFixes::testB28414371",
"yapftests/reformatter_test.py::BuganizerFixes::testB29908765",
"yapftests/reformatter_test.py::BuganizerFixes::testB30087362",
"yapftests/reformatter_test.py::BuganizerFixes::testB30173198",
"yapftests/reformatter_test.py::TestsForPEP8Style::testAlignClosingBracketWithVisualIndentation",
"yapftests/reformatter_test.py::TestsForPEP8Style::testB20016122",
"yapftests/reformatter_test.py::TestsForPEP8Style::testContiguousListEndingWithComment",
"yapftests/reformatter_test.py::TestsForPEP8Style::testContinuedNonOudentedLine",
"yapftests/reformatter_test.py::TestsForPEP8Style::testHangingIndentCollision",
"yapftests/reformatter_test.py::TestsForPEP8Style::testIndent4",
"yapftests/reformatter_test.py::TestsForPEP8Style::testIndentSizeChanging",
"yapftests/reformatter_test.py::TestsForPEP8Style::testNoBlankBetweenClassAndDef",
"yapftests/reformatter_test.py::TestsForPEP8Style::testSingleLineIfStatements",
"yapftests/reformatter_test.py::TestsForPEP8Style::testSingleWhiteBeforeTrailingComment",
"yapftests/reformatter_test.py::TestsForPEP8Style::testSpaceBetweenEndingCommandAndClosingBracket",
"yapftests/reformatter_test.py::TestsForPEP8Style::testSplittingBeforeFirstArgument",
"yapftests/reformatter_test.py::TestsForPEP8Style::testSplittingBeforeLogicalOperator",
"yapftests/reformatter_test.py::TestsForPEP8Style::testSplittingSemicolonStatements",
"yapftests/reformatter_test.py::TestsForPEP8Style::testWrappingPercentExpressions",
"yapftests/reformatter_test.py::TestingNotInParameters::testNotInParams",
"yapftests/reformatter_test.py::TestsForPython3Code::testAnnotations",
"yapftests/reformatter_test.py::TestsForPython3Code::testAsyncFunctions",
"yapftests/reformatter_test.py::TestsForPython3Code::testExecAsNonKeyword",
"yapftests/reformatter_test.py::TestsForPython3Code::testKeywordOnlyArgSpecifier",
"yapftests/reformatter_test.py::TestsForPython3Code::testNoSpacesAroundPowerOparator",
"yapftests/reformatter_test.py::TestsForFBStyle::testBreakAfterOpeningBracketIfContentsTooBig",
"yapftests/reformatter_test.py::TestsForFBStyle::testBrokenIdempotency",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentClosingBracket",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentClosingBracketWithComments",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentIfConditional",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentImportAsNames",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentSet",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentTestListGexp",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentingCallsWithInnerLists",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentingInnerScope",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentingListComprehension",
"yapftests/reformatter_test.py::TestsForFBStyle::testDedentingWithSubscripts",
"yapftests/reformatter_test.py::TestsForFBStyle::testIfExprHangingIndent",
"yapftests/reformatter_test.py::TestsForFBStyle::testMustSplitDedenting",
"yapftests/reformatter_test.py::TestsForFBStyle::testNoNeedForLineBreaks",
"yapftests/reformatter_test.py::TestsForFBStyle::testSimpleDedenting"
]
| []
| Apache License 2.0 | 669 | [
"yapf/__init__.py",
"yapf/yapflib/split_penalty.py",
"CHANGELOG"
]
| [
"yapf/__init__.py",
"yapf/yapflib/split_penalty.py",
"CHANGELOG"
]
|
|
simphony__simphony-remote-186 | adbc9aa11254a094516cdba10234134ab2d71df1 | 2016-07-28 08:47:05 | adbc9aa11254a094516cdba10234134ab2d71df1 | diff --git a/doc/Makefile b/doc/Makefile
index 580483d..22a433c 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -44,6 +44,7 @@ help:
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
@echo " uml to convert all uml files into images"
+ @echo " apidoc to rebuild the apidoc documents"
.PHONY: clean
clean:
diff --git a/doc/source/api/remoteappmanager.rest.http.rst b/doc/source/api/remoteappmanager.rest.http.rst
new file mode 100644
index 0000000..d045864
--- /dev/null
+++ b/doc/source/api/remoteappmanager.rest.http.rst
@@ -0,0 +1,30 @@
+remoteappmanager.rest.http package
+==================================
+
+Submodules
+----------
+
+remoteappmanager.rest.http.httpstatus module
+--------------------------------------------
+
+.. automodule:: remoteappmanager.rest.http.httpstatus
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+remoteappmanager.rest.http.payloaded_http_error module
+------------------------------------------------------
+
+.. automodule:: remoteappmanager.rest.http.payloaded_http_error
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: remoteappmanager.rest.http
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/source/api/remoteappmanager.rest.rst b/doc/source/api/remoteappmanager.rest.rst
index a84fc02..5615c69 100644
--- a/doc/source/api/remoteappmanager.rest.rst
+++ b/doc/source/api/remoteappmanager.rest.rst
@@ -1,6 +1,13 @@
remoteappmanager.rest package
=============================
+Subpackages
+-----------
+
+.. toctree::
+
+ remoteappmanager.rest.http
+
Submodules
----------
@@ -12,14 +19,6 @@ remoteappmanager.rest.exceptions module
:undoc-members:
:show-inheritance:
-remoteappmanager.rest.httpstatus module
----------------------------------------
-
-.. automodule:: remoteappmanager.rest.httpstatus
- :members:
- :undoc-members:
- :show-inheritance:
-
remoteappmanager.rest.registry module
-------------------------------------
diff --git a/doc/source/api/remoteappmanager.rst b/doc/source/api/remoteappmanager.rst
index 84edd77..8db8b7d 100644
--- a/doc/source/api/remoteappmanager.rst
+++ b/doc/source/api/remoteappmanager.rst
@@ -58,6 +58,14 @@ remoteappmanager.jinja2_adapters module
:undoc-members:
:show-inheritance:
+remoteappmanager.netutils module
+--------------------------------
+
+.. automodule:: remoteappmanager.netutils
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
remoteappmanager.paths module
-----------------------------
diff --git a/remoteappmanager/rest/exceptions.py b/remoteappmanager/rest/exceptions.py
index 4be58ac..6ccc584 100644
--- a/remoteappmanager/rest/exceptions.py
+++ b/remoteappmanager/rest/exceptions.py
@@ -1,4 +1,4 @@
-from remoteappmanager.rest import httpstatus
+from remoteappmanager.rest.http import httpstatus
class RESTException(Exception):
@@ -9,6 +9,26 @@ class RESTException(Exception):
#: Missing any better info, default is a server error.
http_code = httpstatus.INTERNAL_SERVER_ERROR
+ def __init__(self, message=None, **kwargs):
+ """Initializes the exception. keyword arguments will become
+ part of the representation as key/value pairs."""
+ self.message = message
+ self.info = kwargs if len(kwargs) else None
+
+ def representation(self):
+ """Returns a dictionary with the representation of the exception.
+ """
+ data = {
+ "type": type(self).__name__
+ }
+ if self.message is not None:
+ data["message"] = self.message
+
+ if self.info is not None:
+ data.update(self.info)
+
+ return data
+
class NotFound(RESTException):
"""Exception raised when the resource is not found.
@@ -17,6 +37,11 @@ class NotFound(RESTException):
"""
http_code = httpstatus.NOT_FOUND
+ def representation(self):
+ """NotFound is special as it does not have a representation,
+ just an error status"""
+ return None
+
class BadRequest(RESTException):
"""Exception raised when the resource representation is
@@ -27,5 +52,8 @@ class BadRequest(RESTException):
http_code = httpstatus.BAD_REQUEST
-class InternalServerError(RESTException):
- pass
+class Unable(RESTException):
+ """Exception raised when the CRUD request cannot be performed
+ for whatever reason that is not dependent on the client.
+ """
+ http_code = httpstatus.INTERNAL_SERVER_ERROR
diff --git a/remoteappmanager/rest/http/__init__.py b/remoteappmanager/rest/http/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/remoteappmanager/rest/httpstatus.py b/remoteappmanager/rest/http/httpstatus.py
similarity index 100%
rename from remoteappmanager/rest/httpstatus.py
rename to remoteappmanager/rest/http/httpstatus.py
diff --git a/remoteappmanager/rest/http/payloaded_http_error.py b/remoteappmanager/rest/http/payloaded_http_error.py
new file mode 100644
index 0000000..dd37b1e
--- /dev/null
+++ b/remoteappmanager/rest/http/payloaded_http_error.py
@@ -0,0 +1,36 @@
+from tornado.web import HTTPError
+
+
+class PayloadedHTTPError(HTTPError):
+ def __init__(self, status_code,
+ payload=None,
+ content_type=None,
+ log_message=None,
+ *args, **kwargs):
+ """Provides a HTTPError that contains a string payload to output
+ as a response. If the payload is None, behaves like a regular
+ HTTPError, producing no payload in the response.
+
+ Parameters
+ ----------
+ payload: str or None
+ The payload as a string
+ content_type: str or None
+ The content type of the payload
+ log_message: str or None
+ The log message. Passed to the HTTPError.
+ """
+ super().__init__(status_code, log_message, *args, **kwargs)
+
+ if payload is not None:
+ if not isinstance(payload, str):
+ raise ValueError("payload must be a string.")
+
+ if content_type is None:
+ content_type = "text/plain"
+ else:
+ if content_type is not None:
+ raise ValueError("Content type specified, but no payload")
+
+ self.content_type = content_type
+ self.payload = payload
diff --git a/remoteappmanager/rest/rest_handler.py b/remoteappmanager/rest/rest_handler.py
index 77a65aa..78a97c9 100644
--- a/remoteappmanager/rest/rest_handler.py
+++ b/remoteappmanager/rest/rest_handler.py
@@ -1,9 +1,10 @@
from remoteappmanager.handlers.base_handler import BaseHandler
-from tornado import gen, web, escape
-
+from remoteappmanager.rest import exceptions
+from remoteappmanager.rest.http import httpstatus
+from remoteappmanager.rest.http.payloaded_http_error import PayloadedHTTPError
from remoteappmanager.rest.registry import registry
from remoteappmanager.utils import url_path_join, with_end_slash
-from remoteappmanager.rest import httpstatus, exceptions
+from tornado import gen, web, escape
class RESTBaseHandler(BaseHandler):
@@ -25,6 +26,44 @@ class RESTBaseHandler(BaseHandler):
except KeyError:
raise web.HTTPError(httpstatus.NOT_FOUND)
+ def write_error(self, status_code, **kwargs):
+ """Provides appropriate payload to the response in case of error.
+ """
+ exc_info = kwargs.get("exc_info")
+
+ if exc_info is None:
+ self.clear_header('Content-Type')
+ self.finish()
+
+ exc = exc_info[1]
+
+ if isinstance(exc, PayloadedHTTPError) and exc.payload is not None:
+ self.set_header('Content-Type', exc.content_type)
+ self.finish(exc.payload)
+ else:
+ # For non-payloaded http errors or any other exception
+ # we don't want to return anything as payload.
+ # The error code is enough.
+ self.clear_header('Content-Type')
+ self.finish()
+
+ def rest_to_http_exception(self, rest_exc):
+ """Converts a REST exception into the appropriate HTTP one."""
+
+ representation = rest_exc.representation()
+ payload = None
+ content_type = None
+
+ if representation is not None:
+ payload = escape.json_encode(representation)
+ content_type = "application/json"
+
+ return PayloadedHTTPError(
+ status_code=rest_exc.http_code,
+ payload=payload,
+ content_type=content_type
+ )
+
class RESTCollectionHandler(RESTBaseHandler):
"""Handler for URLs addressing a collection.
@@ -38,7 +77,7 @@ class RESTCollectionHandler(RESTBaseHandler):
try:
items = yield res_handler.items()
except exceptions.RESTException as e:
- raise web.HTTPError(e.http_code)
+ raise self.rest_to_http_exception(e)
except NotImplementedError:
raise web.HTTPError(httpstatus.METHOD_NOT_ALLOWED)
except Exception:
@@ -67,7 +106,7 @@ class RESTCollectionHandler(RESTBaseHandler):
try:
resource_id = yield res_handler.create(data)
except exceptions.RESTException as e:
- raise web.HTTPError(e.http_code)
+ raise self.rest_to_http_exception(e)
except NotImplementedError:
raise web.HTTPError(httpstatus.METHOD_NOT_ALLOWED)
except Exception:
@@ -87,6 +126,7 @@ class RESTCollectionHandler(RESTBaseHandler):
self.set_status(httpstatus.CREATED)
self.set_header("Location", location)
+ self.clear_header('Content-Type')
self.flush()
@@ -104,7 +144,7 @@ class RESTResourceHandler(RESTBaseHandler):
try:
representation = yield res_handler.retrieve(identifier)
except exceptions.RESTException as e:
- raise web.HTTPError(e.http_code)
+ raise self.rest_to_http_exception(e)
except NotImplementedError:
raise web.HTTPError(httpstatus.METHOD_NOT_ALLOWED)
except Exception:
@@ -128,6 +168,8 @@ class RESTResourceHandler(RESTBaseHandler):
try:
exists = yield res_handler.exists(identifier)
+ except exceptions.RESTException as e:
+ raise self.rest_to_http_exception(e)
except NotImplementedError:
raise web.HTTPError(httpstatus.METHOD_NOT_ALLOWED)
except Exception:
@@ -156,7 +198,7 @@ class RESTResourceHandler(RESTBaseHandler):
try:
yield res_handler.update(identifier, representation)
except exceptions.RESTException as e:
- raise web.HTTPError(e.http_code)
+ raise self.rest_to_http_exception(e)
except NotImplementedError:
raise web.HTTPError(httpstatus.METHOD_NOT_ALLOWED)
except Exception:
@@ -166,6 +208,7 @@ class RESTResourceHandler(RESTBaseHandler):
identifier))
raise web.HTTPError(httpstatus.INTERNAL_SERVER_ERROR)
+ self.clear_header('Content-Type')
self.set_status(httpstatus.NO_CONTENT)
@web.authenticated
@@ -176,7 +219,7 @@ class RESTResourceHandler(RESTBaseHandler):
try:
yield res_handler.delete(identifier)
except exceptions.RESTException as e:
- raise web.HTTPError(e.http_code)
+ raise self.rest_to_http_exception(e)
except NotImplementedError:
raise web.HTTPError(httpstatus.METHOD_NOT_ALLOWED)
except Exception:
@@ -186,4 +229,5 @@ class RESTResourceHandler(RESTBaseHandler):
identifier))
raise web.HTTPError(httpstatus.INTERNAL_SERVER_ERROR)
+ self.clear_header('Content-Type')
self.set_status(httpstatus.NO_CONTENT)
diff --git a/remoteappmanager/restresources/container.py b/remoteappmanager/restresources/container.py
index 5ad389e..90299cb 100644
--- a/remoteappmanager/restresources/container.py
+++ b/remoteappmanager/restresources/container.py
@@ -3,10 +3,8 @@ from datetime import timedelta
from tornado import gen
-from remoteappmanager.docker.docker_labels import SIMPHONY_NS
from remoteappmanager.rest import exceptions
from remoteappmanager.rest.resource import Resource
-from remoteappmanager.docker.container import Container as DockerContainer
from remoteappmanager.utils import url_path_join
from remoteappmanager.netutils import wait_for_http_server_2xx
@@ -17,11 +15,13 @@ class Container(Resource):
"""Create the container.
The representation should accept the application mapping id we
want to start"""
- mapping_id = representation["mapping_id"]
+ try:
+ mapping_id = representation["mapping_id"]
+ except KeyError:
+ raise exceptions.BadRequest(message="missing mapping_id")
account = self.current_user.account
all_apps = self.application.db.get_apps_for_user(account)
- container_manager = self.application.container_manager
choice = [(m_id, app, policy)
for m_id, app, policy in all_apps
@@ -30,10 +30,9 @@ class Container(Resource):
if not choice:
self.log.warning("Could not find resource "
"for mapping id {}".format(mapping_id))
- raise exceptions.BadRequest()
+ raise exceptions.BadRequest(message="unrecognized mapping_id")
_, app, policy = choice[0]
- container = None
try:
container = yield self._start_container(
@@ -41,33 +40,33 @@ class Container(Resource):
app,
policy,
mapping_id)
- yield self._wait_for_container_ready(container)
except Exception as e:
- if container is not None:
- try:
- yield container_manager.stop_and_remove_container(
- container.docker_id)
- except Exception:
- self.log.exception(
- "Unable to stop container {} after "
- " failure to obtain a ready "
- "container".format(
- container.docker_id))
-
- raise exceptions.InternalServerError()
+ raise exceptions.Unable(message=str(e))
+
+ try:
+ yield self._wait_for_container_ready(container)
+ except Exception as e:
+ self._remove_container_noexcept(container)
+ raise exceptions.Unable(message=str(e))
urlpath = url_path_join(
self.application.command_line_config.base_urlpath,
container.urlpath)
- yield self.application.reverse_proxy.register(urlpath,
- container.host_url)
+
+ try:
+ yield self.application.reverse_proxy.register(
+ urlpath, container.host_url)
+ except Exception as e:
+ self._remove_container_noexcept(container)
+ raise exceptions.Unable(message=str(e))
return container.url_id
@gen.coroutine
def retrieve(self, identifier):
"""Return the representation of the running container."""
- container = yield self._container_from_url_id(identifier)
+ container_manager = self.application.container_manager
+ container = yield container_manager.container_from_url_id(identifier)
if container is None:
self.log.warning("Could not find container for id {}".format(
@@ -82,7 +81,9 @@ class Container(Resource):
@gen.coroutine
def delete(self, identifier):
"""Stop the container."""
- container = yield self._container_from_url_id(identifier)
+ container_manager = self.application.container_manager
+ container = yield container_manager.container_from_url_id(identifier)
+
if not container:
self.log.warning("Could not find container for id {}".format(
identifier))
@@ -91,9 +92,22 @@ class Container(Resource):
urlpath = url_path_join(
self.application.command_line_config.base_urlpath,
container.urlpath)
- yield self.application.reverse_proxy.unregister(urlpath)
- yield self.application.container_manager.stop_and_remove_container(
- container.docker_id)
+
+ try:
+ yield self.application.reverse_proxy.unregister(urlpath)
+ except Exception:
+ # If we can't remove the reverse proxy, we cannot do much more
+ # than log the problem and keep going, because we want to stop
+ # the container regardless.
+ self.log.exception("Could not remove reverse "
+ "proxy for id {}".format(identifier))
+
+ try:
+ yield container_manager.stop_and_remove_container(
+ container.docker_id)
+ except Exception:
+ self.log.exception("Could not stop and remove container "
+ "for id {}".format(identifier))
@gen.coroutine
def items(self):
@@ -125,24 +139,26 @@ class Container(Resource):
return running_containers
- @gen.coroutine
- def _container_from_url_id(self, container_url_id):
- """Retrieves and returns the container if valid and present.
+ ##################
+ # Private
- If not present, returns None
- """
+ @gen.coroutine
+ def _remove_container_noexcept(self, container):
+ """Removes container and silences (but logs) all exceptions
+ during this circumstance."""
+ # Note, can't use a context manager to perform this, because
+ # context managers are only allowed to yield once
container_manager = self.application.container_manager
-
- container_dict = yield container_manager.docker_client.containers(
- filters={'label': "{}={}".format(
- SIMPHONY_NS+"url_id",
- container_url_id)})
-
- if not container_dict:
- return None
-
- return DockerContainer.from_docker_dict(container_dict[0])
+ try:
+ yield container_manager.stop_and_remove_container(
+ container.docker_id)
+ except Exception:
+ self.log.exception(
+ "Unable to stop container {} after "
+ " failure to obtain a ready "
+ "container".format(
+ container.docker_id))
@gen.coroutine
def _start_container(self, user_name, app, policy, mapping_id):
diff --git a/remoteappmanager/static/js/remoteappapi.js b/remoteappmanager/static/js/remoteappapi.js
index 3de176a..f7fdb51 100644
--- a/remoteappmanager/static/js/remoteappapi.js
+++ b/remoteappmanager/static/js/remoteappapi.js
@@ -9,7 +9,7 @@ define(['jquery', 'utils'], function ($, utils) {
type: 'GET',
contentType: "application/json",
cache: false,
- dataType : "json",
+ dataType : null,
processData: false,
success: null,
error: null
@@ -46,7 +46,6 @@ define(['jquery', 'utils'], function ($, utils) {
options = options || {};
options = update(options, {
type: 'POST',
- dataType: null,
data: JSON.stringify({
mapping_id: id
})});
@@ -58,7 +57,7 @@ define(['jquery', 'utils'], function ($, utils) {
RemoteAppAPI.prototype.stop_application = function (id, options) {
options = options || {};
- options = update(options, {type: 'DELETE', dataType: null});
+ options = update(options, {type: 'DELETE'});
this.api_request(
utils.url_path_join('containers', id),
options
| Introduce REST error payload delivery
The current design of the REST handler does not deliver error payload.
This is needed now because we want to report REST exceptions with more detail.
| simphony/simphony-remote | diff --git a/tests/rest/http/__init__.py b/tests/rest/http/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/rest/http/test_payloaded_http_error.py b/tests/rest/http/test_payloaded_http_error.py
new file mode 100644
index 0000000..94e4dfa
--- /dev/null
+++ b/tests/rest/http/test_payloaded_http_error.py
@@ -0,0 +1,27 @@
+import unittest
+
+from remoteappmanager.rest.http.payloaded_http_error import PayloadedHTTPError
+
+
+class TestPayloadedHTTPError(unittest.TestCase):
+ def test_init(self):
+ payloaded = PayloadedHTTPError(500, payload=None)
+ self.assertEqual(payloaded.payload, None)
+ self.assertEqual(payloaded.content_type, None)
+
+ with self.assertRaises(ValueError):
+ PayloadedHTTPError(500, payload=123)
+
+ with self.assertRaises(ValueError):
+ PayloadedHTTPError(500, content_type="text/plain")
+
+ payloaded = PayloadedHTTPError(500,
+ payload="hello",
+ content_type="text/html")
+
+ self.assertEqual(payloaded.payload, "hello")
+ self.assertEqual(payloaded.content_type, "text/html")
+
+ payloaded = PayloadedHTTPError(500, payload="hello")
+ self.assertEqual(payloaded.content_type, "text/plain")
+ self.assertEqual(payloaded.status_code, 500)
diff --git a/tests/rest/test_rest.py b/tests/rest/test_rest.py
index db15d58..5e45b51 100644
--- a/tests/rest/test_rest.py
+++ b/tests/rest/test_rest.py
@@ -1,18 +1,17 @@
import unittest
import urllib.parse
-from unittest import mock
-
-from tests import utils
-from tornado import web, gen, escape
from collections import OrderedDict
+from unittest import mock
from remoteappmanager import rest
+from remoteappmanager.rest import registry, exceptions
+from remoteappmanager.rest.http import httpstatus
from remoteappmanager.rest.resource import Resource
from remoteappmanager.rest.rest_handler import RESTResourceHandler, \
RESTCollectionHandler
-from remoteappmanager.rest import registry, httpstatus, exceptions
-
+from tests import utils
from tests.utils import AsyncHTTPTestCase
+from tornado import web, gen, escape
def prepare_side_effect(*args, **kwargs):
@@ -50,7 +49,7 @@ class Student(Resource):
@gen.coroutine
def delete(self, identifier):
if identifier not in self.collection:
- raise exceptions.NotFound
+ raise exceptions.NotFound()
del self.collection[identifier]
@@ -66,11 +65,19 @@ class UnsupportAll(Resource):
class Unprocessable(Resource):
@gen.coroutine
def create(self, representation):
- raise exceptions.BadRequest()
+ raise exceptions.BadRequest("unprocessable", foo="bar")
@gen.coroutine
def update(self, identifier, representation):
- raise exceptions.BadRequest()
+ raise exceptions.BadRequest("unprocessable", foo="bar")
+
+ @gen.coroutine
+ def retrieve(self, identifier):
+ raise exceptions.BadRequest("unprocessable", foo="bar")
+
+ @gen.coroutine
+ def items(self):
+ raise exceptions.BadRequest("unprocessable", foo="bar")
class UnsupportsCollection(Resource):
@@ -183,6 +190,7 @@ class TestREST(AsyncHTTPTestCase):
res = self.fetch("/api/v1/students/1/")
self.assertEqual(res.code, httpstatus.NOT_FOUND)
+ self.assertNotIn("Content-Type", res.headers)
def test_post_on_resource(self):
with mock.patch("remoteappmanager.handlers.base_handler.BaseHandler"
@@ -349,6 +357,24 @@ class TestREST(AsyncHTTPTestCase):
body="{}"
)
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(res.headers["Content-Type"], 'application/json')
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "BadRequest",
+ "message": "unprocessable",
+ "foo": "bar",
+ })
+
+ res = self.fetch(
+ "/api/v1/unprocessables/",
+ method="GET",
+ )
+ self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(res.headers["Content-Type"], 'application/json')
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "BadRequest",
+ "message": "unprocessable",
+ "foo": "bar",
+ })
res = self.fetch(
"/api/v1/unprocessables/0/",
@@ -356,6 +382,37 @@ class TestREST(AsyncHTTPTestCase):
body="{}"
)
self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(res.headers["Content-Type"], 'application/json')
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "BadRequest",
+ "message": "unprocessable",
+ "foo": "bar",
+ })
+
+ res = self.fetch(
+ "/api/v1/unprocessables/0/",
+ method="GET",
+ )
+ self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(res.headers["Content-Type"], 'application/json')
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "BadRequest",
+ "message": "unprocessable",
+ "foo": "bar",
+ })
+
+ res = self.fetch(
+ "/api/v1/unprocessables/0/",
+ method="POST",
+ body="{}"
+ )
+ self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(res.headers["Content-Type"], 'application/json')
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "BadRequest",
+ "message": "unprocessable",
+ "foo": "bar",
+ })
def test_broken(self):
collection_url = "/api/v1/brokens/"
diff --git a/tests/restmodel/test_application.py b/tests/restmodel/test_application.py
index a08a816..a4fd9de 100644
--- a/tests/restmodel/test_application.py
+++ b/tests/restmodel/test_application.py
@@ -1,13 +1,12 @@
from unittest.mock import Mock, patch
+from remoteappmanager import rest
+from remoteappmanager.rest import registry
+from remoteappmanager.rest.http import httpstatus
from remoteappmanager.restresources import Application
from tests import utils
-from tornado import web, escape
-
-from remoteappmanager import rest
-from remoteappmanager.rest import registry, httpstatus
-
from tests.utils import AsyncHTTPTestCase
+from tornado import web, escape
class TestApplication(AsyncHTTPTestCase):
diff --git a/tests/restmodel/test_container.py b/tests/restmodel/test_container.py
index e125b13..97d2bcc 100644
--- a/tests/restmodel/test_container.py
+++ b/tests/restmodel/test_container.py
@@ -1,115 +1,281 @@
-from unittest.mock import Mock, patch
-
-from tornado import escape
-
-from remoteappmanager.rest import httpstatus
+import os
+from unittest.mock import patch
+from remoteappmanager.docker.image import Image
+from remoteappmanager.rest.http import httpstatus
+from remoteappmanager.docker.container import Container as DockerContainer
+from tests.mocking import dummy
+from tests.temp_mixin import TempMixin
from tests.utils import (AsyncHTTPTestCase, mock_coro_factory,
mock_coro_new_callable)
-from tests.mocking import dummy
-from tests.mocking.virtual.docker_client import create_docker_client
+from tornado import escape
-class TestContainer(AsyncHTTPTestCase):
+class TestContainer(TempMixin, AsyncHTTPTestCase):
def setUp(self):
- super().setUp()
+ self._old_proxy_api_token = os.environ.get("PROXY_API_TOKEN", None)
+ os.environ["PROXY_API_TOKEN"] = "dummy_token"
+
+ def cleanup():
+ if self._old_proxy_api_token is not None:
+ os.environ["PROXY_API_TOKEN"] = self._old_proxy_api_token
+ else:
+ del os.environ["PROXY_API_TOKEN"]
- def prepare_side_effect(*args, **kwargs):
- user = Mock()
- user.name = 'user_name'
- args[0].current_user = user
+ self.addCleanup(cleanup)
- self.mock_prepare = mock_coro_new_callable(
- side_effect=prepare_side_effect)
+ super().setUp()
def get_app(self):
- command_line_config = dummy.basic_command_line_config()
- command_line_config.base_urlpath = '/'
- return dummy.create_application(command_line_config)
+ app = dummy.create_application()
+ app.hub.verify_token.return_value = {
+ 'pending': None,
+ 'name': app.settings['user'],
+ 'admin': False,
+ 'server': app.settings['base_urlpath']}
+ return app
def test_items(self):
- with patch("remoteappmanager.handlers.base_handler.BaseHandler.prepare", # noqa
- new_callable=self.mock_prepare):
- res = self.fetch("/api/v1/containers/")
-
- self.assertEqual(res.code, httpstatus.OK)
- self.assertEqual(escape.json_decode(res.body),
- {"items": ['url_id']})
-
- # We have another container running
- self._app.container_manager.docker_client._sync_client = (
- create_docker_client(
- container_ids=('container_id1',),
- container_labels=(
- {'eu.simphony-project.docker.user': 'user_name',
- 'eu.simphony-project.docker.mapping_id': 'mapping_id',
- 'eu.simphony-project.docker.url_id': 'url_id1234'},)))
-
- res = self.fetch("/api/v1/containers/")
- self.assertEqual(res.code, httpstatus.OK)
- self.assertEqual(escape.json_decode(res.body),
- {"items": ["url_id1234"]})
+ manager = self._app.container_manager
+ manager.image = mock_coro_factory(Image())
+ manager.containers_from_mapping_id = mock_coro_factory(
+ [DockerContainer()])
+
+ res = self.fetch(
+ "/user/username/api/v1/containers/",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ )
+
+ self.assertEqual(res.code, httpstatus.OK)
+
+ self.assertEqual(escape.json_decode(res.body),
+ {"items": ["", ""]})
def test_create(self):
- with patch("remoteappmanager.handlers.base_handler.BaseHandler.prepare", # noqa
- new_callable=self.mock_prepare), \
- patch("remoteappmanager.restresources.container.wait_for_http_server_2xx", # noqa
- new_callable=mock_coro_factory), \
- patch("remoteappmanager.docker.container_manager._generate_container_url_id", # noqa
- return_value="12345678"):
+ with patch("remoteappmanager"
+ ".restresources"
+ ".container"
+ ".wait_for_http_server_2xx",
+ new_callable=mock_coro_new_callable()):
+ manager = self._app.container_manager
+ manager.start_container = mock_coro_factory(DockerContainer(
+ url_id="3456"
+ ))
res = self.fetch(
- "/api/v1/containers/",
+ "/user/username/api/v1/containers/",
method="POST",
- body=escape.json_encode({'mapping_id': 'mapping_id'}))
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode(dict(
+ mapping_id="mapping_id"
+ )))
self.assertEqual(res.code, httpstatus.CREATED)
# The port is random due to testing env. Check if it's absolute
self.assertIn("http://", res.headers["Location"])
- self.assertIn("/api/v1/containers/12345678",
- res.headers["Location"])
+ self.assertIn("/api/v1/containers/3456/", res.headers["Location"])
def test_create_fails(self):
- with patch("remoteappmanager.handlers.base_handler.BaseHandler.prepare", # noqa
- new_callable=self.mock_prepare), \
- patch("remoteappmanager.restresources.container.wait_for_http_server_2xx", # noqa
- new_callable=mock_coro_new_callable(
- side_effect=TimeoutError())):
+ with patch("remoteappmanager"
+ ".restresources"
+ ".container"
+ ".wait_for_http_server_2xx",
+ new_callable=mock_coro_new_callable(
+ side_effect=TimeoutError("timeout"))):
+ self._app.container_manager.stop_and_remove_container = \
+ mock_coro_factory()
res = self.fetch(
- "/api/v1/containers/",
+ "/user/username/api/v1/containers/",
method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
body=escape.json_encode(dict(
mapping_id="mapping_id"
)))
self.assertEqual(res.code, httpstatus.INTERNAL_SERVER_ERROR)
- client = self._app.container_manager.docker_client._sync_client
- self.assertTrue(client.stop.called)
- self.assertTrue(client.remove_container.called)
+ self.assertTrue(
+ self._app.container_manager.stop_and_remove_container.called)
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "Unable",
+ "message": "timeout"})
- def test_retrieve(self):
- with patch("remoteappmanager.handlers.base_handler.BaseHandler.prepare", # noqa
- new_callable=self.mock_prepare):
+ def test_create_fails_for_reverse_proxy_failure(self):
+ with patch("remoteappmanager"
+ ".restresources"
+ ".container"
+ ".wait_for_http_server_2xx",
+ new_callable=mock_coro_new_callable()):
+
+ self._app.container_manager.stop_and_remove_container = \
+ mock_coro_factory()
+ self._app.reverse_proxy.register = mock_coro_factory(
+ side_effect=Exception("Boom!"))
+
+ res = self.fetch(
+ "/user/username/api/v1/containers/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode(dict(
+ mapping_id="mapping_id"
+ )))
- res = self.fetch("/api/v1/containers/notfound/")
- self.assertEqual(res.code, httpstatus.NOT_FOUND)
+ self.assertEqual(res.code, httpstatus.INTERNAL_SERVER_ERROR)
+ self.assertTrue(
+ self._app.container_manager.stop_and_remove_container.called)
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "Unable",
+ "message": "Boom!"})
- res = self.fetch("/api/v1/containers/url_id/")
- self.assertEqual(res.code, httpstatus.OK)
+ def test_create_fails_for_start_container_failure(self):
+ with patch("remoteappmanager"
+ ".restresources"
+ ".container"
+ ".wait_for_http_server_2xx",
+ new_callable=mock_coro_new_callable()):
- content = escape.json_decode(res.body)
- self.assertEqual(content["image_name"], "image_name1")
- self.assertEqual(content["name"],
- "/remoteexec-username-mapping_5Fid")
+ self._app.container_manager.stop_and_remove_container = \
+ mock_coro_factory()
+ self._app.container_manager.start_container = mock_coro_factory(
+ side_effect=Exception("Boom!"))
+
+ res = self.fetch(
+ "/user/username/api/v1/containers/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode(dict(
+ mapping_id="mapping_id"
+ )))
+
+ self.assertEqual(res.code, httpstatus.INTERNAL_SERVER_ERROR)
+ self.assertEqual(escape.json_decode(res.body), {
+ "type": "Unable",
+ "message": "Boom!"})
+
+ def test_create_fails_for_missing_mapping_id(self):
+ res = self.fetch(
+ "/user/username/api/v1/containers/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode(dict(
+ whatever="123"
+ )))
+
+ self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(escape.json_decode(res.body),
+ {"type": "BadRequest",
+ "message": "missing mapping_id"})
+
+ def test_create_fails_for_invalid_mapping_id(self):
+ res = self.fetch(
+ "/user/username/api/v1/containers/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode(dict(
+ mapping_id="whatever"
+ )))
+
+ self.assertEqual(res.code, httpstatus.BAD_REQUEST)
+ self.assertEqual(escape.json_decode(res.body),
+ {"type": "BadRequest",
+ "message": "unrecognized mapping_id"})
+
+ def test_retrieve(self):
+ self._app.container_manager.container_from_url_id = mock_coro_factory(
+ DockerContainer()
+ )
+ res = self.fetch("/user/username/api/v1/containers/found/",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ })
+ self.assertEqual(res.code, httpstatus.OK)
+
+ content = escape.json_decode(res.body)
+ self.assertEqual(content["image_name"], "")
+ self.assertEqual(content["name"], "")
+
+ self._app.container_manager.container_from_url_id = \
+ mock_coro_factory(return_value=None)
+ res = self.fetch("/user/username/api/v1/containers/notfound/",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ })
+ self.assertEqual(res.code, httpstatus.NOT_FOUND)
def test_delete(self):
- with patch("remoteappmanager.handlers.base_handler.BaseHandler.prepare", # noqa
- new_callable=self.mock_prepare):
+ self._app.container_manager.container_from_url_id = mock_coro_factory(
+ DockerContainer()
+ )
+ res = self.fetch("/user/username/api/v1/containers/found/",
+ method="DELETE",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ })
+ self.assertEqual(res.code, httpstatus.NO_CONTENT)
+
+ self._app.container_manager.container_from_url_id = \
+ mock_coro_factory(return_value=None)
+ res = self.fetch("/user/username/api/v1/containers/notfound/",
+ method="DELETE",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ })
+ self.assertEqual(res.code, httpstatus.NOT_FOUND)
+
+ def test_post_start(self):
+ with patch("remoteappmanager"
+ ".restresources"
+ ".container"
+ ".wait_for_http_server_2xx",
+ new_callable=mock_coro_factory):
+ self._app.container_manager.containers_from_mapping_id = \
+ mock_coro_factory(return_value=[DockerContainer()])
+
+ self.assertFalse(self._app.reverse_proxy.register.called)
+ self.fetch("/user/username/api/v1/containers/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode({"mapping_id": "mapping_id"}))
+
+ self.assertTrue(self._app.reverse_proxy.register.called)
+
+ def test_post_failed_auth(self):
+ self._app.hub.verify_token.return_value = {}
+
+ res = self.fetch("/user/username/api/v1/containers/",
+ method="POST",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ },
+ body=escape.json_encode({"mapping_id": "12345"}))
+
+ self.assertGreaterEqual(res.code, 400)
- res = self.fetch("/api/v1/containers/notfound/", method="DELETE")
- self.assertEqual(res.code, httpstatus.NOT_FOUND)
+ def test_stop(self):
+ self._app.container_manager.container_from_url_id = mock_coro_factory(
+ DockerContainer()
+ )
+ self.fetch("/user/username/api/v1/containers/12345/",
+ method="DELETE",
+ headers={
+ "Cookie": "jupyter-hub-token-username=foo"
+ })
- res = self.fetch("/api/v1/containers/url_id/", method="DELETE")
- self.assertEqual(res.code, httpstatus.NO_CONTENT)
+ self.assertTrue(self._app.reverse_proxy.unregister.called)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 7
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
coverage==7.8.0
cryptography==44.0.2
docker-py==1.10.6
docker-pycreds==0.4.0
escapism==1.0.1
exceptiongroup==1.2.2
flake8==7.2.0
fqdn==1.5.1
greenlet==3.1.1
h11==0.14.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyter_client==8.6.3
jupyter_core==5.7.2
jupyterhub==5.2.1
Mako==1.3.9
MarkupSafe==3.0.2
mccabe==0.7.0
oauthlib==3.2.2
outcome==1.3.0.post0
packaging==24.2
pamela==1.2.0
platformdirs==4.3.7
pluggy==1.5.0
prometheus_client==0.21.1
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
PySocks==1.7.1
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
pyzmq==26.3.0
referencing==0.36.2
-e git+https://github.com/simphony/simphony-remote.git@adbc9aa11254a094516cdba10234134ab2d71df1#egg=remoteappmanager
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
selenium==4.30.0
six==1.17.0
sniffio==1.3.1
sortedcontainers==2.4.0
SQLAlchemy==2.0.40
tabulate==0.9.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
trio==0.29.0
trio-websocket==0.12.2
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
websocket-client==1.8.0
wsproto==1.2.0
zipp==3.21.0
| name: simphony-remote
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- coverage==7.8.0
- cryptography==44.0.2
- docker-py==1.10.6
- docker-pycreds==0.4.0
- escapism==1.0.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- fqdn==1.5.1
- greenlet==3.1.1
- h11==0.14.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- mccabe==0.7.0
- oauthlib==3.2.2
- outcome==1.3.0.post0
- packaging==24.2
- pamela==1.2.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycodestyle==2.13.0
- pycparser==2.22
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pysocks==1.7.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- selenium==4.30.0
- six==1.17.0
- sniffio==1.3.1
- sortedcontainers==2.4.0
- sqlalchemy==2.0.40
- tabulate==0.9.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- trio==0.29.0
- trio-websocket==0.12.2
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- websocket-client==1.8.0
- wsproto==1.2.0
- zipp==3.21.0
prefix: /opt/conda/envs/simphony-remote
| [
"tests/rest/http/test_payloaded_http_error.py::TestPayloadedHTTPError::test_init",
"tests/rest/test_rest.py::TestREST::test_broken",
"tests/rest/test_rest.py::TestREST::test_create",
"tests/rest/test_rest.py::TestREST::test_delete",
"tests/rest/test_rest.py::TestREST::test_items",
"tests/rest/test_rest.py::TestREST::test_post_non_json",
"tests/rest/test_rest.py::TestREST::test_post_on_resource",
"tests/rest/test_rest.py::TestREST::test_retrieve",
"tests/rest/test_rest.py::TestREST::test_unexistent_resource_type",
"tests/rest/test_rest.py::TestREST::test_unprocessable",
"tests/rest/test_rest.py::TestREST::test_unsupported_methods",
"tests/rest/test_rest.py::TestREST::test_unsupports_collections",
"tests/rest/test_rest.py::TestREST::test_update",
"tests/rest/test_rest.py::TestRESTFunctions::test_api_handlers",
"tests/restmodel/test_application.py::TestApplication::test_items",
"tests/restmodel/test_application.py::TestApplication::test_retrieve"
]
| [
"tests/restmodel/test_container.py::TestContainer::test_create",
"tests/restmodel/test_container.py::TestContainer::test_create_fails",
"tests/restmodel/test_container.py::TestContainer::test_create_fails_for_invalid_mapping_id",
"tests/restmodel/test_container.py::TestContainer::test_create_fails_for_missing_mapping_id",
"tests/restmodel/test_container.py::TestContainer::test_create_fails_for_reverse_proxy_failure",
"tests/restmodel/test_container.py::TestContainer::test_create_fails_for_start_container_failure",
"tests/restmodel/test_container.py::TestContainer::test_delete",
"tests/restmodel/test_container.py::TestContainer::test_items",
"tests/restmodel/test_container.py::TestContainer::test_post_failed_auth",
"tests/restmodel/test_container.py::TestContainer::test_post_start",
"tests/restmodel/test_container.py::TestContainer::test_retrieve",
"tests/restmodel/test_container.py::TestContainer::test_stop"
]
| []
| []
| BSD 3-Clause "New" or "Revised" License | 670 | [
"doc/source/api/remoteappmanager.rst",
"doc/source/api/remoteappmanager.rest.http.rst",
"remoteappmanager/rest/http/payloaded_http_error.py",
"remoteappmanager/rest/rest_handler.py",
"doc/source/api/remoteappmanager.rest.rst",
"remoteappmanager/rest/httpstatus.py",
"remoteappmanager/rest/exceptions.py",
"remoteappmanager/restresources/container.py",
"remoteappmanager/rest/http/__init__.py",
"remoteappmanager/static/js/remoteappapi.js",
"doc/Makefile"
]
| [
"doc/source/api/remoteappmanager.rst",
"doc/source/api/remoteappmanager.rest.http.rst",
"remoteappmanager/rest/http/payloaded_http_error.py",
"remoteappmanager/rest/rest_handler.py",
"remoteappmanager/restresources/container.py",
"doc/source/api/remoteappmanager.rest.rst",
"remoteappmanager/static/js/remoteappapi.js",
"remoteappmanager/rest/exceptions.py",
"remoteappmanager/rest/http/httpstatus.py",
"remoteappmanager/rest/http/__init__.py",
"doc/Makefile"
]
|
|
glyph__txsni-10 | e8a243a05f0f6eb81e06098fb18f35c11b7cdc3e | 2016-07-29 16:55:46 | e8a243a05f0f6eb81e06098fb18f35c11b7cdc3e | Lukasa: It's also entirely possible that I'm an idiot and that this code is overly complex. Who knows? There aren't any tests, as well, so that makes me nervous.
codecov-io: ## [Current coverage](https://codecov.io/gh/glyph/txsni/pull/10?src=pr) is 39.41% (diff: 37.97%)
> No coverage report found for **master** at 005403a.
> Powered by [Codecov](https://codecov.io?src=pr). Last update [005403a...939c817](https://codecov.io/gh/glyph/txsni/compare/005403abb6b5e53643dae922b88e385706f46275...939c817dc8e319b99a6a78da13aff3a2b2bf2053?src=pr)
glyph: @Lukasa Looks like codecov disagrees (did it just break)
glyph: Oh okay there are actually no tests.
@Lukasa - This actually looks like a fine direction, but it needs some tweaking:
1. `SNIMap.getContext` is dead code now, and can be removed
2. It should no longer inherit from `ContextFactory`
3. Similarly we should be calling `serverConnectionForTLS` to get the `Connection` rather than instantiating it ourselves; we can retrieve its `context` via `Connection.get_context` after the fact.
4. write an test
Also we should probably check in with @mithrandi before doing a release
mithrandi: txacme + h2 seems to work just fine with this branch:
```
* issuer: CN=Fake LE Intermediate X1
* ALPN, server accepted to use h2
* Using HTTP2, server supports multi-use
* Connection state changed (HTTP/2 confirmed)
```
Anything else you need from me? :)
glyph: @mithrandi Huh. Good to know! I am wondering if we need to leave `getContext` in place though; does `txacme` touch it at that level?
mithrandi: Nope; the only "touching" that happens is passing a wrapped host mapping to `SNIMap`.
mithrandi: Basically the only thing `txacme` does with `SNIMap` is `TLSMemoryBIOFactory(contextFactory=SNIMap(...))` so it doesn't care about the implementation details at all.
glyph: Fantastic, then the cleanup shouldn't matter.
mithrandi: Could someone put "fixes #10" in the description of this PR? :)
glyph: @mithrandi I stuffed some words into @Lukasa's mouth on the first comment there, hopefully that will do
Lukasa: Ok, assuming I'm not caught up with family stuff this weekend imma have a swing at @glyph's changes for this PR.
Lukasa: Ok I've done the first two. The third one I can't do (@glyph incorrectly believed that `CertificateOptions` implemented `IOpenSSLServerConnectionCreator`, but it doesn't). The fourth one is coming.
Lukasa: Ok, I added tests.
I'm not really happy about this. There are a lot of things I can't easily test in this manner: for example, TxSNI's fallback to use the DEFAULT.pem file is hard to test with Twisted's endpoints because they don't actually let you omit SNI.
Anyway, these tests include a basic bit of "does this legitimately work at all" stuff, including for protocol negotiation. So that is something.
Lukasa: @glyph, you monster! The tests against Twisted 13.2 blow up because IOpenSSLConnectionCreator doesn't exist. So...why did I add it? ;)
mithrandi: @Lukasa As the one who picked "Twisted 13.2" as the minimum version to test on, I can tell you that the choice was essentially arbitrary; let's just bump the minimum version to whatever we need to support this?
Lukasa: Twisted 14.0, I think, then.
Lukasa: So on the coverage front, the thing we seem to be missing is the ALPN stuff. This isn't a surprise: it turns out ubuntu sucks and doesn't have a recent OpenSSL.
glyph: 14.0 is the minimum acceptable version of Twisted for *anything* in production. Before that we don't even verify certs.
Lukasa: Bump for @glyph.
glyph: @Lukasa If I understand the question for me, it is: Travis-CI isn't covering the NPN stuff in this PR because the base image that Travis is testing with has an OpenSSL that's too old?
My solution to that would be: could you submit a separate PR that just bumps the OpenSSL version, either by building one in the travis config, or by selecting a more recent OS to run on travis (perhaps with their "docker" support, I dunno) and then we can land that first? | diff --git a/.travis.yml b/.travis.yml
index 0db1bb1..229d059 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -12,10 +12,10 @@ env:
- TOXENV=py27-twlatest
- TOXENV=pypy-twlatest OPENSSL_VERSION=1.0.2h
- TOXENV=pypy-twlatest
- - TOXENV=py27-tw132 OPENSSL_VERSION=1.0.2h
- - TOXENV=py27-tw132
- - TOXENV=pypy-tw132 OPENSSL_VERSION=1.0.2h
- - TOXENV=pypy-tw132
+ - TOXENV=py27-tw14 OPENSSL_VERSION=1.0.2h
+ - TOXENV=py27-tw14
+ - TOXENV=pypy-tw14 OPENSSL_VERSION=1.0.2h
+ - TOXENV=pypy-tw14
- TOXENV=py27-twtrunk OPENSSL_VERSION=1.0.2h
- TOXENV=py27-twtrunk
- TOXENV=pypy-twtrunk OPENSSL_VERSION=1.0.2h
diff --git a/setup.py b/setup.py
index d6dd2e2..dd239b4 100644
--- a/setup.py
+++ b/setup.py
@@ -13,10 +13,12 @@ setup(
description="easy-to-use SNI endpoint for twisted",
packages=[
"txsni",
+ "txsni.test",
+ "txsni.test.certs",
"twisted.plugins",
],
install_requires=[
- "Twisted[tls]>=13.2",
+ "Twisted[tls]>=14.0",
"pyOpenSSL>=0.14",
],
version="0.1.6",
diff --git a/tox.ini b/tox.ini
index 0981094..a55d144 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = coverage-clean,{py27,pypy}-{twtrunk,twlatest,tw132},coverage-report
+envlist = coverage-clean,{py27,pypy}-{twtrunk,twlatest,tw14},coverage-report
[testenv:coverage-clean]
deps = coverage
@@ -19,10 +19,11 @@ whitelist_externals =
deps =
twlatest: Twisted[tls]
twtrunk: https://github.com/twisted/twisted/archive/trunk.zip#egg=Twisted[tls]
- tw132: Twisted[tls]==13.2
+ tw132: Twisted[tls]==14.0
coverage
+ cryptography
commands =
pip list
mkdir -p {envtmpdir}
coverage run --parallel-mode \
- {envdir}/bin/trial --temp-directory={envtmpdir}/_trial_temp {posargs:txsni}
+ -m twisted.trial --temp-directory={envtmpdir}/_trial_temp {posargs:txsni}
diff --git a/txsni/snimap.py b/txsni/snimap.py
index b436aa7..ea75d23 100644
--- a/txsni/snimap.py
+++ b/txsni/snimap.py
@@ -1,12 +1,125 @@
+import collections
+
+from zope.interface import implementer
+
+from OpenSSL.SSL import Connection
+
+from twisted.internet.interfaces import IOpenSSLServerConnectionCreator
from twisted.internet.ssl import CertificateOptions, ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
-class SNIMap(ContextFactory, object):
+
+class _NegotiationData(object):
+ """
+ A container for the negotiation data.
+ """
+ __slots__ = [
+ 'npnAdvertiseCallback',
+ 'npnSelectCallback',
+ 'alpnSelectCallback',
+ 'alpnProtocols'
+ ]
+
+ def __init__(self):
+ self.npnAdvertiseCallback = None
+ self.npnSelectCallback = None
+ self.alpnSelectCallback = None
+ self.alpnProtocols = None
+
+ def negotiateNPN(self, context):
+ if self.npnAdvertiseCallback is None or self.npnSelectCallback is None:
+ return
+
+ context.set_npn_advertise_callback(self.npnAdvertiseCallback)
+ context.set_npn_select_callback(self.npnSelectCallback)
+
+ def negotiateALPN(self, context):
+ if self.alpnSelectCallback is None or self.alpnProtocols is None:
+ return
+
+ context.set_alpn_select_callback(self.alpnSelectCallback)
+ context.set_alpn_protos(self.alpnProtocols)
+
+
+class _ConnectionProxy(object):
+ """
+ A basic proxy for an OpenSSL Connection object that returns a ContextProxy
+ wrapping the actual OpenSSL Context whenever it's asked for.
+ """
+ def __init__(self, original, factory):
+ self._obj = original
+ self._factory = factory
+
+ def get_context(self):
+ """
+ A basic override of get_context to ensure that the appropriate proxy
+ object is returned.
+ """
+ ctx = self._obj.get_context()
+ return _ContextProxy(ctx, self._factory)
+
+ def __getattr__(self, attr):
+ return getattr(self._obj, attr)
+
+ def __setattr__(self, attr, val):
+ if attr in ('_obj', '_factory'):
+ self.__dict__[attr] = val
+
+ return setattr(self._obj, attr, val)
+
+ def __delattr__(self, attr):
+ return delattr(self._obj, attr)
+
+
+class _ContextProxy(object):
+ """
+ A basic proxy object for the OpenSSL Context object that records the
+ values of the NPN/ALPN callbacks, to ensure that they get set appropriately
+ if a context is swapped out during connection setup.
+ """
+ def __init__(self, original, factory):
+ self._obj = original
+ self._factory = factory
+
+ def set_npn_advertise_callback(self, cb):
+ self._factory._npnAdvertiseCallbackForContext(self._obj, cb)
+ return self._obj.set_npn_advertise_callback(cb)
+
+ def set_npn_select_callback(self, cb):
+ self._factory._npnSelectCallbackForContext(self._obj, cb)
+ return self._obj.set_npn_select_callback(cb)
+
+ def set_alpn_select_callback(self, cb):
+ self._factory._alpnSelectCallbackForContext(self._obj, cb)
+ return self._obj.set_alpn_select_callback(cb)
+
+ def set_alpn_protos(self, protocols):
+ self._factory._alpnProtocolsForContext(self._obj, protocols)
+ return self._obj.set_alpn_protos(protocols)
+
+ def __getattr__(self, attr):
+ return getattr(self._obj, attr)
+
+ def __setattr__(self, attr, val):
+ if attr in ('_obj', '_factory'):
+ self.__dict__[attr] = val
+
+ return setattr(self._obj, attr, val)
+
+ def __delattr__(self, attr):
+ return delattr(self._obj, attr)
+
+
+@implementer(IOpenSSLServerConnectionCreator)
+class SNIMap(object):
def __init__(self, mapping):
self.mapping = mapping
+ self._negotiationDataForContext = collections.defaultdict(
+ _NegotiationData
+ )
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
@@ -15,15 +128,42 @@ class SNIMap(ContextFactory, object):
self.selectContext
)
- def getContext(self):
- return self.context
-
def selectContext(self, connection):
- connection.set_context(
- self.mapping[connection.get_servername()]
- .getContext()
+ oldContext = connection.get_context()
+ newContext = self.mapping[connection.get_servername()].getContext()
+
+ negotiationData = self._negotiationDataForContext[oldContext]
+ negotiationData.negotiateNPN(newContext)
+ negotiationData.negotiateALPN(newContext)
+
+ connection.set_context(newContext)
+
+ def serverConnectionForTLS(self, protocol):
+ """
+ Construct an OpenSSL server connection.
+
+ @param protocol: The protocol initiating a TLS connection.
+ @type protocol: L{TLSMemoryBIOProtocol}
+
+ @return: a connection
+ @rtype: L{OpenSSL.SSL.Connection}
+ """
+ conn = Connection(self.context, None)
+ return _ConnectionProxy(conn, self)
+
+ def _npnAdvertiseCallbackForContext(self, context, callback):
+ self._negotiationDataForContext[context].npnAdvertiseCallback = (
+ callback
)
+ def _npnSelectCallbackForContext(self, context, callback):
+ self._negotiationDataForContext[context].npnSelectCallback = callback
+
+ def _alpnSelectCallbackForContext(self, context, callback):
+ self._negotiationDataForContext[context].alpnSelectCallback = callback
+
+ def _alpnProtocolsForContext(self, context, protocols):
+ self._negotiationDataForContext[context].alpnProtocols = protocols
class HostDirectoryMap(object):
| txsni breaks protocol negotiation.
In Twisted 16.3 we added `IProtocolNegotiationFactory`, which `TLSMemoryBIOProtocol` uses to determine what protocols (if any) should be negotiated via ALPN/NPN. `TLSMemoryBIOProtocol` does this when `_createConnection` fires, potentially setting callbacks and data on the context returned by calling `.getContext()` on the the connection object from `serverConnectionForTLS`.
Unfortunately, txsni potentially swaps out the TLS contexts midway through the TLS handshake. This means that the callbacks and data that `TLSMemoryBIOProtocol` sets are only ever set on the *default* TLS context: the others, potentially swapped in later in the connection process, do not have this data provided to them.
This means that, in practice, txsni and protocol negotiation do not work together. | glyph/txsni | diff --git a/txsni/test/__init__.py b/txsni/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/txsni/test/certs/__init__.py b/txsni/test/certs/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/txsni/test/certs/cert_builder.py b/txsni/test/certs/cert_builder.py
new file mode 100644
index 0000000..3e826f7
--- /dev/null
+++ b/txsni/test/certs/cert_builder.py
@@ -0,0 +1,175 @@
+from __future__ import print_function, absolute_import
+
+from cryptography import x509
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+from cryptography.x509.oid import NameOID
+import datetime
+import uuid
+import os
+import tempfile
+
+
+ONE_DAY = datetime.timedelta(1, 0, 0)
+THIRTYISH_YEARS = datetime.timedelta(30 * 365, 0, 0)
+TENISH_YEARS = datetime.timedelta(10 * 365, 0, 0)
+
+
+# Various exportable constants that the tests can (and should!) use.
+CERT_DIR = tempfile.mkdtemp()
+ROOT_CERT_PATH = os.path.join(CERT_DIR, 'root_cert.pem')
+ROOT_KEY_PATH = os.path.join(CERT_DIR, 'root_cert.key')
+DEFAULT_CERT_PATH = os.path.join(CERT_DIR, 'DEFAULT.pem')
+DEFAULT_KEY_PATH = os.path.join(CERT_DIR, 'DEFAULT.key')
+HTTP2BIN_CERT_PATH = os.path.join(CERT_DIR, 'http2bin.org.pem')
+HTTP2BIN_KEY_PATH = os.path.join(CERT_DIR, 'http2bin.org.key')
+
+
+# A list of tuples that controls what certs get built and signed by the root.
+# Each tuple is (hostname, cert_path)
+# We'll probably never need the easy extensibility this provides, but hey, nvm!
+_CERTS = [
+ (u'localhost', DEFAULT_CERT_PATH),
+ (u'http2bin.org', HTTP2BIN_CERT_PATH),
+]
+
+
+def _build_root_cert():
+ """
+ Builds a single root certificate that can be used to sign the others. This
+ root cert is basically pretty legit, except for being totally bonkers.
+ Returns a tuple of (certificate, key) for the CA, which can be used to
+ build the leaves.
+ """
+ if os.path.isfile(ROOT_CERT_PATH) and os.path.isfile(ROOT_KEY_PATH):
+ print("Root already exists, not regenerating.")
+ with open(ROOT_CERT_PATH, 'rb') as f:
+ certificate = x509.load_pem_x509_certificate(
+ f.read(), default_backend()
+ )
+ with open(ROOT_KEY_PATH, 'rb') as f:
+ key = serialization.load_pem_private_key(
+ f.read(), password=None, backend=default_backend()
+ )
+ return certificate, key
+
+ private_key = rsa.generate_private_key(
+ public_exponent=65537,
+ key_size=2048,
+ backend=default_backend()
+ )
+ public_key = private_key.public_key()
+ builder = x509.CertificateBuilder()
+ builder = builder.subject_name(x509.Name([
+ x509.NameAttribute(NameOID.COMMON_NAME, u'txsni signing service'),
+ ]))
+ builder = builder.issuer_name(x509.Name([
+ x509.NameAttribute(NameOID.COMMON_NAME, u'txsni signing service'),
+ ]))
+ builder = builder.not_valid_before(datetime.datetime.today() - ONE_DAY)
+ builder = builder.not_valid_after(
+ datetime.datetime.today() + THIRTYISH_YEARS
+ )
+ builder = builder.serial_number(int(uuid.uuid4()))
+ builder = builder.public_key(public_key)
+
+ # Don't allow intermediates.
+ builder = builder.add_extension(
+ x509.BasicConstraints(ca=True, path_length=0), critical=True,
+ )
+
+ certificate = builder.sign(
+ private_key=private_key, algorithm=hashes.SHA256(),
+ backend=default_backend()
+ )
+
+ # Write it out.
+ with open(ROOT_KEY_PATH, 'wb') as f:
+ f.write(
+ private_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption()
+ )
+ )
+
+ with open(ROOT_CERT_PATH, 'wb') as f:
+ f.write(
+ certificate.public_bytes(serialization.Encoding.PEM)
+ )
+
+ print("Built root certificate.")
+
+ return certificate, private_key
+
+
+def _build_single_leaf(hostname, certfile, ca_cert, ca_key):
+ """
+ Builds a single leaf certificate, signed by the CA's private key.
+ """
+ if os.path.isfile(certfile):
+ print("%s already exists, not regenerating" % hostname)
+ return
+
+ private_key = rsa.generate_private_key(
+ public_exponent=65537,
+ key_size=2048,
+ backend=default_backend()
+ )
+ public_key = private_key.public_key()
+ builder = x509.CertificateBuilder()
+ builder = builder.subject_name(x509.Name([
+ x509.NameAttribute(NameOID.COMMON_NAME, hostname),
+ ]))
+ builder = builder.issuer_name(ca_cert.subject)
+ builder = builder.not_valid_before(datetime.datetime.today() - ONE_DAY)
+ builder = builder.not_valid_after(
+ datetime.datetime.today() + TENISH_YEARS
+ )
+ builder = builder.serial_number(int(uuid.uuid4()))
+ builder = builder.public_key(public_key)
+
+ builder = builder.add_extension(
+ x509.BasicConstraints(ca=False, path_length=None), critical=True,
+ )
+ builder = builder.add_extension(
+ x509.SubjectAlternativeName([
+ x509.DNSName(hostname)
+ ]),
+ critical=True,
+ )
+
+ certificate = builder.sign(
+ private_key=ca_key, algorithm=hashes.SHA256(),
+ backend=default_backend()
+ )
+
+ # Write it out.
+ with open(certfile, 'wb') as f:
+ f.write(
+ private_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption()
+ )
+ )
+ f.write(
+ certificate.public_bytes(serialization.Encoding.PEM)
+ )
+
+ print("Built certificate for %s" % hostname)
+
+
+def _build_certs():
+ """
+ Builds all certificates.
+ """
+ ca_cert, ca_key = _build_root_cert()
+
+ for hostname, certfile in _CERTS:
+ _build_single_leaf(hostname, certfile, ca_cert, ca_key)
+
+
+if __name__ == '__main__':
+ _build_certs()
diff --git a/txsni/test/test_txsni.py b/txsni/test/test_txsni.py
new file mode 100644
index 0000000..5e84369
--- /dev/null
+++ b/txsni/test/test_txsni.py
@@ -0,0 +1,257 @@
+from __future__ import absolute_import
+
+import os
+
+from txsni.snimap import SNIMap, HostDirectoryMap
+from txsni.tlsendpoint import TLSEndpoint
+
+from OpenSSL.crypto import load_certificate, FILETYPE_PEM
+
+from twisted.internet import protocol, endpoints, reactor, defer, interfaces
+from twisted.internet.ssl import (
+ CertificateOptions, optionsForClientTLS, Certificate
+)
+from twisted.python.filepath import FilePath
+from twisted.trial import unittest
+
+from zope.interface import implementer
+
+from .certs.cert_builder import (
+ ROOT_CERT_PATH, HTTP2BIN_CERT_PATH, _build_certs, CERT_DIR
+)
+
+# We need some temporary certs.
+_build_certs()
+
+with open(ROOT_CERT_PATH, 'rb') as f:
+ PEM_ROOT = Certificate.loadPEM(f.read())
+
+
+def sni_endpoint():
+ """
+ Builds a TxSNI TLSEndpoint populated with the default certificates. These
+ are built from cert_builder.py, and have the following certs in the SNI
+ map:
+
+ - DEFAULT.pem, which contains a SAN for 'localhost'.
+ - http2bin.org.pem, which contains a SAN for 'http2bin.org'
+ """
+ base_endpoint = endpoints.TCP4ServerEndpoint(
+ reactor=reactor,
+ port=0,
+ interface='127.0.0.1',
+ )
+ path = FilePath(CERT_DIR)
+ mapping = SNIMap(HostDirectoryMap(path))
+ wrapper_endpoint = TLSEndpoint(base_endpoint, mapping)
+ return wrapper_endpoint
+
+
+def handshake(client_factory, server_factory, hostname, server_endpoint):
+ """
+ Connect a basic Twisted TLS client endpoint to the provided TxSNI
+ TLSEndpoint. Returns a Deferred that fires when the connection has been
+ established with a tuple of an instance of the client protocol and the
+ listening port.
+ """
+ def connect_client(listening_port):
+ port_number = listening_port.getHost().port
+
+ client = endpoints.TCP4ClientEndpoint(
+ reactor, '127.0.0.1', port_number
+ )
+ options = optionsForClientTLS(
+ hostname=hostname, trustRoot=PEM_ROOT
+ )
+ client = endpoints.wrapClientTLS(options, client)
+ connectDeferred = client.connect(client_factory)
+
+ def aggregate(client_proto):
+ return (client_proto, listening_port)
+
+ connectDeferred.addCallback(aggregate)
+ return connectDeferred
+
+ listenDeferred = server_endpoint.listen(server_factory)
+ listenDeferred.addCallback(connect_client)
+ return listenDeferred
+
+
+class WritingProtocol(protocol.Protocol):
+ """
+ A really basic Twisted protocol that fires a Deferred when the TLS
+ handshake has been completed. It detects this using dataReceived, because
+ we can't rely on IHandshakeListener.
+ """
+ def __init__(self, handshake_deferred):
+ self.handshake_deferred = handshake_deferred
+
+ def dataReceived(self, data):
+ cert = self.transport.getPeerCertificate()
+
+ if not skipNegotiation:
+ proto = self.transport.negotiatedProtocol
+ else:
+ proto = None
+ self.transport.abortConnection()
+ self.handshake_deferred.callback((cert, proto))
+ self.handshake_deferred = None
+
+
+class WritingProtocolFactory(protocol.Factory):
+ protocol = WritingProtocol
+
+ def __init__(self, handshake_deferred):
+ self.handshake_deferred = handshake_deferred
+
+ def buildProtocol(self, addr):
+ p = self.protocol(self.handshake_deferred)
+ p.factory = self
+ return p
+
+
+class WriteBackProtocol(protocol.Protocol):
+ """
+ A really basic Twisted protocol that just writes some data to the
+ connection.
+ """
+ def connectionMade(self):
+ self.transport.write('PING')
+ self.transport.loseConnection()
+
+
+try:
+ @implementer(interfaces.IProtocolNegotiationFactory)
+ class NegotiatingFactory(protocol.Factory):
+ """
+ A Twisted Protocol Factory that implements the protocol negotiation
+ extensions
+ """
+ def acceptableProtocols(self):
+ return [b'h2', b'http/1.1']
+
+ class WritingNegotiatingFactory(WritingProtocolFactory,
+ NegotiatingFactory):
+ pass
+
+ skipNegotiation = False
+except AttributeError:
+ skipNegotiation = "IProtocolNegotiationFactory not supported"
+
+
+class TestSNIMap(unittest.TestCase):
+ """
+ Tests of the basic SNIMap logic.
+ """
+ def test_snimap_default(self):
+ """
+ SNIMap preferentially loads the DEFAULT value from the mapping if it's
+ present.
+ """
+ options = CertificateOptions()
+ mapping = {'DEFAULT': options}
+ sni_map = SNIMap(mapping)
+
+ conn = sni_map.serverConnectionForTLS(protocol.Protocol())
+ self.assertIs(conn.get_context()._obj, options.getContext())
+
+ def test_snimap_makes_its_own_defaults(self):
+ """
+ If passed a mapping without a DEFAULT key, SNIMap will make its own
+ default context.
+ """
+ options = CertificateOptions()
+ mapping = {'example.com': options}
+ sni_map = SNIMap(mapping)
+
+ conn = sni_map.serverConnectionForTLS(protocol.Protocol())
+ self.assertIsNot(conn.get_context(), options.getContext())
+ self.assertIsNotNone(conn.get_context())
+
+
+class TestCommunication(unittest.TestCase):
+ """
+ Tests that use the full Twisted logic to validate that txsni works as
+ expected.
+ """
+ def assertCertIs(self, protocol_cert, cert_path):
+ """
+ Assert that ``protocol_cert`` is the same certificate as the one at
+ ``cert_path``.
+ """
+ with open(cert_path, 'rb') as f:
+ target_cert = load_certificate(FILETYPE_PEM, f.read())
+
+ self.assertEqual(
+ protocol_cert.digest('sha256'),
+ target_cert.digest('sha256')
+ )
+
+ def test_specific_certificate(self):
+ """
+ When a hostname TxSNI does know about, in this case 'http2bin.org', is
+ provided, TxSNI returns the specific certificate.
+ """
+ handshake_deferred = defer.Deferred()
+ client_factory = WritingProtocolFactory(handshake_deferred)
+ server_factory = protocol.Factory.forProtocol(WriteBackProtocol)
+
+ endpoint = sni_endpoint()
+ d = handshake(
+ client_factory=client_factory,
+ server_factory=server_factory,
+ hostname=u'http2bin.org',
+ server_endpoint=endpoint,
+ )
+
+ def confirm_cert(args):
+ cert, proto = args
+ self.assertCertIs(cert, HTTP2BIN_CERT_PATH)
+ return d
+
+ def close(args):
+ client, port = args
+ port.stopListening()
+
+ handshake_deferred.addCallback(confirm_cert)
+ handshake_deferred.addCallback(close)
+ return handshake_deferred
+
+
+class TestNegotiationStillWorks(unittest.TestCase):
+ """
+ Tests that TxSNI doesn't break protocol negotiation.
+ """
+ if skipNegotiation:
+ skip = skipNegotiation
+
+ def test_specific_cert_still_negotiates(self):
+ """
+ When TxSNI selects a specific cert, protocol negotiation still works.
+ """
+ handshake_deferred = defer.Deferred()
+ client_factory = WritingNegotiatingFactory(handshake_deferred)
+ server_factory = NegotiatingFactory.forProtocol(
+ WriteBackProtocol
+ )
+
+ endpoint = sni_endpoint()
+ d = handshake(
+ client_factory=client_factory,
+ server_factory=server_factory,
+ hostname=u'http2bin.org',
+ server_endpoint=endpoint,
+ )
+
+ def confirm_cert(args):
+ cert, proto = args
+ self.assertEqual(proto, b'h2')
+ return d
+
+ def close(args):
+ client, port = args
+ port.stopListening()
+
+ handshake_deferred.addCallback(confirm_cert)
+ handshake_deferred.addCallback(close)
+ return handshake_deferred
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 4
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"pip install -U pip setuptools wheel",
"pip install tox"
],
"python": "2.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Automat==22.10.0
certifi==2021.5.30
cffi==1.15.1
constantly==15.1.0
cryptography==40.0.2
distlib==0.3.9
filelock==3.4.1
hyperlink==21.0.0
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
incremental==22.10.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21
pyOpenSSL==23.2.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
service-identity==21.1.0
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tox==3.28.0
Twisted==22.4.0
-e git+https://github.com/glyph/txsni.git@e8a243a05f0f6eb81e06098fb18f35c11b7cdc3e#egg=TxSNI
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
virtualenv==20.17.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
zope.interface==5.5.2
| name: txsni
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- automat==22.10.0
- cffi==1.15.1
- constantly==15.1.0
- cryptography==40.0.2
- distlib==0.3.9
- filelock==3.4.1
- hyperlink==21.0.0
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- incremental==22.10.0
- pip==21.3.1
- platformdirs==2.4.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycparser==2.21
- pyopenssl==23.2.0
- service-identity==21.1.0
- setuptools==59.6.0
- six==1.17.0
- tox==3.28.0
- twisted==22.4.0
- virtualenv==20.17.1
- zope-interface==5.5.2
prefix: /opt/conda/envs/txsni
| [
"txsni/test/test_txsni.py::TestSNIMap::test_snimap_default",
"txsni/test/test_txsni.py::TestSNIMap::test_snimap_makes_its_own_defaults"
]
| [
"txsni/test/test_txsni.py::TestCommunication::test_specific_certificate",
"txsni/test/test_txsni.py::TestNegotiationStillWorks::test_specific_cert_still_negotiates"
]
| []
| []
| MIT License | 671 | [
"setup.py",
"txsni/snimap.py",
".travis.yml",
"tox.ini"
]
| [
"setup.py",
"txsni/snimap.py",
".travis.yml",
"tox.ini"
]
|
keredson__peewee-7 | 817a27e0968d38dc974d89c19431ec88f871f3d3 | 2016-07-29 20:11:25 | 86cb70e5cf9695add1e7cde1aa60d461290d71db | diff --git a/README.rst b/README.rst
index 719c92c..e0501ae 100644
--- a/README.rst
+++ b/README.rst
@@ -399,6 +399,28 @@ This removes the need for the extra variable in the global namespace and the coo
Our patch for this has been incorporated upstream, so this is forwards-compatible too, following Peewee's next release.
+Passing an Empty List/Set/Tuple into IN Doesn't Gen Invalid SQL
+---------------------------------------------------------------
+
+If you try to do a IN operation on an empty list:
+
+.. code-block:: python
+
+ User.select().where(User.id << [])
+
+Peewee will generate the following SQL:
+
+.. code-block:: sql
+
+ SELECT "t1"."id", "t1"."username" FROM "user" AS t1 WHERE ("t1"."id" IN ())
+
+Which the database will reject as invalid, throwing an exception. We instead generate a "false" statement:
+
+.. code-block:: sql
+
+ SELECT u1.id, u1.username FROM "user" AS u1 WHERE (0 = 1)
+
+So you don't have to manually test for empty lists every time you use a SQL IN.
diff --git a/peewee.py b/peewee.py
index d609c21..de9fb2e 100644
--- a/peewee.py
+++ b/peewee.py
@@ -1598,6 +1598,8 @@ class QueryCompiler(object):
lhs, lparams = self.parse_node(node.lhs, alias_map, conv)
rhs, rparams = self.parse_node(node.rhs, alias_map, conv)
template = '%s %s %s' if node.flat else '(%s %s %s)'
+ if node.op == OP.IN and node.rhs.__class__ in (list,set,tuple) and len(node.rhs)==0:
+ return template % ('0', self.get_op(OP.EQ), '1'), lparams
sql = template % (lhs, self.get_op(node.op), rhs)
return sql, lparams + rparams
| Using << with an empty list creates an invalid SQL query
from @Prillan in https://github.com/coleifer/peewee/issues/860:
```
psycopg2.ProgrammingError: syntax error at or near ")"
```
I think either of the following solutions are acceptable:
- Throw an error in peewee indicating that the list (iterator) is empty.
- Transform column << [] into False before compiling to SQL.
Not handling this before sending it to the database causes all queries after it to fail (at least on our flask server) with error:
```
peewee.InternalError: current transaction is aborted, commands ignored until end of transaction block
```
| keredson/peewee | diff --git a/playhouse/tests/test_models.py b/playhouse/tests/test_models.py
index e23e99d..2d9a691 100644
--- a/playhouse/tests/test_models.py
+++ b/playhouse/tests/test_models.py
@@ -131,6 +131,11 @@ class TestQueryingModels(ModelTestCase):
users = User.select(fn.Count(fn.Distinct(User.username))).scalar()
self.assertEqual(users, 6)
+ def test_empty_in(self):
+ for empty_collection in [list(), set(), tuple()]:
+ users = User.select().where(User.username << empty_collection)
+ self.assertEqual(users.count(), 0)
+
def test_update(self):
User.create_users(5)
uq = User.update(username='u-edited').where(User.username << ['u1', 'u2', 'u3'])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 2.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
-e git+https://github.com/keredson/peewee.git@817a27e0968d38dc974d89c19431ec88f871f3d3#egg=peewee
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: peewee
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/peewee
| [
"playhouse/tests/test_models.py::TestQueryingModels::test_empty_in"
]
| [
"playhouse/tests/test_models.py::TestModelAPIs::test_create_or_get",
"playhouse/tests/test_models.py::TestModelAPIs::test_first",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_cache_invalidated",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_caching",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_fk"
]
| [
"playhouse/tests/test_models.py::TestQueryingModels::test_delete",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert_from",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert_many",
"playhouse/tests/test_models.py::TestQueryingModels::test_insert_many_fallback",
"playhouse/tests/test_models.py::TestQueryingModels::test_limits_offsets",
"playhouse/tests/test_models.py::TestQueryingModels::test_model_iter",
"playhouse/tests/test_models.py::TestQueryingModels::test_raw",
"playhouse/tests/test_models.py::TestQueryingModels::test_raw_fn",
"playhouse/tests/test_models.py::TestQueryingModels::test_scalar",
"playhouse/tests/test_models.py::TestQueryingModels::test_select",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_all",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_get",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_subquery",
"playhouse/tests/test_models.py::TestQueryingModels::test_select_with_bind_to",
"playhouse/tests/test_models.py::TestQueryingModels::test_update",
"playhouse/tests/test_models.py::TestQueryingModels::test_update_subquery",
"playhouse/tests/test_models.py::TestInsertEmptyModel::test_insert_empty",
"playhouse/tests/test_models.py::TestInsertEmptyModel::test_no_pk",
"playhouse/tests/test_models.py::TestModelAPIs::test_callable_related_name",
"playhouse/tests/test_models.py::TestModelAPIs::test_category_select_related_alias",
"playhouse/tests/test_models.py::TestModelAPIs::test_count_transaction",
"playhouse/tests/test_models.py::TestModelAPIs::test_counting",
"playhouse/tests/test_models.py::TestModelAPIs::test_creation",
"playhouse/tests/test_models.py::TestModelAPIs::test_deleting",
"playhouse/tests/test_models.py::TestModelAPIs::test_dirty_from_query",
"playhouse/tests/test_models.py::TestModelAPIs::test_exists",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_exceptions",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_ints",
"playhouse/tests/test_models.py::TestModelAPIs::test_fk_object_id",
"playhouse/tests/test_models.py::TestModelAPIs::test_get_exception",
"playhouse/tests/test_models.py::TestModelAPIs::test_get_or_create",
"playhouse/tests/test_models.py::TestModelAPIs::test_get_or_create_extended",
"playhouse/tests/test_models.py::TestModelAPIs::test_meta_get_field_index",
"playhouse/tests/test_models.py::TestModelAPIs::test_meta_rel_for_model",
"playhouse/tests/test_models.py::TestModelAPIs::test_meta_remove_field",
"playhouse/tests/test_models.py::TestModelAPIs::test_modify_model_cause_it_dirty",
"playhouse/tests/test_models.py::TestModelAPIs::test_on_conflict",
"playhouse/tests/test_models.py::TestModelAPIs::test_on_conflict_many",
"playhouse/tests/test_models.py::TestModelAPIs::test_ordering",
"playhouse/tests/test_models.py::TestModelAPIs::test_peek",
"playhouse/tests/test_models.py::TestModelAPIs::test_reading",
"playhouse/tests/test_models.py::TestModelAPIs::test_related_id",
"playhouse/tests/test_models.py::TestModelAPIs::test_related_name",
"playhouse/tests/test_models.py::TestModelAPIs::test_related_name_collision",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_dirty_auto",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_only",
"playhouse/tests/test_models.py::TestModelAPIs::test_save_only_dirty_fields",
"playhouse/tests/test_models.py::TestModelAPIs::test_saving",
"playhouse/tests/test_models.py::TestModelAPIs::test_saving_via_create_gh111",
"playhouse/tests/test_models.py::TestModelAPIs::test_unicode",
"playhouse/tests/test_models.py::TestModelAPIs::test_unicode_issue202",
"playhouse/tests/test_models.py::TestModelAPIs::test_zero_id",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_aggregate_datetime",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_aggregate_int",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_annotate_datetime",
"playhouse/tests/test_models.py::TestAggregatesWithModels::test_annotate_int",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_from_multi_table",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_join_on_query",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_subselect",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_subselect_with_column",
"playhouse/tests/test_models.py::TestMultiTableFromClause::test_subselect_with_join",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_delete",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_delete_child_queries",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_delete_parent_sql",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_non_pk_fk",
"playhouse/tests/test_models.py::TestDeleteRecursive::test_recursive_update",
"playhouse/tests/test_models.py::TestTruncate::test_truncate",
"playhouse/tests/test_models.py::TestManyToMany::test_m2m",
"playhouse/tests/test_models.py::TestManyToMany::test_many_to_many_prefetch",
"playhouse/tests/test_models.py::TestCustomModelOptionsBase::test_custom_model_options_base",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_custom_options",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_db_table",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_option_inheritance",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_order_by_inheritance",
"playhouse/tests/test_models.py::TestModelOptionInheritance::test_table_name_function",
"playhouse/tests/test_models.py::TestModelInheritance::test_model_inheritance_attrs",
"playhouse/tests/test_models.py::TestModelInheritance::test_model_inheritance_flow",
"playhouse/tests/test_models.py::TestAliasBehavior::test_alias_with_coerce",
"playhouse/tests/test_models.py::TestModelHash::test_hash",
"playhouse/tests/test_models.py::TestDeleteNullableForeignKeys::test_delete",
"playhouse/tests/test_models.py::TestJoinNullableForeignKey::test_no_empty_instances",
"playhouse/tests/test_models.py::TestJoinNullableForeignKey::test_non_null_fk_unselected_fk",
"playhouse/tests/test_models.py::TestJoinNullableForeignKey::test_unselected_fk_pk"
]
| []
| MIT License | 672 | [
"README.rst",
"peewee.py"
]
| [
"README.rst",
"peewee.py"
]
|
|
ActivisionGameScience__assertpy-55 | ed43bee91eadd55f6cc9004e6f3862a97e0d2190 | 2016-07-30 23:24:36 | ed43bee91eadd55f6cc9004e6f3862a97e0d2190 | diff --git a/README.md b/README.md
index 91b1eb5..99edf06 100644
--- a/README.md
+++ b/README.md
@@ -282,7 +282,7 @@ Fluent assertions against the value of a given key can be done by prepending `ha
```py
fred = {'first_name': 'Fred', 'last_name': 'Smith', 'shoe_size': 12}
-
+
assert_that(fred).has_first_name('Fred')
assert_that(fred).has_last_name('Smith')
assert_that(fred).has_shoe_size(12)
@@ -534,7 +534,7 @@ As noted above, dynamic assertions also work on dicts:
```py
fred = {'first_name': 'Fred', 'last_name': 'Smith'}
-
+
assert_that(fred).has_first_name('Fred')
assert_that(fred).has_last_name('Smith')
```
@@ -613,24 +613,24 @@ Expected <3> to be equal to <2>, but was not.
The `described_as()` helper causes the custom message `adding stuff` to be prepended to the front of the second error.
-#### Soft Assertions
+#### Just A Warning
-There are times when you don't want to a test to fail at all, instead you only want a warning message. In this case, just replace `assert_that` with `assert_soft`.
+There are times when you only want a warning message instead of an failing test. In this case, just replace `assert_that` with `assert_warn`.
```py
-assert_soft('foo').is_length(4)
-assert_soft('foo').is_empty()
-assert_soft('foo').is_false()
-assert_soft('foo').is_digit()
-assert_soft('123').is_alpha()
-assert_soft('foo').is_upper()
-assert_soft('FOO').is_lower()
-assert_soft('foo').is_equal_to('bar')
-assert_soft('foo').is_not_equal_to('foo')
-assert_soft('foo').is_equal_to_ignoring_case('BAR')
+assert_warn('foo').is_length(4)
+assert_warn('foo').is_empty()
+assert_warn('foo').is_false()
+assert_warn('foo').is_digit()
+assert_warn('123').is_alpha()
+assert_warn('foo').is_upper()
+assert_warn('FOO').is_lower()
+assert_warn('foo').is_equal_to('bar')
+assert_warn('foo').is_not_equal_to('foo')
+assert_warn('foo').is_equal_to_ignoring_case('BAR')
```
-The above soft assertions print the following warning messages (but an `AssertionError` is never raised):
+The above assertions just print the following warning messages, and an `AssertionError` is never raised:
```
Expected <foo> to be of length <4>, but was <3>.
diff --git a/assertpy/__init__.py b/assertpy/__init__.py
index 385ab52..f673973 100644
--- a/assertpy/__init__.py
+++ b/assertpy/__init__.py
@@ -1,2 +1,2 @@
from __future__ import absolute_import
-from .assertpy import assert_that, assert_soft, contents_of, fail, __version__
+from .assertpy import assert_that, assert_warn, soft_assertions, contents_of, fail, __version__
diff --git a/assertpy/assertpy.py b/assertpy/assertpy.py
index 462eee4..a1a644d 100644
--- a/assertpy/assertpy.py
+++ b/assertpy/assertpy.py
@@ -36,6 +36,7 @@ import datetime
import numbers
import collections
import inspect
+from contextlib import contextmanager
__version__ = '0.9'
@@ -48,14 +49,43 @@ else:
xrange = xrange
unicode = unicode
+
+### soft assertions ###
+_soft_ctx = False
+_soft_err = []
+
+@contextmanager
+def soft_assertions():
+ global _soft_ctx
+ global _soft_err
+
+ _soft_ctx = True
+ _soft_err = []
+
+ yield
+
+ if _soft_err:
+ out = 'soft assertion failures:'
+ for i,msg in enumerate(_soft_err):
+ out += '\n%d. %s' % (i+1, msg)
+ raise AssertionError(out)
+
+ _soft_err = []
+ _soft_ctx = False
+
+
+### factory methods ###
def assert_that(val, description=''):
"""Factory method for the assertion builder with value to be tested and optional description."""
+ global _soft_ctx
+ if _soft_ctx:
+ return AssertionBuilder(val, description, 'soft')
return AssertionBuilder(val, description)
-def assert_soft(val, description=''):
+def assert_warn(val, description=''):
"""Factory method for the assertion builder with value to be tested, optional description, and
- just print assertion failures, don't raise exceptions."""
- return AssertionBuilder(val, description, True)
+ just warn on assertion failures instead of raisings exceptions."""
+ return AssertionBuilder(val, description, 'warn')
def contents_of(f, encoding='utf-8'):
"""Helper to read the contents of the given file or path into a string with the given encoding.
@@ -96,14 +126,15 @@ def fail(msg=''):
else:
raise AssertionError('Fail: %s!' % msg)
+
class AssertionBuilder(object):
"""Assertion builder."""
- def __init__(self, val, description, soft=False, expected=None):
+ def __init__(self, val, description='', kind=None, expected=None):
"""Construct the assertion builder."""
self.val = val
self.description = description
- self.soft = soft
+ self.kind = kind
self.expected = expected
def described_as(self, description):
@@ -833,7 +864,7 @@ class AssertionBuilder(object):
else:
raise ValueError('val does not have property or zero-arg method <%s>' % name)
extracted.append(tuple(items) if len(items) > 1 else items[0])
- return AssertionBuilder(extracted, self.description)
+ return AssertionBuilder(extracted, self.description, self.kind)
### dynamic assertions ###
def __getattr__(self, attr):
@@ -878,7 +909,7 @@ class AssertionBuilder(object):
raise TypeError('val must be function')
if not issubclass(ex, BaseException):
raise TypeError('given arg must be exception')
- return AssertionBuilder(self.val, self.description, expected=ex)
+ return AssertionBuilder(self.val, self.description, self.kind, ex)
def when_called_with(self, *some_args, **some_kwargs):
"""Asserts the val function when invoked with the given args and kwargs raises the expected exception."""
@@ -889,7 +920,7 @@ class AssertionBuilder(object):
except BaseException as e:
if issubclass(type(e), self.expected):
# chain on with exception message as val
- return AssertionBuilder(str(e), self.description)
+ return AssertionBuilder(str(e), self.description, self.kind)
else:
# got exception, but wrong type, so raise
self._err('Expected <%s> to raise <%s> when called with (%s), but raised <%s>.' % (
@@ -908,9 +939,13 @@ class AssertionBuilder(object):
def _err(self, msg):
"""Helper to raise an AssertionError, and optionally prepend custom description."""
out = '%s%s' % ('[%s] ' % self.description if len(self.description) > 0 else '', msg)
- if self.soft:
+ if self.kind == 'warn':
print(out)
return self
+ elif self.kind == 'soft':
+ global _soft_err
+ _soft_err.append(out)
+ return self
else:
raise AssertionError(out)
| correct implementation of soft assertions
Hi!
This is not a bug report, but more like a discussion kick-starter regarding soft assertions. And if we happen to agree on a different implementation, I'll be more than happy to create a PR.
What I suggest is soft assertions to be implemented as in other languages libraries. E.g [soft assertions in AssertJ](http://joel-costigliola.github.io/assertj/assertj-core-features-highlight.html#soft-assertions).
Soft assertions usually have a special value in higher levels of testing than unit, e.g. integration or system tests, when the result feedback is not as fast as with unit test.
So basically a test is more "expensive" to run in terms of resources like CPU, memory, and specially time. And we want to take the most value out of each execution.
Let's assume that I have a test that:
- Logs in
- Creates a new user in the system with default settings
- And verifies that the user has a
* default locale = X
* default timezone = Y
* default privileges = Z
* etc, etc
If any of these are missing or wrong, I want the test to fail. However, with regular assertions if the locale is missing the test will fail as expected but I won't have any information whether the system meets the other requirements.
As you know, that's when soft assertions come handy. The problem I see though, is that in your implementation, you silently pass, I mean.. you print a warning in stdout, but the test will pass. And that's a wrong approach IMO, as it requires human intervention (someone reading the screen), so those assertions won't have any effect if tests are run, for instance, in jenkins as part of CI/CD pipeline.
What I suggest is what AssertJ does. You run assertions in a group, so even if the "locale" assertion fails, you still run the "timezone" and the "privileges" assertions. After all the assertions have been executed an AssertionError is raised if at least one assertion in the group failed. The error will contain the details of all those assertions in the group that failed.
Does all this make sense to you? WDYT?
Regards! | ActivisionGameScience/assertpy | diff --git a/tests/test_readme.py b/tests/test_readme.py
index 2ad4554..2179166 100644
--- a/tests/test_readme.py
+++ b/tests/test_readme.py
@@ -29,7 +29,7 @@
import sys
import os
import datetime
-from assertpy import assert_that, assert_soft, contents_of, fail
+from assertpy import assert_that, assert_warn, contents_of, fail
class TestReadme(object):
@@ -382,16 +382,16 @@ class TestReadme(object):
assert_that(str(e)).is_equal_to('[adding stuff] Expected <3> to be equal to <2>, but was not.')
def test_soft_assertions(self):
- assert_soft('foo').is_length(4)
- assert_soft('foo').is_empty()
- assert_soft('foo').is_false()
- assert_soft('foo').is_digit()
- assert_soft('123').is_alpha()
- assert_soft('foo').is_upper()
- assert_soft('FOO').is_lower()
- assert_soft('foo').is_equal_to('bar')
- assert_soft('foo').is_not_equal_to('foo')
- assert_soft('foo').is_equal_to_ignoring_case('BAR')
+ assert_warn('foo').is_length(4)
+ assert_warn('foo').is_empty()
+ assert_warn('foo').is_false()
+ assert_warn('foo').is_digit()
+ assert_warn('123').is_alpha()
+ assert_warn('foo').is_upper()
+ assert_warn('FOO').is_lower()
+ assert_warn('foo').is_equal_to('bar')
+ assert_warn('foo').is_not_equal_to('foo')
+ assert_warn('foo').is_equal_to_ignoring_case('BAR')
def test_chaining(self):
fred = Person('Fred','Smith')
diff --git a/tests/test_soft.py b/tests/test_soft.py
index 8731c99..ca39c30 100644
--- a/tests/test_soft.py
+++ b/tests/test_soft.py
@@ -28,48 +28,37 @@
import sys
-from assertpy import assert_that, assert_soft, fail
+from assertpy import assert_that, soft_assertions, fail
-class TestSoft(object):
-
- def test_success(self):
- assert_soft('foo').is_length(3)
- assert_soft('foo').is_not_empty()
- assert_soft('foo').is_true()
- assert_soft('foo').is_alpha()
- assert_soft('123').is_digit()
- assert_soft('foo').is_lower()
- assert_soft('FOO').is_upper()
- assert_soft('foo').is_equal_to('foo')
- assert_soft('foo').is_not_equal_to('bar')
- assert_soft('foo').is_equal_to_ignoring_case('FOO')
-
- def test_failures(self):
- if sys.version_info[0] == 3:
- from io import StringIO
- else:
- from StringIO import StringIO
-
- # capture stdout
- old = sys.stdout
- sys.stdout = StringIO()
-
- assert_soft('foo').is_length(4)
- assert_soft('foo').is_empty()
- assert_soft('foo').is_false()
- assert_soft('foo').is_digit()
- assert_soft('123').is_alpha()
- assert_soft('foo').is_upper()
- assert_soft('FOO').is_lower()
- assert_soft('foo').is_equal_to('bar')
- assert_soft('foo').is_not_equal_to('foo')
- assert_soft('foo').is_equal_to_ignoring_case('BAR')
-
- # stop capturing stdout
- out = sys.stdout.getvalue()
- sys.stdout.close()
- sys.stdout = old
+def test_success():
+ with soft_assertions():
+ assert_that('foo').is_length(3)
+ assert_that('foo').is_not_empty()
+ assert_that('foo').is_true()
+ assert_that('foo').is_alpha()
+ assert_that('123').is_digit()
+ assert_that('foo').is_lower()
+ assert_that('FOO').is_upper()
+ assert_that('foo').is_equal_to('foo')
+ assert_that('foo').is_not_equal_to('bar')
+ assert_that('foo').is_equal_to_ignoring_case('FOO')
+def test_failure():
+ try:
+ with soft_assertions():
+ assert_that('foo').is_length(4)
+ assert_that('foo').is_empty()
+ assert_that('foo').is_false()
+ assert_that('foo').is_digit()
+ assert_that('123').is_alpha()
+ assert_that('foo').is_upper()
+ assert_that('FOO').is_lower()
+ assert_that('foo').is_equal_to('bar')
+ assert_that('foo').is_not_equal_to('foo')
+ assert_that('foo').is_equal_to_ignoring_case('BAR')
+ fail('should have raised error')
+ except AssertionError as e:
+ out = str(e)
assert_that(out).contains('Expected <foo> to be of length <4>, but was <3>.')
assert_that(out).contains('Expected <foo> to be empty string, but was not.')
assert_that(out).contains('Expected <False>, but was not.')
@@ -81,3 +70,41 @@ class TestSoft(object):
assert_that(out).contains('Expected <foo> to be not equal to <foo>, but was.')
assert_that(out).contains('Expected <foo> to be case-insensitive equal to <BAR>, but was not.')
+def test_failure_chain():
+ try:
+ with soft_assertions():
+ assert_that('foo').is_length(4).is_empty().is_false().is_digit().is_upper()\
+ .is_equal_to('bar').is_not_equal_to('foo').is_equal_to_ignoring_case('BAR')
+ fail('should have raised error')
+ except AssertionError as e:
+ out = str(e)
+ assert_that(out).contains('Expected <foo> to be of length <4>, but was <3>.')
+ assert_that(out).contains('Expected <foo> to be empty string, but was not.')
+ assert_that(out).contains('Expected <False>, but was not.')
+ assert_that(out).contains('Expected <foo> to contain only digits, but did not.')
+ assert_that(out).contains('Expected <foo> to contain only uppercase chars, but did not.')
+ assert_that(out).contains('Expected <foo> to be equal to <bar>, but was not.')
+ assert_that(out).contains('Expected <foo> to be not equal to <foo>, but was.')
+ assert_that(out).contains('Expected <foo> to be case-insensitive equal to <BAR>, but was not.')
+
+def test_expected_exception_success():
+ with soft_assertions():
+ assert_that(func_err).raises(RuntimeError).when_called_with('foo').is_equal_to('err')
+
+def test_expected_exception_failure():
+ try:
+ with soft_assertions():
+ assert_that(func_err).raises(RuntimeError).when_called_with('foo').is_equal_to('bar')
+ assert_that(func_ok).raises(RuntimeError).when_called_with('baz')
+ fail('should have raised error')
+ except AssertionError as e:
+ out = str(e)
+ assert_that(out).contains('Expected <err> to be equal to <bar>, but was not.')
+ assert_that(out).contains("Expected <func_ok> to raise <RuntimeError> when called with ('baz').")
+
+def func_ok(arg):
+ pass
+
+def func_err(arg):
+ raise RuntimeError('err')
+
diff --git a/tests/test_warn.py b/tests/test_warn.py
new file mode 100644
index 0000000..6de80c2
--- /dev/null
+++ b/tests/test_warn.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2015-2016, Activision Publishing, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import sys
+
+from assertpy import assert_that, assert_warn, fail
+
+class TestSoft(object):
+
+ def test_success(self):
+ assert_warn('foo').is_length(3)
+ assert_warn('foo').is_not_empty()
+ assert_warn('foo').is_true()
+ assert_warn('foo').is_alpha()
+ assert_warn('123').is_digit()
+ assert_warn('foo').is_lower()
+ assert_warn('FOO').is_upper()
+ assert_warn('foo').is_equal_to('foo')
+ assert_warn('foo').is_not_equal_to('bar')
+ assert_warn('foo').is_equal_to_ignoring_case('FOO')
+
+ def test_failures(self):
+ if sys.version_info[0] == 3:
+ from io import StringIO
+ else:
+ from StringIO import StringIO
+
+ # capture stdout
+ old = sys.stdout
+ sys.stdout = StringIO()
+
+ assert_warn('foo').is_length(4)
+ assert_warn('foo').is_empty()
+ assert_warn('foo').is_false()
+ assert_warn('foo').is_digit()
+ assert_warn('123').is_alpha()
+ assert_warn('foo').is_upper()
+ assert_warn('FOO').is_lower()
+ assert_warn('foo').is_equal_to('bar')
+ assert_warn('foo').is_not_equal_to('foo')
+ assert_warn('foo').is_equal_to_ignoring_case('BAR')
+
+ # stop capturing stdout
+ out = sys.stdout.getvalue()
+ sys.stdout.close()
+ sys.stdout = old
+
+ assert_that(out).contains('Expected <foo> to be of length <4>, but was <3>.')
+ assert_that(out).contains('Expected <foo> to be empty string, but was not.')
+ assert_that(out).contains('Expected <False>, but was not.')
+ assert_that(out).contains('Expected <foo> to contain only digits, but did not.')
+ assert_that(out).contains('Expected <123> to contain only alphabetic chars, but did not.')
+ assert_that(out).contains('Expected <foo> to contain only uppercase chars, but did not.')
+ assert_that(out).contains('Expected <FOO> to contain only lowercase chars, but did not.')
+ assert_that(out).contains('Expected <foo> to be equal to <bar>, but was not.')
+ assert_that(out).contains('Expected <foo> to be not equal to <foo>, but was.')
+ assert_that(out).contains('Expected <foo> to be case-insensitive equal to <BAR>, but was not.')
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 3
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ActivisionGameScience/assertpy.git@ed43bee91eadd55f6cc9004e6f3862a97e0d2190#egg=assertpy
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-cov==4.0.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: assertpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- pytest-cov==4.0.0
- tomli==1.2.3
prefix: /opt/conda/envs/assertpy
| [
"tests/test_readme.py::TestReadme::test_something",
"tests/test_readme.py::TestReadme::test_strings",
"tests/test_readme.py::TestReadme::test_ints",
"tests/test_readme.py::TestReadme::test_floats",
"tests/test_readme.py::TestReadme::test_lists",
"tests/test_readme.py::TestReadme::test_tuples",
"tests/test_readme.py::TestReadme::test_dicts",
"tests/test_readme.py::TestReadme::test_sets",
"tests/test_readme.py::TestReadme::test_booleans",
"tests/test_readme.py::TestReadme::test_dates",
"tests/test_readme.py::TestReadme::test_files",
"tests/test_readme.py::TestReadme::test_objects",
"tests/test_readme.py::TestReadme::test_dyn",
"tests/test_readme.py::TestReadme::test_expected_exceptions",
"tests/test_readme.py::TestReadme::test_custom_error_message",
"tests/test_readme.py::TestReadme::test_soft_assertions",
"tests/test_readme.py::TestReadme::test_chaining",
"tests/test_soft.py::test_success",
"tests/test_soft.py::test_failure",
"tests/test_soft.py::test_failure_chain",
"tests/test_soft.py::test_expected_exception_success",
"tests/test_soft.py::test_expected_exception_failure",
"tests/test_warn.py::TestSoft::test_success",
"tests/test_warn.py::TestSoft::test_failures"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 673 | [
"assertpy/__init__.py",
"assertpy/assertpy.py",
"README.md"
]
| [
"assertpy/__init__.py",
"assertpy/assertpy.py",
"README.md"
]
|
|
setokinto__slack-shogi-58 | ba7768484d154d0ac8f2d3852748462d10a78a83 | 2016-07-31 12:41:12 | f5175be50a09220713ceb5547cd04f80f43f84fb | diff --git a/app/slack_utils/user.py b/app/slack_utils/user.py
index fe8e3c6..176e788 100644
--- a/app/slack_utils/user.py
+++ b/app/slack_utils/user.py
@@ -22,8 +22,10 @@ class User: # TODO: rename this class
return user["name"]
def user_in_channel(self, user_id, channel_id):
- users = self._slacker.channels.info(channel_id).body[
- "channel"]["members"]
+ if channel_id[0] == "G":
+ users = self._slacker.groups.info(channel_id).body["group"]["members"]
+ else:
+ users = self._slacker.channels.info(channel_id).body["channel"]["members"]
for user in users:
if user == user_id:
return True
| privateチャンネル対応
動かない気がしてる | setokinto/slack-shogi | diff --git a/test/slack_utils/user_test.py b/test/slack_utils/user_test.py
index 30500a5..39eee15 100644
--- a/test/slack_utils/user_test.py
+++ b/test/slack_utils/user_test.py
@@ -10,6 +10,10 @@ class MockedSlacker:
def users(self):
return MockedUser()
+ @property
+ def groups(self):
+ return MockedGroup()
+
@property
def channels(self):
return MockedChannel()
@@ -71,10 +75,42 @@ class MockedUser:
}
)
+class MockedGroup:
+
+ def info(self, group_id):
+ if not group_id[0] == "G":
+ raise Exception()
+ return MockedBody(
+ {
+ "ok": True,
+ "group": {
+ "id": "G023BECGF",
+ "name": "fun",
+ "created": 1360782804,
+ "creator": "U024BE7LH",
+ "is_archived": False,
+ "is_general": False,
+ "is_member": True,
+ "is_starred": True,
+ "members": [
+ "U023BECGA",
+ ],
+ "topic": {},
+ "purpose": {},
+ "last_read": "1401383885.000061",
+ "latest": {},
+ "unread_count": 0,
+ "unread_count_display": 0
+ }
+ }
+ )
class MockedChannel:
def info(self, channel_id):
+ if not channel_id[0] == "C":
+ raise Exception()
+
return MockedBody(
{
"ok": True,
@@ -148,3 +184,12 @@ class UserTest(unittest.TestCase):
def test_user_in_channel_return_False_when_user_not_exists(self):
notexists = self.user.user_in_channel("UNONONONONO", "C023BECGA")
self.assertFalse(notexists)
+
+ def test_user_in_privatechannel_return_True_when_user_exists(self):
+ exists = self.user.user_in_channel("U023BECGA", "G023BECGF")
+ self.assertTrue(exists)
+
+ def test_user_in_private_channel_return_False_when_user_not_exists(self):
+ notexists = self.user.user_in_channel("UNONONONONO", "G023BECGF")
+ self.assertFalse(notexists)
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
requests==2.32.3
six==1.17.0
-e git+https://github.com/setokinto/slack-shogi.git@ba7768484d154d0ac8f2d3852748462d10a78a83#egg=Slack_Shogi
slackbot==1.0.5
slacker==0.14.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
urllib3==2.3.0
websocket-client==1.6.0
| name: slack-shogi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- idna==3.10
- requests==2.32.3
- six==1.17.0
- slackbot==1.0.5
- slacker==0.14.0
- urllib3==2.3.0
- websocket-client==1.6.0
prefix: /opt/conda/envs/slack-shogi
| [
"test/slack_utils/user_test.py::UserTest::test_user_in_private_channel_return_False_when_user_not_exists",
"test/slack_utils/user_test.py::UserTest::test_user_in_privatechannel_return_True_when_user_exists"
]
| []
| [
"test/slack_utils/user_test.py::UserTest::test_find_userid_from_username",
"test/slack_utils/user_test.py::UserTest::test_find_userid_with_atmark_prefix",
"test/slack_utils/user_test.py::UserTest::test_find_username_from_userid",
"test/slack_utils/user_test.py::UserTest::test_return_None_non_exists_user_name",
"test/slack_utils/user_test.py::UserTest::test_user_in_channel_return_False_when_user_not_exists",
"test/slack_utils/user_test.py::UserTest::test_user_in_channel_return_True_when_user_exists"
]
| []
| MIT License | 674 | [
"app/slack_utils/user.py"
]
| [
"app/slack_utils/user.py"
]
|
|
wireservice__csvkit-645 | 70d641c60202c8c8d596d1bf90fb03b10a1a4614 | 2016-08-01 18:29:57 | 311f07ba5eada3ab54fb60d46fad9a77e094ca30 | diff --git a/CHANGELOG b/CHANGELOG
index 0929e08..b5b78f9 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -44,6 +44,7 @@ Fixes:
* csvgrep can match multiline values.
* csvgrep correctly operates on ragged rows.
* csvsql correctly escapes `%` characters in SQL queries.
+* csvsql adds standard input only if explicitly requested.
* csvstack supports stacking a single file.
* csvstat always reports frequencies.
* FilteringCSVReader's any_match argument works correctly.
diff --git a/csvkit/utilities/csvsql.py b/csvkit/utilities/csvsql.py
index 98c5e84..1da023e 100644
--- a/csvkit/utilities/csvsql.py
+++ b/csvkit/utilities/csvsql.py
@@ -11,7 +11,7 @@ from csvkit.cli import CSVKitUtility
class CSVSQL(CSVKitUtility):
- description = 'Generate SQL statements for one or more CSV files, create execute those statements directly on a database, and execute one or more SQL queries.'
+ description = 'Generate SQL statements for one or more CSV files, or execute those statements directly on a database, and execute one or more SQL queries.'
override_flags = ['l', 'f']
def add_arguments(self):
@@ -56,14 +56,6 @@ class CSVSQL(CSVKitUtility):
else:
table_names = []
- # If one or more filenames are specified, we need to add stdin ourselves (if available)
- if sys.stdin not in self.input_files:
- try:
- if not sys.stdin.isatty():
- self.input_files.insert(0, sys.stdin)
- except:
- pass
-
# Create an SQLite database in memory if no connection string is specified
if query and not connection_string:
connection_string = "sqlite:///:memory:"
diff --git a/docs/scripts/csvsql.rst b/docs/scripts/csvsql.rst
index 3a9b4b0..f2dd003 100644
--- a/docs/scripts/csvsql.rst
+++ b/docs/scripts/csvsql.rst
@@ -16,7 +16,7 @@ Generate SQL statements for a CSV file or execute those statements directly on a
[--blanks] [--no-inference] [--db-schema DB_SCHEMA]
[FILE [FILE ...]]
- Generate SQL statements for one or more CSV files, create execute those
+ Generate SQL statements for one or more CSV files, or execute those
statements directly on a database, and execute one or more SQL queries.
positional arguments:
| csvsql without tty always tries to read stdin
The following snip works from a terminal but fails in a non-interactive session (we hit it in Jenkins, but I'd guess it also fails in cron)
```
csvsql --table foo --query "select * from foo" foo.csv
```
You get a `StopIteration` exception because csvsql is trying to read from stdin, which has nothing coming (this line: https://github.com/wireservice/csvkit/blob/205175fb70745b80db19acd4c314ad6c774b7fc0/csvkit/utilities/csvsql.py#L57). There's a previous discussion of the issue at https://github.com/wireservice/csvkit/issues/342 and https://github.com/wireservice/csvkit/issues/627, but the linked commit doesn't solve the issue.
We're working around it by always sending something in to stdin when running from a job.
I think csvsql should require naming stdin with a "-" when you want to read from both files named as arguments and stdin. This is how `cat` works:
```
echo "foo" | cat /tmp/file.csv # just prints file.csv
echo "foo" | cat - /tmp/file.csv # prints foo, then file.csv
echo "foo" | cat /tmp/file.csv - # prints file.csv, then foo
```
| wireservice/csvkit | diff --git a/tests/test_utilities/test_csvsql.py b/tests/test_utilities/test_csvsql.py
index be7d54f..2135fad 100644
--- a/tests/test_utilities/test_csvsql.py
+++ b/tests/test_utilities/test_csvsql.py
@@ -67,7 +67,7 @@ class TestCSVSQL(CSVKitTestCase, EmptyFileTests):
input_file = six.StringIO("a,b,c\n1,2,3\n")
with stdin_as_string(input_file):
- sql = self.get_output(['examples/dummy.csv'])
+ sql = self.get_output(['-', 'examples/dummy.csv'])
self.assertTrue('CREATE TABLE stdin' in sql)
self.assertTrue('CREATE TABLE dummy' in sql)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 3
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements-py3.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | agate==1.13.0
agate-dbf==0.2.3
agate-excel==0.4.1
alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
-e git+https://github.com/wireservice/csvkit.git@70d641c60202c8c8d596d1bf90fb03b10a1a4614#egg=csvkit
dbfread==2.0.7
distlib==0.3.9
docutils==0.18.1
et-xmlfile==1.1.0
filelock==3.4.1
greenlet==2.0.2
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
isodate==0.6.1
Jinja2==3.0.3
leather==0.4.0
MarkupSafe==2.0.1
nose==1.3.7
olefile==0.47
openpyxl==3.1.3
packaging==21.3
parsedatetime==2.6
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.2
python-slugify==6.1.2
pytimeparse==1.1.8
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.54
text-unidecode==1.3
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
xlrd==2.0.1
zipp==3.6.0
| name: csvkit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- agate==1.13.0
- agate-dbf==0.2.3
- agate-excel==0.4.1
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- coverage==6.2
- dbfread==2.0.7
- distlib==0.3.9
- docutils==0.18.1
- et-xmlfile==1.1.0
- filelock==3.4.1
- greenlet==2.0.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- isodate==0.6.1
- jinja2==3.0.3
- leather==0.4.0
- markupsafe==2.0.1
- nose==1.3.7
- olefile==0.47
- openpyxl==3.1.3
- packaging==21.3
- parsedatetime==2.6
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.2
- python-slugify==6.1.2
- pytimeparse==1.1.8
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sqlalchemy==1.4.54
- text-unidecode==1.3
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- xlrd==2.0.1
- zipp==3.6.0
prefix: /opt/conda/envs/csvkit
| [
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_create_table",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_launch_new_instance",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_no_header_row",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_no_inference"
]
| [
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_query"
]
| [
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_empty",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_empty_with_query",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_stdin",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_stdin_and_filename"
]
| []
| MIT License | 675 | [
"docs/scripts/csvsql.rst",
"csvkit/utilities/csvsql.py",
"CHANGELOG"
]
| [
"docs/scripts/csvsql.rst",
"csvkit/utilities/csvsql.py",
"CHANGELOG"
]
|
|
conjure-up__conjure-up-314 | 0237d91780f13541affc3de3771ac2be54bcb4b6 | 2016-08-02 17:03:17 | 2c8b2b9a848e19ffc77cade081472db13b94004b | diff --git a/conjureup/controllers/clouds/gui.py b/conjureup/controllers/clouds/gui.py
index bef8da0..37898cb 100644
--- a/conjureup/controllers/clouds/gui.py
+++ b/conjureup/controllers/clouds/gui.py
@@ -5,7 +5,8 @@ from conjureup import controllers
from conjureup.app_config import app
from conjureup import juju
import petname
-from . import common
+from conjureup.controllers.clouds.common import (get_controller_in_cloud,
+ list_clouds)
class CloudsController:
@@ -28,7 +29,7 @@ class CloudsController:
cloud: Cloud to create the controller/model on.
"""
utils.pollinate(app.session_id, 'CS')
- existing_controller = common.get_controller_in_cloud(cloud)
+ existing_controller = get_controller_in_cloud(cloud)
if existing_controller is None:
return controllers.use('newcloud').render(cloud)
@@ -46,7 +47,7 @@ class CloudsController:
return controllers.use('variants').render()
def render(self):
- clouds = common.list_clouds()
+ clouds = list_clouds()
excerpt = app.config.get(
'description',
"Please select from a list of available clouds")
diff --git a/conjureup/controllers/clouds/tui.py b/conjureup/controllers/clouds/tui.py
index d7d7e87..cc8a7b1 100644
--- a/conjureup/controllers/clouds/tui.py
+++ b/conjureup/controllers/clouds/tui.py
@@ -2,7 +2,8 @@ from conjureup import controllers
from conjureup import juju
from conjureup import utils
from conjureup.app_config import app
-from . import common
+from conjureup.controllers.clouds.common import get_controller_in_cloud
+
import petname
import sys
import os
@@ -17,7 +18,7 @@ class CloudsController:
"/usr/share/conjure-up/run-lxd-config",
back)
- existing_controller = common.get_controller_in_cloud(app.argv.cloud)
+ existing_controller = get_controller_in_cloud(app.argv.cloud)
if existing_controller is None:
return controllers.use('newcloud').render(app.argv.cloud)
| test failure if no juju found on system
travis-ci caught this when no juju is found:
```
======================================================================
ERROR: call finish
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/travis/build/conjure-up/conjure-up/test/test_controllers_clouds_gui.py", line 94, in test_finish
self.controller.finish(self.cloudname)
File "/home/travis/build/conjure-up/conjure-up/conjureup/controllers/clouds/gui.py", line 31, in finish
existing_controller = common.get_controller_in_cloud(cloud)
File "/home/travis/build/conjure-up/conjure-up/conjureup/controllers/clouds/common.py", line 63, in get_controller_in_cloud
controllers = juju.get_controllers()['controllers'].items()
File "/home/travis/build/conjure-up/conjure-up/conjureup/juju.py", line 515, in get_controllers
"Unable to list controllers: {}".format(sh.stderr.decode('utf8')))
LookupError: Unable to list controllers: /bin/sh: 1: juju: not found
======================================================================
ERROR: call finish
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/travis/build/conjure-up/conjure-up/test/test_controllers_clouds_tui.py", line 97, in test_finish
self.controller.finish()
File "/home/travis/build/conjure-up/conjure-up/conjureup/controllers/clouds/tui.py", line 20, in finish
existing_controller = common.get_controller_in_cloud(app.argv.cloud)
File "/home/travis/build/conjure-up/conjure-up/conjureup/controllers/clouds/common.py", line 63, in get_controller_in_cloud
controllers = juju.get_controllers()['controllers'].items()
File "/home/travis/build/conjure-up/conjure-up/conjureup/juju.py", line 515, in get_controllers
"Unable to list controllers: {}".format(sh.stderr.decode('utf8')))
LookupError: Unable to list controllers: /bin/sh: 1: juju: not found
``` | conjure-up/conjure-up | diff --git a/test/test_controllers_clouds_gui.py b/test/test_controllers_clouds_gui.py
index 3202389..2736bc7 100644
--- a/test/test_controllers_clouds_gui.py
+++ b/test/test_controllers_clouds_gui.py
@@ -19,8 +19,8 @@
import unittest
-# from unittest.mock import ANY, call, MagicMock, patch, sentinel
-from unittest.mock import patch, MagicMock
+
+from unittest.mock import call, patch, MagicMock
from conjureup.controllers.clouds.gui import CloudsController
@@ -44,17 +44,17 @@ class CloudsGUIRenderTestCase(unittest.TestCase):
'conjureup.controllers.clouds.gui.app')
mock_app = self.app_patcher.start()
mock_app.ui = MagicMock(name="app.ui")
- self.common_patcher = patch(
- 'conjureup.controllers.clouds.gui.common')
- self.mock_common = self.common_patcher.start()
- self.mock_common.list_clouds.return_value = ['test1', 'test2']
+ self.list_clouds_patcher = patch(
+ 'conjureup.controllers.clouds.gui.list_clouds')
+ self.mock_list_clouds = self.list_clouds_patcher.start()
+ self.mock_list_clouds.return_value = ['test1', 'test2']
def tearDown(self):
self.utils_patcher.stop()
self.finish_patcher.stop()
self.view_patcher.stop()
self.app_patcher.stop()
- self.common_patcher.start()
+ self.list_clouds_patcher.start()
def test_render(self):
"call render"
@@ -82,13 +82,32 @@ class CloudsGUIFinishTestCase(unittest.TestCase):
self.mock_app.ui = MagicMock(name="app.ui")
self.cloudname = 'testcloudname'
+ self.juju_patcher = patch(
+ 'conjureup.controllers.clouds.gui.juju')
+ self.mock_juju = self.juju_patcher.start()
+
+ self.gcc_patcher = patch(
+ 'conjureup.controllers.clouds.gui.get_controller_in_cloud')
+ self.mock_gcc = self.gcc_patcher.start()
def tearDown(self):
self.controllers_patcher.stop()
self.utils_patcher.stop()
self.render_patcher.stop()
self.app_patcher.stop()
-
- def test_finish(self):
- "call finish"
- self.controller.finish(self.cloudname)
+ self.juju_patcher.stop()
+ self.gcc_patcher.stop()
+
+ def test_finish_w_controller(self):
+ "clouds.finish with an existing controller"
+ self.mock_gcc.return_value = 'testcontroller'
+ self.controller.finish('testcloud')
+ self.mock_juju.assert_has_calls([
+ call.switch_controller('testcontroller')])
+
+ def test_finish_no_controller(self):
+ "clouds.finish without existing controller"
+ self.mock_gcc.return_value = None
+ self.controller.finish('testcloud')
+ self.mock_controllers.use.assert_has_calls([
+ call('newcloud'), call().render('testcloud')])
diff --git a/test/test_controllers_clouds_tui.py b/test/test_controllers_clouds_tui.py
index 222b244..44a4c20 100644
--- a/test/test_controllers_clouds_tui.py
+++ b/test/test_controllers_clouds_tui.py
@@ -19,8 +19,8 @@
import unittest
-# from unittest.mock import ANY, call, MagicMock, patch, sentinel
-from unittest.mock import patch, MagicMock
+
+from unittest.mock import call, patch, MagicMock
from conjureup.controllers.clouds.tui import CloudsController
@@ -85,13 +85,33 @@ class CloudsTUIFinishTestCase(unittest.TestCase):
'conjureup.controllers.clouds.tui.app')
self.mock_app = self.app_patcher.start()
self.mock_app.ui = MagicMock(name="app.ui")
+ self.juju_patcher = patch(
+ 'conjureup.controllers.clouds.tui.juju')
+ self.mock_juju = self.juju_patcher.start()
+
+ self.gcc_patcher = patch(
+ 'conjureup.controllers.clouds.tui.get_controller_in_cloud')
+ self.mock_gcc = self.gcc_patcher.start()
def tearDown(self):
self.controllers_patcher.stop()
self.utils_patcher.stop()
self.render_patcher.stop()
self.app_patcher.stop()
+ self.juju_patcher.stop()
+ self.gcc_patcher.stop()
+
+ def test_finish_w_controller(self):
+ "clouds.finish with an existing controller"
+ self.mock_gcc.return_value = 'testcontroller'
+ self.controller.finish()
+ self.mock_juju.assert_has_calls([
+ call.switch_controller('testcontroller')])
- def test_finish(self):
- "call finish"
+ def test_finish_no_controller(self):
+ "clouds.finish without existing controller"
+ self.mock_gcc.return_value = None
+ self.mock_app.argv.cloud = 'testcloud'
self.controller.finish()
+ self.mock_controllers.use.assert_has_calls([
+ call('newcloud'), call().render('testcloud')])
diff --git a/test/test_controllers_deploy_tui.py b/test/test_controllers_deploy_tui.py
index 7606d20..6c8bc6d 100644
--- a/test/test_controllers_deploy_tui.py
+++ b/test/test_controllers_deploy_tui.py
@@ -92,12 +92,22 @@ class DeployTUIFinishTestCase(unittest.TestCase):
'conjureup.controllers.deploy.tui.app')
self.mock_app = self.app_patcher.start()
self.mock_app.ui = MagicMock(name="app.ui")
+ self.juju_patcher = patch(
+ 'conjureup.controllers.deploy.tui.juju')
+ self.mock_juju = self.juju_patcher.start()
+ self.mock_juju.JUJU_ASYNC_QUEUE = sentinel.JUJU_ASYNC_QUEUE
+
+ self.concurrent_patcher = patch(
+ 'conjureup.controllers.deploy.tui.concurrent')
+ self.mock_concurrent = self.concurrent_patcher.start()
def tearDown(self):
self.controllers_patcher.stop()
self.utils_patcher.stop()
self.render_patcher.stop()
self.app_patcher.stop()
+ self.juju_patcher.stop()
+ self.concurrent_patcher.stop()
def test_finish(self):
"call finish"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"tox",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
bson==0.5.10
certifi==2021.5.30
charset-normalizer==2.0.12
configobj==5.0.8
-e git+https://github.com/conjure-up/conjure-up.git@0237d91780f13541affc3de3771ac2be54bcb4b6#egg=conjure_up
distlib==0.3.9
filelock==3.4.1
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
petname==2.6
platformdirs==2.4.0
pluggy==1.0.0
progressbar2==3.55.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
python-utils==3.5.2
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
urwid==2.1.2
virtualenv==20.17.1
ws4py==0.3.4
zipp==3.6.0
| name: conjure-up
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- bson==0.5.10
- charset-normalizer==2.0.12
- configobj==5.0.8
- distlib==0.3.9
- filelock==3.4.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- petname==2.6
- platformdirs==2.4.0
- pluggy==1.0.0
- progressbar2==3.55.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- python-utils==3.5.2
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- urwid==2.1.2
- virtualenv==20.17.1
- ws4py==0.3.4
- zipp==3.6.0
prefix: /opt/conda/envs/conjure-up
| [
"test/test_controllers_clouds_gui.py::CloudsGUIRenderTestCase::test_render",
"test/test_controllers_clouds_gui.py::CloudsGUIFinishTestCase::test_finish_no_controller",
"test/test_controllers_clouds_gui.py::CloudsGUIFinishTestCase::test_finish_w_controller",
"test/test_controllers_clouds_tui.py::CloudsTUIFinishTestCase::test_finish_no_controller",
"test/test_controllers_clouds_tui.py::CloudsTUIFinishTestCase::test_finish_w_controller"
]
| []
| [
"test/test_controllers_clouds_tui.py::CloudsTUIRenderTestCase::test_render",
"test/test_controllers_clouds_tui.py::CloudsTUIRenderTestCase::test_render_unknown",
"test/test_controllers_deploy_tui.py::DeployTUIRenderTestCase::test_render",
"test/test_controllers_deploy_tui.py::DeployTUIFinishTestCase::test_finish"
]
| []
| MIT License | 676 | [
"conjureup/controllers/clouds/tui.py",
"conjureup/controllers/clouds/gui.py"
]
| [
"conjureup/controllers/clouds/tui.py",
"conjureup/controllers/clouds/gui.py"
]
|
|
docker__docker-py-1150 | 650cc70e934044fcb5dfd27fd27777f91c337b6c | 2016-08-03 00:27:54 | a44d65be370c28abd666a299456b83659dd1a1df | diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py
index 7c41a5f8..46c28a80 100644
--- a/docker/utils/decorators.py
+++ b/docker/utils/decorators.py
@@ -40,7 +40,7 @@ def minimum_version(version):
def update_headers(f):
def inner(self, *args, **kwargs):
if 'HttpHeaders' in self._auth_configs:
- if 'headers' not in kwargs:
+ if not kwargs.get('headers'):
kwargs['headers'] = self._auth_configs['HttpHeaders']
else:
kwargs['headers'].update(self._auth_configs['HttpHeaders'])
| Client.build crashes when trying to pull a new image if HttpHeaders are set in config file
```python
import docker
c = docker.Client()
c.build('https://github.com/docker/compose.git')
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-d78c607c9627> in <module>()
----> 1 c.build('https://github.com/docker/compose.git')
/home/joffrey/.envs/pydocker/local/lib/python2.7/site-packages/docker/api/build.pyc in build(self, path, tag, quiet, fileobj, nocache, rm, stream, timeout, custom_context, encoding, pull, forcerm, dockerfile, container_limits, decode, buildargs, gzip)
102 headers=headers,
103 stream=stream,
--> 104 timeout=timeout,
105 )
106
/home/joffrey/.envs/pydocker/local/lib/python2.7/site-packages/docker/utils/decorators.pyc in inner(self, *args, **kwargs)
44 kwargs['headers'] = self._auth_configs['HttpHeaders']
45 else:
---> 46 kwargs['headers'].update(self._auth_configs['HttpHeaders'])
47 return f(self, *args, **kwargs)
48 return inner
AttributeError: 'NoneType' object has no attribute 'update'
``` | docker/docker-py | diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 0f7a58c9..47ced433 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -20,9 +20,11 @@ from docker.utils import (
create_host_config, Ulimit, LogConfig, parse_bytes, parse_env_file,
exclude_paths, convert_volume_binds, decode_json_header, tar,
split_command, create_ipam_config, create_ipam_pool, parse_devices,
+ update_headers,
)
-from docker.utils.utils import create_endpoint_config
+
from docker.utils.ports import build_port_bindings, split_port
+from docker.utils.utils import create_endpoint_config
from .. import base
from ..helpers import make_tree
@@ -34,6 +36,37 @@ TEST_CERT_DIR = os.path.join(
)
+class DecoratorsTest(base.BaseTestCase):
+ def test_update_headers(self):
+ sample_headers = {
+ 'X-Docker-Locale': 'en-US',
+ }
+
+ def f(self, headers=None):
+ return headers
+
+ client = Client()
+ client._auth_configs = {}
+
+ g = update_headers(f)
+ assert g(client, headers=None) is None
+ assert g(client, headers={}) == {}
+ assert g(client, headers={'Content-type': 'application/json'}) == {
+ 'Content-type': 'application/json',
+ }
+
+ client._auth_configs = {
+ 'HttpHeaders': sample_headers
+ }
+
+ assert g(client, headers=None) == sample_headers
+ assert g(client, headers={}) == sample_headers
+ assert g(client, headers={'Content-type': 'application/json'}) == {
+ 'Content-type': 'application/json',
+ 'X-Docker-Locale': 'en-US',
+ }
+
+
class HostConfigTest(base.BaseTestCase):
def test_create_host_config_no_options(self):
config = create_host_config(version='1.19')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@650cc70e934044fcb5dfd27fd27777f91c337b6c#egg=docker_py
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::DecoratorsTest::test_update_headers"
]
| []
| [
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_blkio_constraints",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_score_adj",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_alternate_env",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_with_equals_character",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls_tcp_proto",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_leading_dot_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_with_path_traversal",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 677 | [
"docker/utils/decorators.py"
]
| [
"docker/utils/decorators.py"
]
|
|
zopefoundation__BTrees-43 | be6e6ea7fed82058dc80874eea4813d73df06f3c | 2016-08-03 13:24:53 | 1fb38674c67b084b4c0453a3d5cefb79eca967af | diff --git a/BTrees/BTreeTemplate.c b/BTrees/BTreeTemplate.c
index 42d2880..ce77853 100644
--- a/BTrees/BTreeTemplate.c
+++ b/BTrees/BTreeTemplate.c
@@ -219,6 +219,8 @@ BTree_check(BTree *self)
return result;
}
+#define _BGET_REPLACE_TYPE_ERROR 1
+#define _BGET_ALLOW_TYPE_ERROR 0
/*
** _BTree_get
**
@@ -229,6 +231,14 @@ BTree_check(BTree *self)
** keyarg the key to search for, as a Python object
** has_key true/false; when false, try to return the associated
** value; when true, return a boolean
+** replace_type_err true/false: When true, ignore the TypeError from
+** a key conversion issue, instead
+** transforming it into a KeyError set. If
+** you are just reading/searching, set to
+** true. If you will be adding/updating,
+** however, set to false. Or use
+** _BGET_REPLACE_TYPE_ERROR
+** and _BGET_ALLOW_TYPE_ERROR, respectively.
** Return
** When has_key false:
** If key exists, its associated value.
@@ -239,14 +249,22 @@ BTree_check(BTree *self)
** If key doesn't exist, 0.
*/
static PyObject *
-_BTree_get(BTree *self, PyObject *keyarg, int has_key)
+_BTree_get(BTree *self, PyObject *keyarg, int has_key, int replace_type_err)
{
KEY_TYPE key;
PyObject *result = NULL; /* guilty until proved innocent */
int copied = 1;
COPY_KEY_FROM_ARG(key, keyarg, copied);
- UNLESS (copied) return NULL;
+ UNLESS (copied)
+ {
+ if (replace_type_err && PyErr_ExceptionMatches(PyExc_TypeError))
+ {
+ PyErr_Clear();
+ PyErr_SetObject(PyExc_KeyError, keyarg);
+ }
+ return NULL;
+ }
PER_USE_OR_RETURN(self, NULL);
if (self->len == 0)
@@ -289,7 +307,7 @@ Done:
static PyObject *
BTree_get(BTree *self, PyObject *key)
{
- return _BTree_get(self, key, 0);
+ return _BTree_get(self, key, 0, _BGET_REPLACE_TYPE_ERROR);
}
/* Create a new bucket for the BTree or TreeSet using the class attribute
@@ -1940,7 +1958,7 @@ BTree_getm(BTree *self, PyObject *args)
UNLESS (PyArg_ParseTuple(args, "O|O", &key, &d))
return NULL;
- if ((r=_BTree_get(self, key, 0)))
+ if ((r=_BTree_get(self, key, 0, _BGET_REPLACE_TYPE_ERROR)))
return r;
UNLESS (PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
@@ -1952,7 +1970,7 @@ BTree_getm(BTree *self, PyObject *args)
static PyObject *
BTree_has_key(BTree *self, PyObject *key)
{
- return _BTree_get(self, key, 1);
+ return _BTree_get(self, key, 1, _BGET_REPLACE_TYPE_ERROR);
}
static PyObject *
@@ -1965,7 +1983,7 @@ BTree_setdefault(BTree *self, PyObject *args)
if (! PyArg_UnpackTuple(args, "setdefault", 2, 2, &key, &failobj))
return NULL;
- value = _BTree_get(self, key, 0);
+ value = _BTree_get(self, key, 0, _BGET_ALLOW_TYPE_ERROR);
if (value != NULL)
return value;
@@ -1998,7 +2016,7 @@ BTree_pop(BTree *self, PyObject *args)
if (! PyArg_UnpackTuple(args, "pop", 1, 2, &key, &failobj))
return NULL;
- value = _BTree_get(self, key, 0);
+ value = _BTree_get(self, key, 0, _BGET_ALLOW_TYPE_ERROR);
if (value != NULL)
{
/* Delete key and associated value. */
@@ -2043,7 +2061,7 @@ BTree_pop(BTree *self, PyObject *args)
static int
BTree_contains(BTree *self, PyObject *key)
{
- PyObject *asobj = _BTree_get(self, key, 1);
+ PyObject *asobj = _BTree_get(self, key, 1, _BGET_REPLACE_TYPE_ERROR);
int result = -1;
if (asobj != NULL)
@@ -2051,6 +2069,11 @@ BTree_contains(BTree *self, PyObject *key)
result = INT_AS_LONG(asobj) ? 1 : 0;
Py_DECREF(asobj);
}
+ else if (PyErr_ExceptionMatches(PyExc_KeyError))
+ {
+ PyErr_Clear();
+ result = 0;
+ }
return result;
}
diff --git a/BTrees/_base.py b/BTrees/_base.py
index 07498a3..3158d91 100644
--- a/BTrees/_base.py
+++ b/BTrees/_base.py
@@ -269,7 +269,7 @@ def _no_default_comparison(key):
lt = None # pragma: no cover PyPy3
if (lt is None and
getattr(key, '__cmp__', None) is None):
- raise TypeError("Can't use default __cmp__")
+ raise TypeError("Object has default comparison")
class Bucket(_BucketBase):
@@ -863,7 +863,12 @@ class _Tree(_Base):
return child._findbucket(key)
def __contains__(self, key):
- return key in (self._findbucket(self._to_key(key)) or ())
+ try:
+ tree_key = self._to_key(key)
+ except TypeError:
+ # Can't convert the key, so can't possibly be in the tree
+ return False
+ return key in (self._findbucket(tree_key) or ())
def has_key(self, key):
index = self._search(key)
diff --git a/CHANGES.rst b/CHANGES.rst
index ac79910..834fbfb 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4,7 +4,19 @@
4.3.2 (unreleased)
------------------
-- TBD
+- Make the CPython implementation consistent with the pure-Python
+ implementation and no longer raise ``TypeError`` for an object key
+ (in object-keyed trees) with default comparison on ``__getitem__``,
+ ``get`` or ``in`` operations. Instead, the results will be a
+ ``KeyError``, the default value, and ``False``, respectively.
+ Previously, CPython raised a ``TypeError`` in those cases, while the
+ Python implementation behaved as specified.
+
+ Likewise, non-integer keys in integer-keyed trees
+ will raise ``KeyError``, return the default and return ``False``,
+ respectively, in both implementations. Previously, pure-Python
+ raised a ``KeyError``, returned the default, and raised a
+ ``TypeError``, while CPython raised ``TypeError`` in all three cases.
4.3.1 (2016-05-16)
------------------
@@ -21,7 +33,7 @@
- When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning
the user's global wheel cache.
-- Ensure that he pure-Python implementation, used on PyPy and when a C
+- Ensure that the pure-Python implementation, used on PyPy and when a C
compiler isn't available for CPython, pickles identically to the C
version. Unpickling will choose the best available implementation.
This change prevents interoperability problems and database corruption if
| BTree.get(object()) raises TypeError on CPython, returns default on PyPy
I ran into an implementation difference between the C version and the Python versions.
Here's PyPy:
```python
Python 2.7.10 (7e8df3df9641, Jun 14 2016, 13:30:54)
[PyPy 5.3.1 with GCC 4.2.1 Compatible Apple LLVM 5.1 (clang-503.0.40)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>>> import BTrees
>>>> BTrees.OOBTree.OOBTree().get(object())
>>>>
```
Here's CPython:
```python
Python 2.7.12 (default, Jul 11 2016, 16:16:26)
[GCC 4.2.1 Compatible Apple LLVM 7.3.0 (clang-703.0.31)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>> import BTrees
>>> BTrees.OOBTree.OOBTree().get(object())
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: Object has default comparison
>>>
```
The specific type of object with default comparison doesn't really matter. And they both raise a TypeError if you try to set a key with default comparison (albeit with different error messages).
Which is the right behaviour here? I would argue that the Python behaviour is certainly nicer and friendlier to the user. It makes it easier to substitute a BTree for a dict in more places. In fact, on CPython, I had to subclass the BTree to be able to use it in place of a dict because of this TypeError (even though I had already made sure that we would never try to set such a value, sometimes we still query for them).
If there's consensus I can put together a PR to bring them into line. | zopefoundation/BTrees | diff --git a/BTrees/tests/test_IOBTree.py b/BTrees/tests/test_IOBTree.py
index 2e2e25e..aa14c4a 100644
--- a/BTrees/tests/test_IOBTree.py
+++ b/BTrees/tests/test_IOBTree.py
@@ -143,6 +143,14 @@ class _TestIOBTreesBase(TypeTest):
def _noneraises(self):
self._makeOne()[None] = 1
+ def testStringAllowedInContains(self):
+ self.assertFalse('key' in self._makeOne())
+
+ def testStringKeyRaisesKeyErrorWhenMissing(self):
+ self.assertRaises(KeyError, self._makeOne().__getitem__, 'key')
+
+ def testStringKeyReturnsDefaultFromGetWhenMissing(self):
+ self.assertEqual(self._makeOne().get('key', 42), 42)
class TestIOBTrees(_TestIOBTreesBase, unittest.TestCase):
diff --git a/BTrees/tests/test_OOBTree.py b/BTrees/tests/test_OOBTree.py
index ffc5686..7152947 100644
--- a/BTrees/tests/test_OOBTree.py
+++ b/BTrees/tests/test_OOBTree.py
@@ -109,7 +109,7 @@ class OOBTreeTest(BTreeTests, unittest.TestCase):
self.assertEqual(list(tree.byValue(22)),
[(y, x) for x, y in reversed(ITEMS[22:])])
- def testRejectDefaultComparison(self):
+ def testRejectDefaultComparisonOnSet(self):
# Check that passing int keys w default comparison fails.
# Only applies to new-style class instances. Old-style
# instances are too hard to introspect.
@@ -126,6 +126,11 @@ class OOBTreeTest(BTreeTests, unittest.TestCase):
self.assertRaises(TypeError, lambda : t.__setitem__(C(), 1))
+ with self.assertRaises(TypeError) as raising:
+ t[C()] = 1
+
+ self.assertEqual(raising.exception.args[0], "Object has default comparison")
+
if PY2: # we only check for __cmp__ on Python2
class With___cmp__(object):
@@ -145,6 +150,15 @@ class OOBTreeTest(BTreeTests, unittest.TestCase):
t.clear()
+ def testAcceptDefaultComparisonOnGet(self):
+ # Issue #42
+ t = self._makeOne()
+ class C(object):
+ pass
+
+ self.assertEqual(t.get(C(), 42), 42)
+ self.assertRaises(KeyError, t.__getitem__, C())
+ self.assertFalse(C() in t)
class OOBTreePyTest(OOBTreeTest):
#
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 4.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"nosexcover",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/zopefoundation/BTrees.git@be6e6ea7fed82058dc80874eea4813d73df06f3c#egg=BTrees
cffi==1.17.1
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
nosexcover==1.0.11
packaging @ file:///croot/packaging_1734472117206/work
persistent==6.1.1
pluggy @ file:///croot/pluggy_1733169602837/work
pycparser==2.22
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
zope.deferredimport==5.0
zope.interface==7.2
zope.proxy==6.1
| name: BTrees
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cffi==1.17.1
- coverage==7.8.0
- nose==1.3.7
- nosexcover==1.0.11
- persistent==6.1.1
- pycparser==2.22
- zope-deferredimport==5.0
- zope-interface==7.2
- zope-proxy==6.1
prefix: /opt/conda/envs/BTrees
| [
"BTrees/tests/test_IOBTree.py::TestIOBTreesPy::testStringAllowedInContains",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRejectDefaultComparisonOnSet"
]
| [
"BTrees/tests/test_IOBTree.py::TestIOBTrees::testStringAllowedInContains",
"BTrees/tests/test_IOBTree.py::TestIOBTrees::testStringKeyRaisesKeyErrorWhenMissing",
"BTrees/tests/test_IOBTree.py::TestIOBTrees::testStringKeyReturnsDefaultFromGetWhenMissing",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testFailMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTests::testMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testFailMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOBTreeConflictTestsPy::testMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testFailMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTests::testMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testFailMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testMergeDeleteAndUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOBucketConflictTestsPy::testMergeUpdate",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOTreeSetConflictTestsPy::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOSetConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testFailMergeDelete",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testFailMergeEmptyAndFill",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testFailMergeInsert",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testMergeDelete",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testMergeEmpty",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testMergeInserts",
"BTrees/tests/test_IOBTree.py::IOSetConflictTestsPy::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testAcceptDefaultComparisonOnGet",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testFailMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOBucketConflictTests::testMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testFailMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOBucketPyConflictTests::testMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOSetConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOSetPyConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testFailMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOBTreeConflictTests::testMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testFailMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testFailMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testMergeDeleteAndUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOBTreePyConflictTests::testMergeUpdate",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOTreeSetConflictTests::testMergeInsertsFromEmpty",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testFailMergeDelete",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testFailMergeEmptyAndFill",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testFailMergeInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testMergeDelete",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testMergeEmpty",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testMergeInserts",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyConflictTests::testMergeInsertsFromEmpty"
]
| [
"BTrees/tests/test_IOBTree.py::TestLongIntKeys::testLongIntKeysWork",
"BTrees/tests/test_IOBTree.py::TestLongIntKeys::testLongIntKeysOutOfRange",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testBadUpdateTupleSize",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testClear",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testGetItemFails",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testGetReturnsDefault",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testHasKeyWorks",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testItemsNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testItemsWorks",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testIterators",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testKeysNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testKeysWorks",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testLen",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testPop",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testRangedIterators",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testReplaceWorks",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testRepr",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testSetItemGetItemWorks",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testSetdefault",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testSimpleExclusivRanges",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testValuesNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testValuesWorks",
"BTrees/tests/test_IOBTree.py::IOBucketTest::testValuesWorks1",
"BTrees/tests/test_IOBTree.py::IOBucketTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOBucketTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOBucketTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOBucketTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testBadUpdateTupleSize",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testClear",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testGetItemFails",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testGetReturnsDefault",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testHasKeyWorks",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testItemsNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testItemsWorks",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testIterators",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testKeysNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testKeysWorks",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testLen",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testPop",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testRangedIterators",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testReplaceWorks",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testRepr",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testSetItemGetItemWorks",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testSetdefault",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testSimpleExclusivRanges",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testValuesNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testValuesWorks",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::testValuesWorks1",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOBucketPyTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testAddingOneSetsChanged",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testBigInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testClear",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testDuplicateInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testHasKeyFails",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testInsertReturnsValue",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testIterator",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testKeys",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testRemoveFails",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testRemoveSucceeds",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOTreeSetTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testAddingOneSetsChanged",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testBigInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testClear",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testDuplicateInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testHasKeyFails",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testInsert",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testInsertReturnsValue",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testIterator",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testKeys",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testRemoveFails",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testRemoveSucceeds",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOTreeSetPyTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOSetTest::testAddingOneSetsChanged",
"BTrees/tests/test_IOBTree.py::IOSetTest::testBigInsert",
"BTrees/tests/test_IOBTree.py::IOSetTest::testClear",
"BTrees/tests/test_IOBTree.py::IOSetTest::testDuplicateInsert",
"BTrees/tests/test_IOBTree.py::IOSetTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOSetTest::testGetItem",
"BTrees/tests/test_IOBTree.py::IOSetTest::testHasKeyFails",
"BTrees/tests/test_IOBTree.py::IOSetTest::testInsert",
"BTrees/tests/test_IOBTree.py::IOSetTest::testInsertReturnsValue",
"BTrees/tests/test_IOBTree.py::IOSetTest::testIterator",
"BTrees/tests/test_IOBTree.py::IOSetTest::testKeys",
"BTrees/tests/test_IOBTree.py::IOSetTest::testLen",
"BTrees/tests/test_IOBTree.py::IOSetTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOSetTest::testRemoveFails",
"BTrees/tests/test_IOBTree.py::IOSetTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_IOBTree.py::IOSetTest::testRemoveSucceeds",
"BTrees/tests/test_IOBTree.py::IOSetTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOSetTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOSetTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOSetTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOSetTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOSetTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOSetTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOSetTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOSetTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testAddingOneSetsChanged",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testBigInsert",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testClear",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testDuplicateInsert",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testGetItem",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testHasKeyFails",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testInsert",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testInsertReturnsValue",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testIterator",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testKeys",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testLen",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testRemoveFails",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testRemoveSucceeds",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOSetPyTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testAddTwoSetsChanged",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testBadUpdateTupleSize",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testClear",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDamagedIterator",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDeleteNoChildrenWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDeleteOneChildWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDeleteRootWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDeleteTwoChildrenInorderSuccessorWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testDeleteTwoChildrenNoInorderSuccessorWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testGetItemFails",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testGetReturnsDefault",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testHasKeyWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testInsertMethod",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testItemsNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testItemsWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testIterators",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testKeysNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testKeysWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testLen",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testPathologicalLeftBranching",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testPathologicalRangeSearch",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testPathologicalRightBranching",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testPop",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRandomDeletes",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRandomNonOverlappingInserts",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRandomOverlappingInserts",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRangeSearchAfterRandomInsert",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRangeSearchAfterSequentialInsert",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRangedIterators",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRemoveInSmallMapSetsChanged",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testReplaceWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testRepr",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSetItemGetItemWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSetdefault",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSimpleExclusivRanges",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testSuccessorChildParentRewriteExerciseCase",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testTargetedDeletes",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testValuesNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testValuesWorks",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::testValuesWorks1",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::test_legacy_py_pickle",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOBTreeTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testAddTwoSetsChanged",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testBadUpdateTupleSize",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testClear",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDamagedIterator",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDeleteNoChildrenWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDeleteOneChildWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDeleteRootWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDeleteTwoChildrenInorderSuccessorWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testDeleteTwoChildrenNoInorderSuccessorWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testEmptyRangeSearches",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testGetItemFails",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testGetReturnsDefault",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testHasKeyWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testInsertMethod",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testItemsNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testItemsWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testIterators",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testKeysNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testKeysWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testLen",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testMaxKeyMinKey",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testPathologicalLeftBranching",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testPathologicalRangeSearch",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testPathologicalRightBranching",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testPop",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRandomDeletes",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRandomNonOverlappingInserts",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRandomOverlappingInserts",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRangeSearchAfterRandomInsert",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRangeSearchAfterSequentialInsert",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRangedIterators",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRemoveInSmallMapSetsChanged",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testReplaceWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testRepr",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSetItemGetItemWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSetdefault",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSetstateArgumentChecking",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testShortRepr",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSimpleExclusivRanges",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSlicing",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testSuccessorChildParentRewriteExerciseCase",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testTargetedDeletes",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testUpdate",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testValuesNegativeIndex",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testValuesWorks",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::testValuesWorks1",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::test_impl_pickle",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::test_isinstance_subclass",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::test_legacy_py_pickle",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::test_pickle_empty",
"BTrees/tests/test_IOBTree.py::IOBTreePyTest::test_pickle_subclass",
"BTrees/tests/test_IOBTree.py::TestIOBTrees::testBadTypeRaises",
"BTrees/tests/test_IOBTree.py::TestIOBTreesPy::testBadTypeRaises",
"BTrees/tests/test_IOBTree.py::TestIOBTreesPy::testStringKeyRaisesKeyErrorWhenMissing",
"BTrees/tests/test_IOBTree.py::TestIOBTreesPy::testStringKeyReturnsDefaultFromGetWhenMissing",
"BTrees/tests/test_IOBTree.py::TestIOSets::testBadBadKeyAfterFirst",
"BTrees/tests/test_IOBTree.py::TestIOSets::testNonIntegerInsertRaises",
"BTrees/tests/test_IOBTree.py::TestIOSetsPy::testBadBadKeyAfterFirst",
"BTrees/tests/test_IOBTree.py::TestIOSetsPy::testNonIntegerInsertRaises",
"BTrees/tests/test_IOBTree.py::TestIOTreeSets::testBadBadKeyAfterFirst",
"BTrees/tests/test_IOBTree.py::TestIOTreeSets::testNonIntegerInsertRaises",
"BTrees/tests/test_IOBTree.py::TestIOTreeSetsPy::testBadBadKeyAfterFirst",
"BTrees/tests/test_IOBTree.py::TestIOTreeSetsPy::testNonIntegerInsertRaises",
"BTrees/tests/test_IOBTree.py::PureIO::testDifference",
"BTrees/tests/test_IOBTree.py::PureIO::testEmptyDifference",
"BTrees/tests/test_IOBTree.py::PureIO::testEmptyIntersection",
"BTrees/tests/test_IOBTree.py::PureIO::testEmptyUnion",
"BTrees/tests/test_IOBTree.py::PureIO::testIntersection",
"BTrees/tests/test_IOBTree.py::PureIO::testLargerInputs",
"BTrees/tests/test_IOBTree.py::PureIO::testNone",
"BTrees/tests/test_IOBTree.py::PureIO::testUnion",
"BTrees/tests/test_IOBTree.py::PureIOPy::testDifference",
"BTrees/tests/test_IOBTree.py::PureIOPy::testEmptyDifference",
"BTrees/tests/test_IOBTree.py::PureIOPy::testEmptyIntersection",
"BTrees/tests/test_IOBTree.py::PureIOPy::testEmptyUnion",
"BTrees/tests/test_IOBTree.py::PureIOPy::testIntersection",
"BTrees/tests/test_IOBTree.py::PureIOPy::testLargerInputs",
"BTrees/tests/test_IOBTree.py::PureIOPy::testNone",
"BTrees/tests/test_IOBTree.py::PureIOPy::testUnion",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnion::testBigInput",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnion::testEmpty",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnion::testFunkyKeyIteration",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnion::testLotsOfLittleOnes",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnion::testOne",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnion::testValuesIgnored",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnionPy::testBigInput",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnionPy::testEmpty",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnionPy::testFunkyKeyIteration",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnionPy::testLotsOfLittleOnes",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnionPy::testOne",
"BTrees/tests/test_IOBTree.py::TestIOMultiUnionPy::testValuesIgnored",
"BTrees/tests/test_IOBTree.py::IOModuleTest::testFamily",
"BTrees/tests/test_IOBTree.py::IOModuleTest::testModuleProvides",
"BTrees/tests/test_IOBTree.py::IOModuleTest::testNames",
"BTrees/tests/test_IOBTree.py::IOModuleTest::test_weightedIntersection_not_present",
"BTrees/tests/test_IOBTree.py::IOModuleTest::test_weightedUnion_not_present",
"BTrees/tests/test_IOBTree.py::test_suite",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testBadUpdateTupleSize",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testClear",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testGetItemFails",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testGetReturnsDefault",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testHasKeyWorks",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testItemsNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testItemsWorks",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testIterators",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testKeysNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testKeysWorks",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testLen",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testPop",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testRangedIterators",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testReplaceWorks",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testRepr",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testSetItemGetItemWorks",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testSetdefault",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testSimpleExclusivRanges",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testValuesNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testValuesWorks",
"BTrees/tests/test_OOBTree.py::OOBucketTest::testValuesWorks1",
"BTrees/tests/test_OOBTree.py::OOBucketTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOBucketTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOBucketTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOBucketTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testBadUpdateTupleSize",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testClear",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testGetItemFails",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testGetReturnsDefault",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testHasKeyWorks",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testItemsNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testItemsWorks",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testIterators",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testKeysNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testKeysWorks",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testLen",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testPop",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testRangedIterators",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testReplaceWorks",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testRepr",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testSetItemGetItemWorks",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testSetdefault",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testSimpleExclusivRanges",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testValuesNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testValuesWorks",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::testValuesWorks1",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOBucketPyTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testAddingOneSetsChanged",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testBigInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testClear",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testDuplicateInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testHasKeyFails",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testInsertReturnsValue",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testIterator",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testKeys",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testRemoveFails",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testRemoveSucceeds",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOTreeSetTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testAddingOneSetsChanged",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testBigInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testClear",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testDuplicateInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testHasKeyFails",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testInsert",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testInsertReturnsValue",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testIterator",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testKeys",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testRemoveFails",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testRemoveSucceeds",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOTreeSetPyTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOSetTest::testAddingOneSetsChanged",
"BTrees/tests/test_OOBTree.py::OOSetTest::testBigInsert",
"BTrees/tests/test_OOBTree.py::OOSetTest::testClear",
"BTrees/tests/test_OOBTree.py::OOSetTest::testDuplicateInsert",
"BTrees/tests/test_OOBTree.py::OOSetTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOSetTest::testGetItem",
"BTrees/tests/test_OOBTree.py::OOSetTest::testHasKeyFails",
"BTrees/tests/test_OOBTree.py::OOSetTest::testInsert",
"BTrees/tests/test_OOBTree.py::OOSetTest::testInsertReturnsValue",
"BTrees/tests/test_OOBTree.py::OOSetTest::testIterator",
"BTrees/tests/test_OOBTree.py::OOSetTest::testKeys",
"BTrees/tests/test_OOBTree.py::OOSetTest::testLen",
"BTrees/tests/test_OOBTree.py::OOSetTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOSetTest::testRemoveFails",
"BTrees/tests/test_OOBTree.py::OOSetTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_OOBTree.py::OOSetTest::testRemoveSucceeds",
"BTrees/tests/test_OOBTree.py::OOSetTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOSetTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOSetTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOSetTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOSetTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOSetTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOSetTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOSetTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOSetTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testAddingOneSetsChanged",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testBigInsert",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testClear",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testDuplicateInsert",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testGetItem",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testHasKeyFails",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testInsert",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testInsertReturnsValue",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testIterator",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testKeys",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testLen",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testRemoveFails",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testRemoveInSmallSetSetsChanged",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testRemoveSucceeds",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOSetPyTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testAddTwoSetsChanged",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testBadUpdateTupleSize",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testClear",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDamagedIterator",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDeleteNoChildrenWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDeleteOneChildWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDeleteRootWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDeleteTwoChildrenInorderSuccessorWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testDeleteTwoChildrenNoInorderSuccessorWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testGetItemFails",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testGetReturnsDefault",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testHasKeyWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testInsertMethod",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testItemsNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testItemsWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testIterators",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testKeysNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testKeysWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testLen",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testPathologicalLeftBranching",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testPathologicalRangeSearch",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testPathologicalRightBranching",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testPop",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRandomDeletes",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRandomNonOverlappingInserts",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRandomOverlappingInserts",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRangeSearchAfterRandomInsert",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRangeSearchAfterSequentialInsert",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRangedIterators",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRejectDefaultComparisonOnSet",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRemoveInSmallMapSetsChanged",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testReplaceWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testRepr",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSetItemGetItemWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSetdefault",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSimpleExclusivRanges",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testSuccessorChildParentRewriteExerciseCase",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testTargetedDeletes",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testValuesNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testValuesWorks",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::testValuesWorks1",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::test_byValue",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::test_legacy_py_pickle",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOBTreeTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testAcceptDefaultComparisonOnGet",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testAddTwoSetsChanged",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testBadUpdateTupleSize",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testClear",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDamagedIterator",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDeleteInvalidKeyRaisesKeyError",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDeleteNoChildrenWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDeleteOneChildWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDeleteRootWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDeleteTwoChildrenInorderSuccessorWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testDeleteTwoChildrenNoInorderSuccessorWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testEmptyRangeSearches",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testGetItemFails",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testGetReturnsDefault",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testHasKeyWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testInsertMethod",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testItemsNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testItemsWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testIterators",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testKeysNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testKeysWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testLen",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testMaxKeyMinKey",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testPathologicalLeftBranching",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testPathologicalRangeSearch",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testPathologicalRightBranching",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testPop",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRandomDeletes",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRandomNonOverlappingInserts",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRandomOverlappingInserts",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRangeSearchAfterRandomInsert",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRangeSearchAfterSequentialInsert",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRangedIterators",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRemoveInSmallMapSetsChanged",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testReplaceWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testRepr",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSetItemGetItemWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSetdefault",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSetstateArgumentChecking",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testShortRepr",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSimpleExclusivRanges",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSimpleExclusiveKeyRange",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSlicing",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testSuccessorChildParentRewriteExerciseCase",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testTargetedDeletes",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testUpdate",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testUpdateFromPersistentMapping",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testValuesNegativeIndex",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testValuesWorks",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::testValuesWorks1",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::test_byValue",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::test_impl_pickle",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::test_isinstance_subclass",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::test_legacy_py_pickle",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::test_pickle_empty",
"BTrees/tests/test_OOBTree.py::OOBTreePyTest::test_pickle_subclass",
"BTrees/tests/test_OOBTree.py::PureOO::testDifference",
"BTrees/tests/test_OOBTree.py::PureOO::testEmptyDifference",
"BTrees/tests/test_OOBTree.py::PureOO::testEmptyIntersection",
"BTrees/tests/test_OOBTree.py::PureOO::testEmptyUnion",
"BTrees/tests/test_OOBTree.py::PureOO::testIntersection",
"BTrees/tests/test_OOBTree.py::PureOO::testLargerInputs",
"BTrees/tests/test_OOBTree.py::PureOO::testNone",
"BTrees/tests/test_OOBTree.py::PureOO::testUnion",
"BTrees/tests/test_OOBTree.py::PureOOPy::testDifference",
"BTrees/tests/test_OOBTree.py::PureOOPy::testEmptyDifference",
"BTrees/tests/test_OOBTree.py::PureOOPy::testEmptyIntersection",
"BTrees/tests/test_OOBTree.py::PureOOPy::testEmptyUnion",
"BTrees/tests/test_OOBTree.py::PureOOPy::testIntersection",
"BTrees/tests/test_OOBTree.py::PureOOPy::testLargerInputs",
"BTrees/tests/test_OOBTree.py::PureOOPy::testNone",
"BTrees/tests/test_OOBTree.py::PureOOPy::testUnion",
"BTrees/tests/test_OOBTree.py::OOModuleTest::testFamily",
"BTrees/tests/test_OOBTree.py::OOModuleTest::testModuleProvides",
"BTrees/tests/test_OOBTree.py::OOModuleTest::testNames",
"BTrees/tests/test_OOBTree.py::OOModuleTest::test_multiunion_not_present",
"BTrees/tests/test_OOBTree.py::OOModuleTest::test_weightedIntersection_not_present",
"BTrees/tests/test_OOBTree.py::OOModuleTest::test_weightedUnion_not_present",
"BTrees/tests/test_OOBTree.py::test_suite"
]
| []
| Zope Public License 2.1 | 678 | [
"BTrees/_base.py",
"CHANGES.rst",
"BTrees/BTreeTemplate.c"
]
| [
"BTrees/_base.py",
"CHANGES.rst",
"BTrees/BTreeTemplate.c"
]
|
|
terryyin__lizard-144 | 1933addc0f0d4febb8b2273048f81556c0062d61 | 2016-08-03 13:54:59 | 1933addc0f0d4febb8b2273048f81556c0062d61 | rakhimov: @terryyin This PR is ready to go. | diff --git a/lizard_languages/clike.py b/lizard_languages/clike.py
index 1134e96..a17fb03 100644
--- a/lizard_languages/clike.py
+++ b/lizard_languages/clike.py
@@ -235,7 +235,7 @@ class CLikeStates(CodeStateMachine):
self.context.add_to_long_function_name(token)
def _state_dec_to_imp(self, token):
- if token == 'const' or token == 'noexcept':
+ if token in ('const', 'noexcept', '&', '&&'):
self.context.add_to_long_function_name(" " + token)
elif token == 'throw':
self._state = self._state_throw
| Bug: C++11 ref qualified functions
Lizard misses C++11 ref qualified member functions.
These functions don't appear in the report or the result database.
```cpp
struct A {
void foo() & {};
void foo() const & {};
void foo() && {};
void foo() const && {};
}; | terryyin/lizard | diff --git a/test/testCyclomaticComplexity.py b/test/testCyclomaticComplexity.py
index 346117e..d6efefa 100644
--- a/test/testCyclomaticComplexity.py
+++ b/test/testCyclomaticComplexity.py
@@ -79,3 +79,13 @@ class TestCppCyclomaticComplexity(unittest.TestCase):
""")
self.assertEqual(4, result[0].cyclomatic_complexity)
+ def test_ref_qualifiers(self):
+ """C++11 rvalue ref qualifiers look like AND operator."""
+ result = get_cpp_function_list(
+ "struct A { void foo() && { return bar() && baz(); } };")
+ self.assertEqual(1, len(result))
+ self.assertEqual(2, result[0].cyclomatic_complexity)
+ result = get_cpp_function_list(
+ "struct A { void foo() const && { return bar() && baz(); } };")
+ self.assertEqual(1, len(result))
+ self.assertEqual(2, result[0].cyclomatic_complexity)
diff --git a/test/test_languages/testCAndCPP.py b/test/test_languages/testCAndCPP.py
index 0928b15..b175fcd 100644
--- a/test/test_languages/testCAndCPP.py
+++ b/test/test_languages/testCAndCPP.py
@@ -423,6 +423,7 @@ class Test_c_cpp_lizard(unittest.TestCase):
result = get_cpp_function_list('''int fun(struct a){}''')
self.assertEqual(1, len(result))
+
def test_trailing_return_type(self):
"""C++11 trailing return type for functions."""
result = get_cpp_function_list("auto foo() -> void {}")
@@ -432,6 +433,21 @@ class Test_c_cpp_lizard(unittest.TestCase):
self.assertEqual(1, len(result))
self.assertEqual("foo", result[0].name)
+ def test_ref_qualifiers(self):
+ """C++11 ref qualifiers for member functions."""
+ result = get_cpp_function_list("struct A { void foo() & {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+ result = get_cpp_function_list("struct A { void foo() const & {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+ result = get_cpp_function_list("struct A { void foo() && {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+ result = get_cpp_function_list("struct A { void foo() const && {} };")
+ self.assertEqual(1, len(result))
+ self.assertEqual("A::foo", result[0].name)
+
class Test_Preprocessing(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt",
"dev_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
dill==0.3.9
exceptiongroup==1.2.2
iniconfig==2.1.0
isort==6.0.1
-e git+https://github.com/terryyin/lizard.git@1933addc0f0d4febb8b2273048f81556c0062d61#egg=lizard
mccabe==0.7.0
mock==5.2.0
nose==1.3.7
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pylint==3.3.6
pytest==8.3.5
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
| name: lizard
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==3.3.9
- dill==0.3.9
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pylint==3.3.6
- pytest==8.3.5
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
prefix: /opt/conda/envs/lizard
| [
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_ref_qualifiers",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_ref_qualifiers"
]
| []
| [
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_and",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_else_if",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_forever_loop",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_no_condition",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_non_r_value_ref_in_body",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_one_condition",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_question_mark",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_r_value_ref_in_body",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_r_value_ref_in_parameter",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_statement_no_curly_brackets",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_one_function_with_typedef",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_sharp_if_and_sharp_elif_counts_in_cc_number",
"test/testCyclomaticComplexity.py::TestCppCyclomaticComplexity::test_two_function_with_non_r_value_ref_in_body",
"test/test_languages/testCAndCPP.py::Test_C_Token_extension::test_connecting_marcro",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_1",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_braket_that_is_not_a_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_class_with_inheritance",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_complicated_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_initialization_list",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_initialization_list_noexcept",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_initializer_list",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_constructor_uniform_initialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_destructor_implementation",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_double_nested_template",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_double_slash_within_string",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_empty",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_dec_followed_with_one_word_is_ok",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_dec_with_noexcept",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_dec_with_throw",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_declaration_is_not_counted",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_name_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_operator",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_that_returns_function_pointers",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_1_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_content",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_no_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_strang_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_function_with_strang_param2",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_global_var_constructor",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_inline_operator",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_less_then_is_not_template",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_namespace_alias",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_class_middle",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_template",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_template_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_nested_unnamed_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_no_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_non_function_initializer_list",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_non_function_uniform_initialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_not_old_style_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_old_style_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_old_style_c_function_has_semicolon",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_in_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_with_const",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_with_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_function_with_noexcept",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_one_macro_in_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_only_word_can_be_function_name",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_overloading",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_overloading_shift",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_overloading_with_namespace",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_operator_with_complicated_name",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_parentheses_before_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_pre_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_struct_in_param",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_struct_in_return_type",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_stupid_macro_before_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_as_part_of_function_name",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_as_reference",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_class",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_class_full_specialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_class_partial_specialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_function_specialization",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_with_pointer",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_with_reference",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_template_with_reference_as_reference",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_trailing_return_type",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_two_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_two_simplest_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_typedef_is_not_old_style_c_function",
"test/test_languages/testCAndCPP.py::Test_c_cpp_lizard::test_underscore",
"test/test_languages/testCAndCPP.py::Test_Preprocessing::test_content_macro_should_be_ignored",
"test/test_languages/testCAndCPP.py::Test_Preprocessing::test_preprocessor_is_not_function",
"test/test_languages/testCAndCPP.py::Test_Preprocessing::test_preprocessors_should_be_ignored_outside_function_implementation",
"test/test_languages/testCAndCPP.py::Test_Big::test_trouble"
]
| []
| MIT License | 679 | [
"lizard_languages/clike.py"
]
| [
"lizard_languages/clike.py"
]
|
zopefoundation__zope.interface-44 | e79273048b92735fb23fcee7f695a5ed5d72f647 | 2016-08-03 15:34:56 | 8138c73db26b17ca1967daa2908845b2d2f21cb8 | diff --git a/CHANGES.rst b/CHANGES.rst
index 7ac92c4..db52ae5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4,7 +4,10 @@ Changes
4.2.1 (unreleased)
------------------
-- TBD
+- Add the ability to sort the objects returned by ``implementedBy``.
+ This is compatible with the way interface classes sort so they can
+ be used together in ordered containers like BTrees.
+ (https://github.com/zopefoundation/zope.interface/issues/42)
4.2.0 (2016-06-10)
------------------
diff --git a/src/zope/interface/declarations.py b/src/zope/interface/declarations.py
index 5c4a20a..d770fa0 100644
--- a/src/zope/interface/declarations.py
+++ b/src/zope/interface/declarations.py
@@ -30,7 +30,6 @@ import sys
from types import FunctionType
from types import MethodType
from types import ModuleType
-import warnings
import weakref
from zope.interface.advice import addClassAdvisor
@@ -131,12 +130,86 @@ class Implements(Declaration):
__name__ = '?'
+ @classmethod
+ def named(cls, name, *interfaces):
+ # Implementation method: Produce an Implements interface with
+ # a fully fleshed out __name__ before calling the constructor, which
+ # sets bases to the given interfaces and which may pass this object to
+ # other objects (e.g., to adjust dependents). If they're sorting or comparing
+ # by name, this needs to be set.
+ inst = cls.__new__(cls)
+ inst.__name__ = name
+ inst.__init__(*interfaces)
+ return inst
+
def __repr__(self):
return '<implementedBy %s>' % (self.__name__)
def __reduce__(self):
return implementedBy, (self.inherit, )
+ def __cmp(self, other):
+ # Yes, I did mean to name this __cmp, rather than __cmp__.
+ # It is a private method used by __lt__ and __gt__.
+ # This is based on, and compatible with, InterfaceClass.
+ # (The two must be mutually comparable to be able to work in e.g., BTrees.)
+ # Instances of this class generally don't have a __module__ other than
+ # `zope.interface.declarations`, whereas they *do* have a __name__ that is the
+ # fully qualified name of the object they are representing.
+
+ # Note, though, that equality and hashing are still identity based. This
+ # accounts for things like nested objects that have the same name (typically
+ # only in tests) and is consistent with pickling. As far as comparisons to InterfaceClass
+ # goes, we'll never have equal name and module to those, so we're still consistent there.
+ # Instances of this class are essentially intended to be unique and are
+ # heavily cached (note how our __reduce__ handles this) so having identity
+ # based hash and eq should also work.
+ if other is None:
+ return -1
+
+ n1 = (self.__name__, self.__module__)
+ n2 = (getattr(other, '__name__', ''), getattr(other, '__module__', ''))
+
+ # This spelling works under Python3, which doesn't have cmp().
+ return (n1 > n2) - (n1 < n2)
+
+ def __hash__(self):
+ return Declaration.__hash__(self)
+
+ def __eq__(self, other):
+ return self is other
+
+ def __ne__(self, other):
+ return self is not other
+
+ def __lt__(self, other):
+ c = self.__cmp(other)
+ return c < 0
+
+ def __le__(self, other):
+ c = self.__cmp(other)
+ return c <= 0
+
+ def __gt__(self, other):
+ c = self.__cmp(other)
+ return c > 0
+
+ def __ge__(self, other):
+ c = self.__cmp(other)
+ return c >= 0
+
+def _implements_name(ob):
+ # Return the __name__ attribute to be used by its __implemented__
+ # property.
+ # This must be stable for the "same" object across processes
+ # because it is used for sorting. It needn't be unique, though, in cases
+ # like nested classes named Foo created by different functions, because
+ # equality and hashing is still based on identity.
+ # It might be nice to use __qualname__ on Python 3, but that would produce
+ # different values between Py2 and Py3.
+ return (getattr(ob, '__module__', '?') or '?') + \
+ '.' + (getattr(ob, '__name__', '?') or '?')
+
def implementedByFallback(cls):
"""Return the interfaces implemented for a class' instances
@@ -183,10 +256,11 @@ def implementedByFallback(cls):
return spec
# TODO: need old style __implements__ compatibility?
+ spec_name = _implements_name(cls)
if spec is not None:
# old-style __implemented__ = foo declaration
spec = (spec, ) # tuplefy, as it might be just an int
- spec = Implements(*_normalizeargs(spec))
+ spec = Implements.named(spec_name, *_normalizeargs(spec))
spec.inherit = None # old-style implies no inherit
del cls.__implemented__ # get rid of the old-style declaration
else:
@@ -197,12 +271,9 @@ def implementedByFallback(cls):
raise TypeError("ImplementedBy called for non-factory", cls)
bases = ()
- spec = Implements(*[implementedBy(c) for c in bases])
+ spec = Implements.named(spec_name, *[implementedBy(c) for c in bases])
spec.inherit = cls
- spec.__name__ = (getattr(cls, '__module__', '?') or '?') + \
- '.' + (getattr(cls, '__name__', '?') or '?')
-
try:
cls.__implemented__ = spec
if not hasattr(cls, '__providedBy__'):
@@ -314,7 +385,8 @@ class implementer:
classImplements(ob, *self.interfaces)
return ob
- spec = Implements(*self.interfaces)
+ spec_name = _implements_name(ob)
+ spec = Implements.named(spec_name, *self.interfaces)
try:
ob.__implemented__ = spec
except AttributeError:
@@ -641,7 +713,7 @@ def classProvides(*interfaces):
"""
# This entire approach is invalid under Py3K. Don't even try to fix
# the coverage for this block there. :(
-
+
if PYTHON3: #pragma NO COVER
raise TypeError(_ADVICE_ERROR % 'provider')
diff --git a/src/zope/interface/interface.py b/src/zope/interface/interface.py
index 5a77adb..e7eff5d 100644
--- a/src/zope/interface/interface.py
+++ b/src/zope/interface/interface.py
@@ -152,7 +152,7 @@ class InterfaceBasePy(object):
if adapter is not None:
return adapter
-
+
InterfaceBase = InterfaceBasePy
try:
from _zope_interface_coptimizations import InterfaceBase
| Make declarations.Implements (and possibly Provides) comparable?
Recently we had the need to do some `IComponents`-like things on some very large persistent data sets. So we started with a subclass of `AdapterRegistry` and made it use BTrees instead of dicts. Everything works fine so long as you stay in the realm of interfaces, but as soon as you try to do something based on classes, like `components.registerAdapter(factory, provided=IFoo, required=(str,))` you blow up with `TypeError: Object has default comparison`.
Actually it's slightly worse than that. Some of the dicts used in the implementation of Components (which we also replaced with BTrees) use keys that are tuples. Tuples are automatically comparable, even if their elements aren't. But having non-comparable keys (or rather, keys that change across processes) is a sure way to corrupt the BTree.
I looked into it, and was pretty surprised to find that not only are `Implements` and `Provides` not comparable, they actually rely on the default, identity-based, `__hash__` and `__eq__` inherited from object. Everything pretty much happens to work out for a regular `PersistentAdapterRegistry` because of the way these objects are pickled (in Implement's case, via `__reduce__` and a call to `implementedBy`, which aggressively caches the same object, so identities stay the same in a single process even across pickling).
Is it a bad idea to make `Implements` comparable so it can be used in BTrees? To work for the `IComponents` use-case, it would have to be mutually-comparable with `InterfaceClass`, but it already has `__name__` and `__module__` (which is what `InterfaceClass` uses), so that might just work out nearly automatically.
If there's consensus this is a reasonable idea, I can work on a PR. (In our particular use-case, we were able to simply prohibit class-based registrations as they don't occur in our data set.) | zopefoundation/zope.interface | diff --git a/src/zope/interface/tests/test_declarations.py b/src/zope/interface/tests/test_declarations.py
index 27a999a..83da6fa 100644
--- a/src/zope/interface/tests/test_declarations.py
+++ b/src/zope/interface/tests/test_declarations.py
@@ -238,7 +238,7 @@ class DeclarationTests(unittest.TestCase):
self.assertEqual(list(after), [IFoo, IBar, IBaz])
-class ImplementsTests(unittest.TestCase):
+class TestImplements(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import Implements
@@ -264,6 +264,25 @@ class ImplementsTests(unittest.TestCase):
impl = self._makeOne()
self.assertEqual(impl.__reduce__(), (implementedBy, (None,)))
+ def test_sort(self):
+ from zope.interface.declarations import implementedBy
+ class A(object):
+ pass
+ class B(object):
+ pass
+ from zope.interface.interface import InterfaceClass
+ IFoo = InterfaceClass('IFoo')
+
+ self.assertEqual(implementedBy(A), implementedBy(A))
+ self.assertEqual(hash(implementedBy(A)), hash(implementedBy(A)))
+ self.assertTrue(implementedBy(A) < None)
+ self.assertTrue(None > implementedBy(A))
+ self.assertTrue(implementedBy(A) < implementedBy(B))
+ self.assertTrue(implementedBy(A) > IFoo)
+ self.assertTrue(implementedBy(A) <= implementedBy(B))
+ self.assertTrue(implementedBy(A) >= IFoo)
+ self.assertTrue(implementedBy(A) != IFoo)
+
class Test_implementedByFallback(unittest.TestCase):
@@ -597,7 +616,7 @@ class Test_implementer(unittest.TestCase):
returned = decorator(foo)
self.assertTrue(returned is foo)
spec = foo.__implemented__
- self.assertEqual(spec.__name__, '?')
+ self.assertEqual(spec.__name__, 'zope.interface.tests.test_declarations.?')
self.assertTrue(spec.inherit is None)
self.assertTrue(foo.__implemented__ is spec)
@@ -1567,7 +1586,7 @@ class _MonkeyDict(object):
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(DeclarationTests),
- unittest.makeSuite(ImplementsTests),
+ unittest.makeSuite(TestImplements),
unittest.makeSuite(Test_implementedByFallback),
unittest.makeSuite(Test_implementedBy),
unittest.makeSuite(Test_classImplementsOnly),
diff --git a/src/zope/interface/tests/test_odd_declarations.py b/src/zope/interface/tests/test_odd_declarations.py
index 1e62a4f..e508d1d 100644
--- a/src/zope/interface/tests/test_odd_declarations.py
+++ b/src/zope/interface/tests/test_odd_declarations.py
@@ -46,7 +46,6 @@ class B(Odd): __implemented__ = I2
# a different mechanism.
# from zope.interface import classProvides
-
class A(Odd):
pass
classImplements(A, I1)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 3
} | 4.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/zopefoundation/zope.interface.git@e79273048b92735fb23fcee7f695a5ed5d72f647#egg=zope.interface
| name: zope.interface
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/zope.interface
| [
"src/zope/interface/tests/test_declarations.py::TestImplements::test_sort",
"src/zope/interface/tests/test_declarations.py::Test_implementer::test_nonclass_can_assign_attr"
]
| []
| [
"src/zope/interface/tests/test_declarations.py::NamedTests::test_class",
"src/zope/interface/tests/test_declarations.py::NamedTests::test_function",
"src/zope/interface/tests/test_declarations.py::NamedTests::test_instance",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___add___related_interface",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___add___unrelated_interface",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___contains__w_base_interface",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___contains__w_self",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___contains__w_unrelated_iface",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___iter___empty",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___iter___inheritance",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___iter___multiple_bases",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___iter___single_base",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___iter___w_nested_sequence_overlap",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___sub___related_interface",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___sub___related_interface_by_inheritance",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test___sub___unrelated_interface",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_changed_w_existing__v_attrs",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_changed_wo_existing__v_attrs",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_ctor_no_bases",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_ctor_w_implements_in_bases",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_ctor_w_interface_in_bases",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_flattened_empty",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_flattened_inheritance",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_flattened_multiple_bases",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_flattened_single_base",
"src/zope/interface/tests/test_declarations.py::DeclarationTests::test_flattened_w_nested_sequence_overlap",
"src/zope/interface/tests/test_declarations.py::TestImplements::test___reduce__",
"src/zope/interface/tests/test_declarations.py::TestImplements::test___repr__",
"src/zope/interface/tests/test_declarations.py::TestImplements::test_ctor_no_bases",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_builtins_added_to_cache",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_builtins_w_existing_cache",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_dictless_w_existing_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_dictless_w_existing_not_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_dictless_wo_existing_Implements_cant_assign___implemented__",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_dictless_wo_existing_Implements_w_registrations",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_dictless_wo_existing_Implements_wo_registrations",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_no_assertions",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_oldstyle_class_no_assertions",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_w_None_no_bases_not_factory",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_w_None_no_bases_w_class",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_w_None_no_bases_w_factory",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_w_existing_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedByFallback::test_w_existing_attr_as_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_builtins_added_to_cache",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_builtins_w_existing_cache",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_dictless_w_existing_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_dictless_w_existing_not_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_dictless_wo_existing_Implements_cant_assign___implemented__",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_dictless_wo_existing_Implements_w_registrations",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_dictless_wo_existing_Implements_wo_registrations",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_no_assertions",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_oldstyle_class_no_assertions",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_w_None_no_bases_not_factory",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_w_None_no_bases_w_class",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_w_None_no_bases_w_factory",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_w_existing_Implements",
"src/zope/interface/tests/test_declarations.py::Test_implementedBy::test_w_existing_attr_as_Implements",
"src/zope/interface/tests/test_declarations.py::Test_classImplementsOnly::test_no_existing",
"src/zope/interface/tests/test_declarations.py::Test_classImplementsOnly::test_w_existing_Implements",
"src/zope/interface/tests/test_declarations.py::Test_classImplements::test_no_existing",
"src/zope/interface/tests/test_declarations.py::Test_classImplements::test_w_existing_Implements",
"src/zope/interface/tests/test_declarations.py::Test_classImplements::test_w_existing_Implements_w_bases",
"src/zope/interface/tests/test_declarations.py::Test__implements_advice::test_no_existing_implements",
"src/zope/interface/tests/test_declarations.py::Test_implementer::test_newstyle_class",
"src/zope/interface/tests/test_declarations.py::Test_implementer::test_nonclass_cannot_assign_attr",
"src/zope/interface/tests/test_declarations.py::Test_implementer::test_oldstyle_class",
"src/zope/interface/tests/test_declarations.py::Test_implementer_only::test_function",
"src/zope/interface/tests/test_declarations.py::Test_implementer_only::test_method",
"src/zope/interface/tests/test_declarations.py::Test_implementer_only::test_newstyle_class",
"src/zope/interface/tests/test_declarations.py::Test_implementer_only::test_oldstyle_class",
"src/zope/interface/tests/test_declarations.py::Test_implementsOnly::test_called_once_from_class_w_bases",
"src/zope/interface/tests/test_declarations.py::Test_implementsOnly::test_simple",
"src/zope/interface/tests/test_declarations.py::Test_implements::test_called_from_function",
"src/zope/interface/tests/test_declarations.py::Test_implements::test_called_once_from_class",
"src/zope/interface/tests/test_declarations.py::Test_implements::test_called_twice_from_class",
"src/zope/interface/tests/test_declarations.py::ProvidesClassTests::test___get___class",
"src/zope/interface/tests/test_declarations.py::ProvidesClassTests::test___get___instance",
"src/zope/interface/tests/test_declarations.py::ProvidesClassTests::test___reduce__",
"src/zope/interface/tests/test_declarations.py::ProvidesClassTests::test_simple_class_one_interface",
"src/zope/interface/tests/test_declarations.py::Test_Provides::test_no_cached_spec",
"src/zope/interface/tests/test_declarations.py::Test_Provides::test_w_cached_spec",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvides::test_w_class",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvides::test_w_classless_object",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvides::test_w_non_descriptor_aware_metaclass",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvides::test_w_normal_object",
"src/zope/interface/tests/test_declarations.py::Test_alsoProvides::test_w_existing_provides",
"src/zope/interface/tests/test_declarations.py::Test_alsoProvides::test_wo_existing_provides",
"src/zope/interface/tests/test_declarations.py::Test_noLongerProvides::test_w_existing_provides_hit",
"src/zope/interface/tests/test_declarations.py::Test_noLongerProvides::test_w_existing_provides_miss",
"src/zope/interface/tests/test_declarations.py::Test_noLongerProvides::test_w_iface_implemented_by_class",
"src/zope/interface/tests/test_declarations.py::Test_noLongerProvides::test_wo_existing_provides",
"src/zope/interface/tests/test_declarations.py::ClassProvidesBaseFallbackTests::test_w_different_class",
"src/zope/interface/tests/test_declarations.py::ClassProvidesBaseFallbackTests::test_w_same_class_via_class",
"src/zope/interface/tests/test_declarations.py::ClassProvidesBaseFallbackTests::test_w_same_class_via_instance",
"src/zope/interface/tests/test_declarations.py::ClassProvidesBaseTests::test_w_different_class",
"src/zope/interface/tests/test_declarations.py::ClassProvidesBaseTests::test_w_same_class_via_class",
"src/zope/interface/tests/test_declarations.py::ClassProvidesBaseTests::test_w_same_class_via_instance",
"src/zope/interface/tests/test_declarations.py::ClassProvidesTests::test___reduce__",
"src/zope/interface/tests/test_declarations.py::ClassProvidesTests::test_w_simple_metaclass",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvidedBy::test_w_declarations_in_class_but_not_instance",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvidedBy::test_w_declarations_in_instance_and_class",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvidedBy::test_w_declarations_in_instance_but_not_class",
"src/zope/interface/tests/test_declarations.py::Test_directlyProvidedBy::test_wo_declarations_in_class_or_instance",
"src/zope/interface/tests/test_declarations.py::Test_classProvides::test_called_from_function",
"src/zope/interface/tests/test_declarations.py::Test_classProvides::test_called_once_from_class",
"src/zope/interface/tests/test_declarations.py::Test_classProvides::test_called_twice_from_class",
"src/zope/interface/tests/test_declarations.py::Test_provider::test_w_class",
"src/zope/interface/tests/test_declarations.py::Test_moduleProvides::test_called_from_class",
"src/zope/interface/tests/test_declarations.py::Test_moduleProvides::test_called_from_function",
"src/zope/interface/tests/test_declarations.py::Test_moduleProvides::test_called_once_from_module_scope",
"src/zope/interface/tests/test_declarations.py::Test_moduleProvides::test_called_twice_from_module_scope",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecificationFallback::test_existing_provides",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecificationFallback::test_existing_provides_is_not_spec",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecificationFallback::test_existing_provides_is_spec",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecificationFallback::test_wo_existing_provides_classless",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecificationFallback::test_wo_provides_on_class_w_implements",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecificationFallback::test_wo_provides_on_class_wo_implements",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecification::test_existing_provides",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecification::test_existing_provides_is_not_spec",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecification::test_existing_provides_is_spec",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecification::test_wo_existing_provides_classless",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecification::test_wo_provides_on_class_w_implements",
"src/zope/interface/tests/test_declarations.py::Test_getObjectSpecification::test_wo_provides_on_class_wo_implements",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_w_providedBy_invalid_spec",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_w_providedBy_invalid_spec_class_w_implements",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_w_providedBy_invalid_spec_w_provides_no_provides_on_class",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_w_providedBy_invalid_spec_w_provides_same_provides_on_class",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_w_providedBy_valid_spec",
"src/zope/interface/tests/test_declarations.py::Test_providedByFallback::test_wo_providedBy_on_class_wo_implements",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_w_providedBy_invalid_spec",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_w_providedBy_invalid_spec_class_w_implements",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_w_providedBy_invalid_spec_w_provides_no_provides_on_class",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_w_providedBy_invalid_spec_w_provides_same_provides_on_class",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_w_providedBy_valid_spec",
"src/zope/interface/tests/test_declarations.py::Test_providedBy::test_wo_providedBy_on_class_wo_implements",
"src/zope/interface/tests/test_declarations.py::ObjectSpecificationDescriptorFallbackTests::test_accessed_via_class",
"src/zope/interface/tests/test_declarations.py::ObjectSpecificationDescriptorFallbackTests::test_accessed_via_inst_w_provides",
"src/zope/interface/tests/test_declarations.py::ObjectSpecificationDescriptorFallbackTests::test_accessed_via_inst_wo_provides",
"src/zope/interface/tests/test_declarations.py::ObjectSpecificationDescriptorTests::test_accessed_via_class",
"src/zope/interface/tests/test_declarations.py::ObjectSpecificationDescriptorTests::test_accessed_via_inst_w_provides",
"src/zope/interface/tests/test_declarations.py::ObjectSpecificationDescriptorTests::test_accessed_via_inst_wo_provides",
"src/zope/interface/tests/test_declarations.py::test_suite",
"src/zope/interface/tests/test_odd_declarations.py::Test::test_ObjectSpecification",
"src/zope/interface/tests/test_odd_declarations.py::Test::test_classImplements",
"src/zope/interface/tests/test_odd_declarations.py::Test::test_classImplementsOnly",
"src/zope/interface/tests/test_odd_declarations.py::Test::test_directlyProvides",
"src/zope/interface/tests/test_odd_declarations.py::Test::test_directlyProvides_fails_for_odd_class",
"src/zope/interface/tests/test_odd_declarations.py::Test::test_implementedBy",
"src/zope/interface/tests/test_odd_declarations.py::test_suite"
]
| []
| Zope Public License 2.1 | 680 | [
"src/zope/interface/declarations.py",
"src/zope/interface/interface.py",
"CHANGES.rst"
]
| [
"src/zope/interface/declarations.py",
"src/zope/interface/interface.py",
"CHANGES.rst"
]
|
|
mkdocs__mkdocs-1009 | 2e4c2afca10cfd697ba5ca820dd5212c4b1e9089 | 2016-08-03 20:25:59 | e7d8879d2b53d9e50bdfcf1cf29c48dc3f6bc87f | waylan: Also needs a comment in the release notes.
waylan: I added a not to the release notes and a test. But the test is failing and its not clear to me why. I'm not an expert on Mock, so maybe I'm misunderstanding something here. But I don't know how else to test this. | diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md
index c8e491f8..426ef3c8 100644
--- a/docs/about/release-notes.md
+++ b/docs/about/release-notes.md
@@ -138,6 +138,8 @@ pages.
* Change "Edit on..." links to point directly to the file in the source
repository, rather than to the root of the repository (#975), configurable
via the new [`edit_uri`](../user-guide/configuration.md#edit_uri) setting.
+* Bugfix: Don't override config value for strict mode if not specified on CLI
+ (#738).
## Version 0.15.3 (2016-02-18)
diff --git a/mkdocs/__main__.py b/mkdocs/__main__.py
index 9417c3ad..c9d519fc 100644
--- a/mkdocs/__main__.py
+++ b/mkdocs/__main__.py
@@ -9,7 +9,7 @@ import socket
from mkdocs import __version__
from mkdocs import utils
from mkdocs import exceptions
-from mkdocs.config import load_config
+from mkdocs import config
from mkdocs.commands import build, gh_deploy, new, serve
log = logging.getLogger(__name__)
@@ -112,6 +112,10 @@ def serve_command(dev_addr, config_file, strict, theme, theme_dir, livereload):
logging.getLogger('tornado').setLevel(logging.WARNING)
+ # Don't override config value if user did not specify --strict flag
+ # Conveniently, load_config drops None values
+ strict = strict or None
+
try:
serve.serve(
config_file=config_file,
@@ -136,8 +140,13 @@ def serve_command(dev_addr, config_file, strict, theme, theme_dir, livereload):
@common_options
def build_command(clean, config_file, strict, theme, theme_dir, site_dir):
"""Build the MkDocs documentation"""
+
+ # Don't override config value if user did not specify --strict flag
+ # Conveniently, load_config drops None values
+ strict = strict or None
+
try:
- build.build(load_config(
+ build.build(config.load_config(
config_file=config_file,
strict=strict,
theme=theme,
@@ -168,8 +177,12 @@ def json_command(clean, config_file, strict, site_dir):
"future MkDocs release. For details on updating: "
"http://www.mkdocs.org/about/release-notes/")
+ # Don't override config value if user did not specify --strict flag
+ # Conveniently, load_config drops None values
+ strict = strict or None
+
try:
- build.build(load_config(
+ build.build(config.load_config(
config_file=config_file,
strict=strict,
site_dir=site_dir
@@ -189,13 +202,13 @@ def json_command(clean, config_file, strict, site_dir):
def gh_deploy_command(config_file, clean, message, remote_branch, remote_name):
"""Deploy your documentation to GitHub Pages"""
try:
- config = load_config(
+ cfg = config.load_config(
config_file=config_file,
remote_branch=remote_branch,
remote_name=remote_name
)
- build.build(config, dirty=not clean)
- gh_deploy.gh_deploy(config, message=message)
+ build.build(cfg, dirty=not clean)
+ gh_deploy.gh_deploy(cfg, message=message)
except exceptions.ConfigurationError as e:
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
| Configuration strict option not checking broken links
I was testing mkdocs earlier this year and was using "strict: true" in my mkdocs.yml. If I recall correctly, it was working then and building would break if there was a broken link in the .md.
However I am now using v0.14.0 and this config option seems to have no effect on the build - I get no warnings or errors when building regardless of the value of strict. | mkdocs/mkdocs | diff --git a/mkdocs/tests/cli_tests.py b/mkdocs/tests/cli_tests.py
index b070afee..a4da76e8 100644
--- a/mkdocs/tests/cli_tests.py
+++ b/mkdocs/tests/cli_tests.py
@@ -22,16 +22,31 @@ class CLITests(unittest.TestCase):
cli.cli, ["serve", ], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
- self.assertEqual(mock_serve.call_count, 1)
-
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ livereload=None
+ )
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
@mock.patch('mkdocs.commands.build.build', autospec=True)
- def test_build(self, mock_build):
+ def test_build(self, mock_build, mock_load_config):
result = self.runner.invoke(
cli.cli, ["build", ], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ site_dir=None
+ )
@mock.patch('mkdocs.commands.build.build', autospec=True)
def test_build_verbose(self, mock_build):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/project.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
exceptiongroup==1.2.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
livereload==2.7.1
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@2e4c2afca10cfd697ba5ca820dd5212c4b1e9089#egg=mkdocs
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
tomli==2.2.1
tornado==6.4.2
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- exceptiongroup==1.2.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- livereload==2.7.1
- markdown==3.7
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- tomli==2.2.1
- tornado==6.4.2
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build",
"mkdocs/tests/cli_tests.py::CLITests::test_serve"
]
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build_verbose",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy",
"mkdocs/tests/cli_tests.py::CLITests::test_json"
]
| [
"mkdocs/tests/cli_tests.py::CLITests::test_new"
]
| []
| BSD 2-Clause "Simplified" License | 681 | [
"docs/about/release-notes.md",
"mkdocs/__main__.py"
]
| [
"docs/about/release-notes.md",
"mkdocs/__main__.py"
]
|
pypa__twine-203 | 26a7c446888e50712168a395cd91360666264390 | 2016-08-03 21:26:54 | 461bac0a8e3333e11bb5dd14547f45e34e6c79d4 | diff --git a/docs/changelog.rst b/docs/changelog.rst
index fde2ef9..a4dafdd 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -16,6 +16,10 @@ Changelog
- Password will default to ``TWINE_PASSWORD``
+ * :feature:`166` Allow the Repository URL to be provided on the command-line
+ (``--repository-url``) or via an environment variable
+ (``TWINE_REPOSITORY_URL``).
+
* :release:`1.7.4 <2016-07-09>`
* Correct a packaging error.
diff --git a/twine/commands/register.py b/twine/commands/register.py
index ea6732e..29744d1 100644
--- a/twine/commands/register.py
+++ b/twine/commands/register.py
@@ -24,8 +24,12 @@ from twine import utils
def register(package, repository, username, password, comment, config_file,
- cert, client_cert):
- config = utils.get_repository_from_config(config_file, repository)
+ cert, client_cert, repository_url):
+ config = utils.get_repository_from_config(
+ config_file,
+ repository,
+ repository_url,
+ )
config["repository"] = utils.normalize_repository_url(
config["repository"]
)
@@ -63,15 +67,26 @@ def main(args):
parser.add_argument(
"-r", "--repository",
action=utils.EnvironmentDefault,
- env='TWINE_REPOSITORY',
+ env="TWINE_REPOSITORY",
default="pypi",
- help="The repository to register the package to (default: "
+ help="The repository to register the package to. Can be a section in "
+ "the config file or a full URL to the repository (default: "
"%(default)s)",
)
+ parser.add_argument(
+ "--repository-url",
+ action=utils.EnvironmentDefault,
+ env="TWINE_REPOSITORY_URL",
+ default=None,
+ required=False,
+ help="The repository URL to upload the package to. This can be "
+ "specified with --repository because it will be used if there is "
+ "no configuration for the value passed to --repository."
+ )
parser.add_argument(
"-u", "--username",
action=utils.EnvironmentDefault,
- env='TWINE_USERNAME',
+ env="TWINE_USERNAME",
required=False, help="The username to authenticate to the repository "
"as (can also be set via %(env)s environment "
"variable)",
@@ -79,7 +94,7 @@ def main(args):
parser.add_argument(
"-p", "--password",
action=utils.EnvironmentDefault,
- env='TWINE_PASSWORD',
+ env="TWINE_PASSWORD",
required=False, help="The password to authenticate to the repository "
"with (can also be set via %(env)s environment "
"variable)",
diff --git a/twine/commands/upload.py b/twine/commands/upload.py
index 7f7335e..e78f4af 100644
--- a/twine/commands/upload.py
+++ b/twine/commands/upload.py
@@ -72,7 +72,8 @@ def skip_upload(response, skip_existing, package):
def upload(dists, repository, sign, identity, username, password, comment,
- sign_with, config_file, skip_existing, cert, client_cert):
+ sign_with, config_file, skip_existing, cert, client_cert,
+ repository_url):
# Check that a nonsensical option wasn't given
if not sign and identity:
raise ValueError("sign must be given along with identity")
@@ -85,7 +86,11 @@ def upload(dists, repository, sign, identity, username, password, comment,
)
uploads = [i for i in dists if not i.endswith(".asc")]
- config = utils.get_repository_from_config(config_file, repository)
+ config = utils.get_repository_from_config(
+ config_file,
+ repository,
+ repository_url,
+ )
config["repository"] = utils.normalize_repository_url(
config["repository"]
@@ -152,7 +157,19 @@ def main(args):
action=utils.EnvironmentDefault,
env="TWINE_REPOSITORY",
default="pypi",
- help="The repository to upload the files to (default: %(default)s)",
+ help="The repository to register the package to. Can be a section in "
+ "the config file or a full URL to the repository (default: "
+ "%(default)s)",
+ )
+ parser.add_argument(
+ "--repository-url",
+ action=utils.EnvironmentDefault,
+ env="TWINE_REPOSITORY_URL",
+ default=None,
+ required=False,
+ help="The repository URL to upload the package to. This can be "
+ "specified with --repository because it will be used if there is "
+ "no configuration for the value passed to --repository."
)
parser.add_argument(
"-s", "--sign",
diff --git a/twine/utils.py b/twine/utils.py
index 3e21fde..9b2e8d6 100644
--- a/twine/utils.py
+++ b/twine/utils.py
@@ -40,6 +40,7 @@ else:
DEFAULT_REPOSITORY = "https://upload.pypi.org/legacy/"
+TEST_REPOSITORY = "https://test.pypi.org/legacy/"
def get_config(path="~/.pypirc"):
@@ -50,7 +51,11 @@ def get_config(path="~/.pypirc"):
return {"pypi": {"repository": DEFAULT_REPOSITORY,
"username": None,
"password": None
- }
+ },
+ "pypitest": {"repository": TEST_REPOSITORY,
+ "username": None,
+ "password": None
+ },
}
# Parse the rc file
@@ -94,13 +99,22 @@ def get_config(path="~/.pypirc"):
return config
-def get_repository_from_config(config_file, repository):
+def get_repository_from_config(config_file, repository, repository_url=None):
# Get our config from the .pypirc file
try:
return get_config(config_file)[repository]
except KeyError:
+ if repository_url and "://" in repository_url:
+ # assume that the repsoitory is actually an URL and just sent
+ # them a dummy with the repo set
+ return {
+ "repository": repository_url,
+ "username": None,
+ "password": None,
+ }
msg = (
- "Missing '{repo}' section from the configuration file.\n"
+ "Missing '{repo}' section from the configuration file\n"
+ "or not a complete URL in --repository.\n"
"Maybe you have a out-dated '{cfg}' format?\n"
"more info: "
"https://docs.python.org/distutils/packageindex.html#pypirc\n"
| Make twine work without a config file
I wanted to add twine to a CI script but it wasn't trivial to find the pypirc file, so I had to regenerate via echo "..." calls (windows).
It would be nice of twine could work with https://testpypi.python.org/pypi without first adding a config file . | pypa/twine | diff --git a/tests/test_upload.py b/tests/test_upload.py
index 2649fe4..e134f82 100644
--- a/tests/test_upload.py
+++ b/tests/test_upload.py
@@ -82,10 +82,13 @@ def test_get_config_old_format(tmpdir):
upload.upload(dists=dists, repository="pypi", sign=None, identity=None,
username=None, password=None, comment=None,
cert=None, client_cert=None,
- sign_with=None, config_file=pypirc, skip_existing=False)
+ sign_with=None, config_file=pypirc, skip_existing=False,
+ repository_url=None,
+ )
except KeyError as err:
assert err.args[0] == (
- "Missing 'pypi' section from the configuration file.\n"
+ "Missing 'pypi' section from the configuration file\n"
+ "or not a complete URL in --repository.\n"
"Maybe you have a out-dated '{0}' format?\n"
"more info: "
"https://docs.python.org/distutils/packageindex.html#pypirc\n"
@@ -130,7 +133,9 @@ def test_skip_upload_respects_skip_existing(monkeypatch):
def test_password_and_username_from_env(monkeypatch):
- def none_upload(*args, **kwargs): pass
+ def none_upload(*args, **kwargs):
+ pass
+
replaced_upload = pretend.call_recorder(none_upload)
monkeypatch.setattr(twine.commands.upload, "upload", replaced_upload)
testenv = {"TWINE_USERNAME": "pypiuser",
diff --git a/tests/test_utils.py b/tests/test_utils.py
index cb763b7..4c4d116 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -19,7 +19,6 @@ import textwrap
import pytest
-from twine.utils import DEFAULT_REPOSITORY, get_config, get_userpass_value
from twine import utils
import helpers
@@ -38,9 +37,9 @@ def test_get_config(tmpdir):
password = testpassword
"""))
- assert get_config(pypirc) == {
+ assert utils.get_config(pypirc) == {
"pypi": {
- "repository": DEFAULT_REPOSITORY,
+ "repository": utils.DEFAULT_REPOSITORY,
"username": "testuser",
"password": "testpassword",
},
@@ -57,9 +56,9 @@ def test_get_config_no_distutils(tmpdir):
password = testpassword
"""))
- assert get_config(pypirc) == {
+ assert utils.get_config(pypirc) == {
"pypi": {
- "repository": DEFAULT_REPOSITORY,
+ "repository": utils.DEFAULT_REPOSITORY,
"username": "testuser",
"password": "testpassword",
},
@@ -79,9 +78,9 @@ def test_get_config_no_section(tmpdir):
password = testpassword
"""))
- assert get_config(pypirc) == {
+ assert utils.get_config(pypirc) == {
"pypi": {
- "repository": DEFAULT_REPOSITORY,
+ "repository": utils.DEFAULT_REPOSITORY,
"username": "testuser",
"password": "testpassword",
},
@@ -91,23 +90,47 @@ def test_get_config_no_section(tmpdir):
def test_get_config_missing(tmpdir):
pypirc = os.path.join(str(tmpdir), ".pypirc")
- assert get_config(pypirc) == {
+ assert utils.get_config(pypirc) == {
"pypi": {
- "repository": DEFAULT_REPOSITORY,
+ "repository": utils.DEFAULT_REPOSITORY,
"username": None,
"password": None,
},
+ "pypitest": {
+ "repository": utils.TEST_REPOSITORY,
+ "username": None,
+ "password": None
+ },
}
+def test_get_repository_config_missing(tmpdir):
+ pypirc = os.path.join(str(tmpdir), ".pypirc")
+
+ repository_url = "https://notexisting.python.org/pypi"
+ exp = {
+ "repository": repository_url,
+ "username": None,
+ "password": None,
+ }
+ assert (utils.get_repository_from_config(pypirc, 'foo', repository_url) ==
+ exp)
+ exp = {
+ "repository": utils.DEFAULT_REPOSITORY,
+ "username": None,
+ "password": None,
+ }
+ assert utils.get_repository_from_config(pypirc, "pypi") == exp
+
+
def test_get_config_deprecated_pypirc():
tests_dir = os.path.dirname(os.path.abspath(__file__))
deprecated_pypirc_path = os.path.join(tests_dir, 'fixtures',
'deprecated-pypirc')
- assert get_config(deprecated_pypirc_path) == {
+ assert utils.get_config(deprecated_pypirc_path) == {
"pypi": {
- "repository": DEFAULT_REPOSITORY,
+ "repository": utils.DEFAULT_REPOSITORY,
"username": 'testusername',
"password": 'testpassword',
},
@@ -123,7 +146,7 @@ def test_get_config_deprecated_pypirc():
),
)
def test_get_userpass_value(cli_value, config, key, strategy, expected):
- ret = get_userpass_value(cli_value, config, key, strategy)
+ ret = utils.get_userpass_value(cli_value, config, key, strategy)
assert ret == expected
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 4
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"coverage",
"pretend",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | args==0.1.0
certifi==2025.1.31
charset-normalizer==3.4.1
clint==0.5.1
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pkginfo==1.12.1.2
pluggy @ file:///croot/pluggy_1733169602837/work
pretend==1.0.9
pytest @ file:///croot/pytest_1738938843180/work
requests==2.32.3
requests-toolbelt==1.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/pypa/twine.git@26a7c446888e50712168a395cd91360666264390#egg=twine
urllib3==2.3.0
| name: twine
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- args==0.1.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- clint==0.5.1
- coverage==7.8.0
- idna==3.10
- pkginfo==1.12.1.2
- pretend==1.0.9
- requests==2.32.3
- requests-toolbelt==1.0.0
- urllib3==2.3.0
prefix: /opt/conda/envs/twine
| [
"tests/test_upload.py::test_get_config_old_format",
"tests/test_utils.py::test_get_config_missing",
"tests/test_utils.py::test_get_repository_config_missing"
]
| []
| [
"tests/test_upload.py::test_ensure_wheel_files_uploaded_first",
"tests/test_upload.py::test_ensure_if_no_wheel_files",
"tests/test_upload.py::test_find_dists_expands_globs",
"tests/test_upload.py::test_find_dists_errors_on_invalid_globs",
"tests/test_upload.py::test_find_dists_handles_real_files",
"tests/test_upload.py::test_skip_existing_skips_files_already_on_PyPI",
"tests/test_upload.py::test_skip_existing_skips_files_already_on_pypiserver",
"tests/test_upload.py::test_skip_upload_respects_skip_existing",
"tests/test_upload.py::test_password_and_username_from_env",
"tests/test_utils.py::test_get_config",
"tests/test_utils.py::test_get_config_no_distutils",
"tests/test_utils.py::test_get_config_no_section",
"tests/test_utils.py::test_get_config_deprecated_pypirc",
"tests/test_utils.py::test_get_userpass_value[cli-config0-key-<lambda>-cli]",
"tests/test_utils.py::test_get_userpass_value[None-config1-key-<lambda>-value]",
"tests/test_utils.py::test_get_userpass_value[None-config2-key-<lambda>-fallback]",
"tests/test_utils.py::test_default_to_environment_action[MY_PASSWORD-None-environ0-None]",
"tests/test_utils.py::test_default_to_environment_action[MY_PASSWORD-None-environ1-foo]",
"tests/test_utils.py::test_default_to_environment_action[URL-https://example.org-environ2-https://example.org]",
"tests/test_utils.py::test_default_to_environment_action[URL-https://example.org-environ3-https://pypi.org]"
]
| []
| Apache License 2.0 | 682 | [
"twine/commands/register.py",
"docs/changelog.rst",
"twine/commands/upload.py",
"twine/utils.py"
]
| [
"twine/commands/register.py",
"docs/changelog.rst",
"twine/commands/upload.py",
"twine/utils.py"
]
|
|
ifosch__accloudtant-108 | b662c13380e9e5c42232e51ef3ef059c679ad5e9 | 2016-08-04 19:57:34 | 33f90ff0bc1639c9fe793afd837eee80170caf3e | diff --git a/accloudtant/aws/reports.py b/accloudtant/aws/reports.py
index 0f56ea5..6754f6b 100644
--- a/accloudtant/aws/reports.py
+++ b/accloudtant/aws/reports.py
@@ -22,15 +22,15 @@ from accloudtant.aws.reserved_instance import ReservedInstance
from accloudtant.aws.prices import Prices
import sys
-
class Reports(object):
- def __init__(self, logger=None):
+ def __init__(self, output_format, logger=None):
if logger is None:
self.logger = getLogger('accloudtant.report')
self.logger.setLevel(DEBUG)
self.logger.addHandler(StreamHandler(sys.stdout))
else:
self.logger = logger
+ self.output_format=output_format
ec2 = boto3.resource('ec2')
ec2_client = boto3.client('ec2')
self.counters = {
@@ -91,7 +91,7 @@ class Reports(object):
reserved_counters['not reserved'] = instances_counters['running']
reserved_counters['not reserved'] -= reserved_counters['used']
- def __repr__(self):
+ def print_report(self):
headers = [
'Id',
'Name',
@@ -119,24 +119,45 @@ class Reports(object):
instance.best,
]
table.append(row)
- footer_headers = [
- 'Running',
- 'Stopped',
- 'Total instances',
- 'Used',
- 'Free',
- 'Not reserved',
- 'Total reserved',
- ]
- footer_table = [[
- self.counters['instances']['running'],
- self.counters['instances']['stopped'],
- self.counters['instances']['total'],
- self.counters['reserved']['used'],
- self.counters['reserved']['free'],
- self.counters['reserved']['not reserved'],
- self.counters['reserved']['total'],
- ]]
- inventory = tabulate(table, headers)
- summary = tabulate(footer_table, footer_headers)
- return "{}\n\n{}".format(inventory, summary)
+
+ if self.output_format == 'table':
+ footer_headers = [
+ 'Running',
+ 'Stopped',
+ 'Total instances',
+ 'Used',
+ 'Free',
+ 'Not reserved',
+ 'Total reserved',
+ ]
+ footer_table = [[
+ self.counters['instances']['running'],
+ self.counters['instances']['stopped'],
+ self.counters['instances']['total'],
+ self.counters['reserved']['used'],
+ self.counters['reserved']['free'],
+ self.counters['reserved']['not reserved'],
+ self.counters['reserved']['total'],
+ ]]
+ inventory = tabulate(table, headers)
+ summary = tabulate(footer_table, footer_headers)
+
+ return "{}\n\n{}".format(inventory, summary)
+
+ elif self.output_format == 'csv':
+ output = ''
+ for header in headers:
+ output += header + ','
+ output = output[:-1] + '\n'
+ for row in table:
+ for column in row:
+ output += str(column) + ','
+ output = output[:-1] + '\n'
+
+ return output[:-1]
+
+ else:
+ raise Exception()
+
+ def __repr__(self):
+ return self.print_report()
diff --git a/bin/accloudtant b/bin/accloudtant
index 148bc66..f9ab2b0 100755
--- a/bin/accloudtant
+++ b/bin/accloudtant
@@ -14,15 +14,15 @@ logger.addHandler(StreamHandler(sys.stdout))
def cli():
pass
-
@cli.command('list', short_help='prints current price lists')
def price_list():
logger.info(Prices())
-
@cli.command(short_help='provides price/usage reports')
-def report():
- logger.info(Reports())
[email protected]('--output', default='table', type=click.Choice(['table', 'csv']),
+ help='Change output format')
+def report(output):
+ logger.info(Reports(output_format=output))
if __name__ == '__main__':
cli()
| CSV as output format
The table format is great to see the data in the terminal, but to facilytate the import into other tools, CSV is a best (and simple) choice. | ifosch/accloudtant | diff --git a/tests/aws/report_running_expected_csv.txt b/tests/aws/report_running_expected_csv.txt
new file mode 100644
index 0000000..ba4cbe8
--- /dev/null
+++ b/tests/aws/report_running_expected_csv.txt
@@ -0,0 +1,7 @@
+Id,Name,Type,AZ,OS,State,Launch time,Reserved,Current hourly price,Renewed hourly price
+i-912a4392,web1,c3.8xlarge,us-east-1c,Windows,running,2015-10-22 14:15:10,Yes,0.5121,0.3894
+i-1840273e,app1,r2.8xlarge,us-east-1b,Red Hat Enterprise Linux,running,2015-10-22 14:15:10,Yes,0.3894,0.3794
+i-9840273d,app2,r2.8xlarge,us-east-1c,SUSE Linux,running,2015-10-22 14:15:10,Yes,0.5225,0.389
+i-1840273c,database2,r2.8xlarge,us-east-1c,Linux/UNIX,running,2015-10-22 14:15:10,Yes,0.611,0.379
+i-1840273b,database3,r2.8xlarge,us-east-1c,Linux/UNIX,running,2015-10-22 14:15:10,Yes,0.611,0.379
+i-912a4393,test,t1.micro,us-east-1c,Linux/UNIX,running,2015-10-22 14:15:10,No,0.767,0.3892
diff --git a/tests/aws/report_running_expected.txt b/tests/aws/report_running_expected_table.txt
similarity index 100%
rename from tests/aws/report_running_expected.txt
rename to tests/aws/report_running_expected_table.txt
diff --git a/tests/aws/test_reports.py b/tests/aws/test_reports.py
index 57084c3..80d2c24 100644
--- a/tests/aws/test_reports.py
+++ b/tests/aws/test_reports.py
@@ -20,8 +20,635 @@ import accloudtant.aws.reports
def get_future_date(years=1):
return datetime.datetime.now() + datetime.timedelta(years)
+def test_reports_table(capsys, monkeypatch, ec2_resource, ec2_client,
+ process_ec2):
+ instances = {
+ 'instances': [{
+ 'id': 'i-912a4392',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'web1',
+ }, ],
+ 'instance_type': 'c3.8xlarge',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1c',
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc(),
+ ),
+ 'console_output': {'Output': 'Windows', },
+ }, {
+ 'id': 'i-1840273e',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'app1',
+ }, ],
+ 'instance_type': 'r2.8xlarge',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1b',
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'console_output': {'Output': 'RHEL Linux', },
+ }, {
+ 'id': 'i-9840273d',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'app2',
+ }, ],
+ 'instance_type': 'r2.8xlarge',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1c',
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'console_output': {'Output': 'SUSE Linux', },
+ }, {
+ 'id': 'i-1840273d',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'database1',
+ }, ],
+ 'instance_type': 'r2.8xlarge',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1c',
+ },
+ 'state': {
+ 'Name': 'stopped',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'console_output': {'Output': 'Linux', },
+ }, {
+ 'id': 'i-1840273c',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'database2',
+ }, ],
+ 'instance_type': 'r2.8xlarge',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1c',
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'console_output': {'Output': 'Linux', },
+ }, {
+ 'id': 'i-1840273b',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'database3',
+ }, ],
+ 'instance_type': 'r2.8xlarge',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1c',
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'console_output': {'Output': 'Linux', },
+ }, {
+ 'id': 'i-912a4393',
+ 'tags': [{
+ 'Key': 'Name',
+ 'Value': 'test',
+ }, ],
+ 'instance_type': 't1.micro',
+ 'placement': {
+ 'AvailabilityZone': 'us-east-1c',
+ },
+ 'state': {
+ 'Name': 'running',
+ },
+ 'launch_time': datetime.datetime(
+ 2015,
+ 10,
+ 22,
+ 14,
+ 15,
+ 10,
+ tzinfo=tzutc(),
+ ),
+ 'console_output': {'Output': 'Linux', },
+ }, ]
+ }
+ reserved_instances = {
+ 'ReservedInstances': [{
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 29,
+ 'InstanceType': 'm1.large',
+ 'Start': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df1223331f',
+ 'FixedPrice': 910.0,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.12,
+ 'Duration': 31536000,
+ 'State': 'retired',
+ }, {
+ 'ProductDescription': 'Windows',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'c3.8xlarge',
+ 'Start': datetime.datetime(
+ 2015,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': get_future_date(),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
+ 'FixedPrice': 4486.0,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.5121,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }, {
+ 'ProductDescription': 'Red Hat Enterprise Linux',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'r2.8xlarge',
+ 'Start': datetime.datetime(
+ 2015,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': get_future_date(),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233321',
+ 'FixedPrice': 10234,
+ 'AvailabilityZone': 'us-east-1b',
+ 'UsagePrice': 0.3894,
+ 'Duration': 94608000,
+ 'State': 'active',
+ }, {
+ 'ProductDescription': 'SUSE Linux',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'r2.8xlarge',
+ 'Start': datetime.datetime(
+ 2015,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': get_future_date(),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233322',
+ 'FixedPrice': 2974.0,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.5225,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }, {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'r2.8xlarge',
+ 'Start': datetime.datetime(
+ 2015,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': get_future_date(),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
+ 'FixedPrice': 5352.36,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.611,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }, {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 't2.micro',
+ 'Start': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
+ 'FixedPrice': 5352.36,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.611,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }, {
+ 'ProductDescription': 'Linux/UNIX',
+ 'InstanceTenancy': 'default',
+ 'InstanceCount': 1,
+ 'InstanceType': 'r2.8xlarge',
+ 'Start': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ 494000,
+ tzinfo=tzutc()
+ ),
+ 'RecurringCharges': [],
+ 'End': datetime.datetime(
+ 2011,
+ 6,
+ 5,
+ 6,
+ 20,
+ 10,
+ tzinfo=tzutc()
+ ),
+ 'CurrencyCode': 'USD',
+ 'OfferingType': 'Medium Utilization',
+ 'ReservedInstancesId': '46a408c7-c33d-422d-af59-28df12233320',
+ 'FixedPrice': 5352.36,
+ 'AvailabilityZone': 'us-east-1c',
+ 'UsagePrice': 0.611,
+ 'Duration': 31536000,
+ 'State': 'active',
+ }, ]
+ }
+ prices = {
+ 'win': {
+ 'us-east-1': {
+ 'c3.8xlarge': {
+ 'storageGB': '60 SSD',
+ 'ri': {
+ 'yrTerm1': {
+ 'noUpfront': {
+ 'upfront': '0',
+ 'monthlyStar': '446.03',
+ 'effectiveHourly': '0.611',
+ },
+ 'partialUpfront': {
+ 'upfront': '2974',
+ 'monthlyStar': '133.59',
+ 'effectiveHourly': '0.5225',
+ },
+ 'allUpfront': {
+ 'upfront': '4398.4',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.5021',
+ },
+ },
+ 'yrTerm3': {
+ 'allUpfront': {
+ 'upfront': '10234',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.3894',
+ },
+ 'partialUpfront': {
+ 'upfront': '7077',
+ 'monthlyStar': '105.85',
+ 'effectiveHourly': '0.4143',
+ },
+ },
+ },
+ 'od': '0.867',
+ 'memoryGiB': '15',
+ 'vCPU': '8',
+ },
+ },
+ },
+ 'rhel': {
+ 'us-east-1': {
+ 'r2.8xlarge': {
+ 'storageGB': '60 SSD',
+ 'ri': {
+ 'yrTerm1': {
+ 'noUpfront': {
+ 'upfront': '0',
+ 'monthlyStar': '446.03',
+ 'effectiveHourly': '0.611',
+ },
+ 'partialUpfront': {
+ 'upfront': '2974',
+ 'monthlyStar': '133.59',
+ 'effectiveHourly': '0.5225',
+ },
+ 'allUpfront': {
+ 'upfront': '4486',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.5121',
+ },
+ },
+ 'yrTerm3': {
+ 'allUpfront': {
+ 'upfront': '10233.432',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.3794',
+ },
+ 'partialUpfront': {
+ 'upfront': '7077',
+ 'monthlyStar': '105.85',
+ 'effectiveHourly': '0.4143',
+ },
+ },
+ },
+ 'od': '0.767',
+ 'memoryGiB': '15',
+ 'vCPU': '8',
+ },
+ },
+ },
+ 'suse': {
+ 'us-east-1': {
+ 'r2.8xlarge': {
+ 'storageGB': '60 SSD',
+ 'ri': {
+ 'yrTerm1': {
+ 'noUpfront': {
+ 'upfront': '0',
+ 'monthlyStar': '446.03',
+ 'effectiveHourly': '0.611',
+ },
+ 'partialUpfront': {
+ 'upfront': '2974',
+ 'monthlyStar': '133.59',
+ 'effectiveHourly': '0.5225',
+ },
+ 'allUpfront': {
+ 'upfront': '4486',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.5121',
+ },
+ },
+ 'yrTerm3': {
+ 'allUpfront': {
+ 'upfront': '10234',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.3890',
+ },
+ 'partialUpfront': {
+ 'upfront': '7077',
+ 'monthlyStar': '105.85',
+ 'effectiveHourly': '0.4143',
+ },
+ },
+ },
+ 'od': '0.767',
+ 'memoryGiB': '15',
+ 'vCPU': '8',
+ },
+ },
+ },
+ 'linux': {
+ 'us-east-1': {
+ 't1.micro': {
+ 'storageGB': '60 SSD',
+ 'ri': {
+ 'yrTerm1': {
+ 'noUpfront': {
+ 'upfront': '0',
+ 'monthlyStar': '446.03',
+ 'effectiveHourly': '0.611',
+ },
+ 'allUpfront': {
+ 'upfront': '2974',
+ 'monthlyStar': '133.59',
+ 'effectiveHourly': '0.5225',
+ },
+ 'partialUpfront': {
+ 'upfront': '4486',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.5121',
+ },
+ },
+ 'yrTerm3': {
+ 'allUpfront': {
+ 'upfront': '10234',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.3892',
+ },
+ 'partialUpfront': {
+ 'upfront': '7077',
+ 'monthlyStar': '105.85',
+ 'effectiveHourly': '0.4143',
+ },
+ },
+ },
+ 'od': '0.767',
+ 'memoryGiB': '15',
+ 'vCPU': '8',
+ },
+ 'r2.8xlarge': {
+ 'storageGB': '60 SSD',
+ 'ri': {
+ 'yrTerm1': {
+ 'noUpfront': {
+ 'upfront': '0',
+ 'monthlyStar': '446.03',
+ 'effectiveHourly': '0.611',
+ },
+ 'allUpfront': {
+ 'upfront': '2974',
+ 'monthlyStar': '133.59',
+ 'effectiveHourly': '0.5225',
+ },
+ 'partialUpfront': {
+ 'upfront': '4486',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.5121',
+ },
+ },
+ 'yrTerm3': {
+ 'allUpfront': {
+ 'upfront': '10234',
+ 'monthlyStar': '0',
+ 'effectiveHourly': '0.3790',
+ },
+ 'partialUpfront': {
+ 'upfront': '7077',
+ 'monthlyStar': '105.85',
+ 'effectiveHourly': '0.4143',
+ },
+ },
+ },
+ 'od': '0.767',
+ 'memoryGiB': '15',
+ 'vCPU': '8',
+ },
+ },
+ },
+ }
+ instances_prices = {
+ 'i-912a4392': {
+ 'current': 0.5121,
+ 'best': 0.3894,
+ },
+ 'i-1840273e': {
+ 'current': 0.3894,
+ 'best': 0.3794,
+ },
+ 'i-9840273d': {
+ 'current': 0.5225,
+ 'best': 0.3890,
+ },
+ 'i-1840273d': {
+ 'current': 0.0,
+ 'best': 0.3790,
+ },
+ 'i-1840273c': {
+ 'current': 0.611,
+ 'best': 0.3790,
+ },
+ 'i-1840273b': {
+ 'current': 0.611,
+ 'best': 0.3790,
+ },
+ 'i-912a4393': {
+ 'current': 0.767,
+ 'best': 0.3892,
+ },
+ }
+ expected = open('tests/aws/report_running_expected_table.txt', 'r').read()
+
+ monkeypatch.setattr('boto3.resource', ec2_resource)
+ ec2_resource.set_responses(instances)
+ monkeypatch.setattr('boto3.client', ec2_client)
+ ec2_client.set_responses({}, reserved_instances)
+ monkeypatch.setattr(
+ 'accloudtant.aws.prices.process_ec2',
+ process_ec2
+ )
+ process_ec2.set_responses(prices)
+
+ reports = accloudtant.aws.reports.Reports(output_format='table')
+ print(reports)
+ out, err = capsys.readouterr()
+
+ assert(len(reports.instances) == 6)
+ for mock in instances['instances']:
+ mock['current'] = instances_prices[mock['id']]['current']
+ mock['best'] = instances_prices[mock['id']]['best']
+ for instance in reports.instances:
+ if instance.id == mock['id']:
+ assert(instance.current == mock['current'])
+ assert(instance.best == mock['best'])
+ assert(out == expected)
-def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
+def test_reports_csv(capsys, monkeypatch, ec2_resource, ec2_client,
+ process_ec2):
instances = {
'instances': [{
'id': 'i-912a4392',
@@ -621,7 +1248,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
'best': 0.3892,
},
}
- expected = open('tests/aws/report_running_expected.txt', 'r').read()
+ expected = open('tests/aws/report_running_expected_csv.txt', 'r').read()
monkeypatch.setattr('boto3.resource', ec2_resource)
ec2_resource.set_responses(instances)
@@ -633,7 +1260,7 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
)
process_ec2.set_responses(prices)
- reports = accloudtant.aws.reports.Reports()
+ reports = accloudtant.aws.reports.Reports(output_format='csv')
print(reports)
out, err = capsys.readouterr()
@@ -646,3 +1273,4 @@ def test_reports(capsys, monkeypatch, ec2_resource, ec2_client, process_ec2):
assert(instance.current == mock['current'])
assert(instance.best == mock['best'])
assert(out == expected)
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ifosch/accloudtant.git@b662c13380e9e5c42232e51ef3ef059c679ad5e9#egg=accloudtant
boto3==1.1.4
botocore==1.2.10
click==4.1
click-log==0.1.4
docutils==0.21.2
exceptiongroup==1.2.2
futures==2.2.0
iniconfig==2.1.0
jmespath==0.10.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
requests==2.8.1
six==1.17.0
tabulate==0.7.5
tomli==2.2.1
| name: accloudtant
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.1.4
- botocore==1.2.10
- click==4.1
- click-log==0.1.4
- docutils==0.21.2
- exceptiongroup==1.2.2
- futures==2.2.0
- iniconfig==2.1.0
- jmespath==0.10.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- requests==2.8.1
- six==1.17.0
- tabulate==0.7.5
- tomli==2.2.1
prefix: /opt/conda/envs/accloudtant
| [
"tests/aws/test_reports.py::test_reports_table",
"tests/aws/test_reports.py::test_reports_csv"
]
| []
| []
| []
| null | 684 | [
"bin/accloudtant",
"accloudtant/aws/reports.py"
]
| [
"bin/accloudtant",
"accloudtant/aws/reports.py"
]
|
|
setokinto__slack-shogi-67 | 3f692f2862e7bc2fb9fbbe8d73310c2696653e4d | 2016-08-06 17:40:57 | f5175be50a09220713ceb5547cd04f80f43f84fb | diff --git a/app/kifu.py b/app/kifu.py
new file mode 100644
index 0000000..a0a0c5d
--- /dev/null
+++ b/app/kifu.py
@@ -0,0 +1,13 @@
+
+
+class Kifu:
+
+ def __init__(self):
+ self.kifu = []
+
+ def add(self, from_x, from_y, to_x, to_y, promote):
+ self.kifu.append((from_x, from_y, to_x, to_y, promote))
+
+ def pop(self):
+ return self.kifu.pop()
+
diff --git a/app/modules/shogi_input.py b/app/modules/shogi_input.py
index 58bea57..7996f02 100644
--- a/app/modules/shogi_input.py
+++ b/app/modules/shogi_input.py
@@ -6,6 +6,7 @@ from app.slack_utils.user import User as UserFinder
from app.modules.shogi import Shogi as ShogiModule
from app.modules.parse_input import ParseInput
from app.validator import BasicUserValidator, AllPassUserValidator
+from app.kifu import Kifu
class UserDifferentException(Exception):
@@ -136,6 +137,13 @@ class ShogiInput:
"_shogi": shogi,
}
+ @staticmethod
+ def matta(channel_id, user_id):
+ shogi = ShogiInput.manager.get_shogi(channel_id)
+ if not shogi.validate(shogi, user_id):
+ raise UserDifferentException()
+ shogi.matta()
+
class Shogi:
@@ -150,9 +158,11 @@ class Shogi:
self.second_user_name = users[1]["name"]
self.id = uuid.uuid4().hex
self._validator = validator
+ self.kifu = Kifu()
def move(self, from_x, from_y, to_x, to_y, promote):
self.shogi.move(from_x, from_y, to_x, to_y, promote)
+ self.kifu.add(from_x, from_y, to_x, to_y, promote)
def drop(self, koma, to_x, to_y):
self.shogi.drop(koma, to_x, to_y)
@@ -172,6 +182,15 @@ class Shogi:
def set_validator(self, validator):
self._validator = validator
+ def matta(self):
+ if len(self.kifu.kifu) == 0:
+ raise KomaCannotMoveException
+ self.kifu.pop()
+ self.shogi = ShogiModule()
+ for kifu in self.kifu.kifu:
+ from_x, from_y, to_x, to_y, promote = kifu
+ self.shogi.move(from_x, from_y, to_x, to_y, promote)
+
@property
def first(self):
return self.shogi.first
diff --git a/app/shogi.py b/app/shogi.py
index c28afea..3173412 100644
--- a/app/shogi.py
+++ b/app/shogi.py
@@ -113,3 +113,19 @@ def resign(channel, message):
message.send(board_str)
ShogiInput.clear(channel.channel_id)
+@respond_to("待った")
+@channel_info
+@should_exist_shogi
+def matta(channel, message):
+ try:
+ ShogiInput.matta(channel.channel_id, channel.own_id)
+ message.send("mattaed")
+ except UserDifferentException:
+ message.reply("You cannot matta because *it's not your turn*")
+ except KomaCannotMoveException:
+ message.reply("You cannot matta because koma not moved")
+ finally:
+ board = ShogiInput.get_shogi_board(channel.channel_id)
+ board_str = ShogiOutput.make_board_emoji(board)
+ message.send(board_str)
+
| 待った に対応する
あえて 待った をありにしたい。 | setokinto/slack-shogi | diff --git a/test/modules/shogi_input_test.py b/test/modules/shogi_input_test.py
index 58dca13..272b547 100644
--- a/test/modules/shogi_input_test.py
+++ b/test/modules/shogi_input_test.py
@@ -4,7 +4,6 @@ from app.modules.shogi_input import ShogiInput, UserDifferentException, KomaCann
from app.modules.shogi import Koma
-
class ShogiTest(unittest.TestCase):
def setUp(self):
@@ -107,3 +106,48 @@ class ShogiTest(unittest.TestCase):
ShogiInput.setAllMode(channel_id)
ShogiInput.move("34歩", channel_id, shogi.first_user_id)
+ def test_matta(self):
+ channel_id = "test_matta"
+ shogi = ShogiInput.init(channel_id, [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }])
+ ShogiInput.move("76歩", channel_id, shogi.first_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.fu)
+ ShogiInput.matta(channel_id, shogi.second_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.empty)
+ ShogiInput.move("76歩", channel_id, shogi.first_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.fu)
+
+ def test_matta_for_UserDifferentException(self):
+ channel_id = "test_matta_for_UserDifferentException"
+ shogi = ShogiInput.init(channel_id, [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }])
+ ShogiInput.move("76歩", channel_id, shogi.first_user_id)
+ self.assertEqual(shogi.board[5][2], Koma.fu)
+ with self.assertRaises(UserDifferentException):
+ ShogiInput.matta(channel_id, shogi.first_user_id)
+ ShogiInput.move("34歩", channel_id, shogi.second_user_id)
+ with self.assertRaises(UserDifferentException):
+ ShogiInput.matta(channel_id, shogi.second_user_id)
+
+ def test_matta_for_KomaCannotMoveException(self):
+ channel_id = "test_matta_for_KomaCannotMoveException"
+ shogi = ShogiInput.init(channel_id, [{
+ "id": "user1",
+ "name": "user1name",
+ }, {
+ "id": "user2",
+ "name": "user2name",
+ }])
+ with self.assertRaises(KomaCannotMoveException):
+ ShogiInput.matta(channel_id, shogi.first_user_id)
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
requests==2.32.3
six==1.17.0
-e git+https://github.com/setokinto/slack-shogi.git@3f692f2862e7bc2fb9fbbe8d73310c2696653e4d#egg=Slack_Shogi
slackbot==1.0.5
slacker==0.14.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
urllib3==2.3.0
websocket-client==1.6.0
| name: slack-shogi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- idna==3.10
- requests==2.32.3
- six==1.17.0
- slackbot==1.0.5
- slacker==0.14.0
- urllib3==2.3.0
- websocket-client==1.6.0
prefix: /opt/conda/envs/slack-shogi
| [
"test/modules/shogi_input_test.py::ShogiTest::test_matta",
"test/modules/shogi_input_test.py::ShogiTest::test_matta_for_KomaCannotMoveException",
"test/modules/shogi_input_test.py::ShogiTest::test_matta_for_UserDifferentException"
]
| []
| [
"test/modules/shogi_input_test.py::ShogiTest::test_clear_for_non_exists_channnel",
"test/modules/shogi_input_test.py::ShogiTest::test_move_method_should_raise_KomaCannotMoveException",
"test/modules/shogi_input_test.py::ShogiTest::test_move_method_should_raise_UserDifferentException",
"test/modules/shogi_input_test.py::ShogiTest::test_move_method_should_work",
"test/modules/shogi_input_test.py::ShogiTest::test_set_any_user_validator",
"test/modules/shogi_input_test.py::ShogiTest::test_shogi_input_is_initable"
]
| []
| MIT License | 685 | [
"app/modules/shogi_input.py",
"app/shogi.py",
"app/kifu.py"
]
| [
"app/modules/shogi_input.py",
"app/shogi.py",
"app/kifu.py"
]
|
|
andycasey__ads-74 | ce1dce7fb2695d6436c112926709fe1a63e881cd | 2016-08-06 21:30:19 | c039d67c2b2e9dad936758bc89df1fdd1cbd0aa1 | diff --git a/ads/search.py b/ads/search.py
index f1b5205..97727de 100644
--- a/ads/search.py
+++ b/ads/search.py
@@ -399,6 +399,12 @@ class SearchQuery(BaseQuery):
else:
self._query["fl"] = ["id"] + self._query["fl"]
+ # remove bibtex and metrics as a safeguard against
+ # https://github.com/andycasey/ads/issues/73
+ for field in ["bibtex", "metrics"]:
+ if field in self._query["fl"]:
+ self.query["fl"].remove(field)
+
# Format and add kwarg (key, value) pairs to q
if kwargs:
_ = [u'{}:"{}"'.format(k, v) for k, v in six.iteritems(kwargs)]
| Bibtex issue
<!--- Provide a general summary of the issue in the Title above -->
## Expected Behavior
Hello,
I would like to extract the url of the ADS page associated to each entry. I got it from the bibtex entry, but bibtex does always return none when query from the field. More specifically, querying with the following:
papers = list(ads.SearchQuery(author=myname, max_pages=6,fl=['citation_count', 'bibcode', 'title', 'bibtex','year','author']))
does always provide a None for papers[0].bibtex (as well as other entries)
Am I making some error?
Thanks!
Alex
<!--- If you're describing a bug, tell us what should happen -->
<!--- If you're suggesting a change/improvement, tell us how it should work -->
## Current Behavior
<!--- If describing a bug, tell us what happens instead of the expected behavior -->
<!--- If suggesting a change/improvement, explain the difference from current behavior -->
## Possible Solution
<!--- Not obligatory, but suggest a fix/reason for the bug, -->
<!--- or ideas how to implement the addition or change -->
## Steps to Reproduce (for bugs)
<!--- Provide a link to a live example, or an unambiguous set of steps to -->
<!--- reproduce this bug. Include code to reproduce, if relevant -->
1.
2.
3.
4.
## Context
<!--- How has this issue affected you? What are you trying to accomplish? -->
<!--- Providing context helps us come up with a solution that is most useful in the real world -->
## Your Environment
<!--- Include as many relevant details about the environment you experienced the bug in -->
* Version used (hint: `python -c "import ads; print(ads.__version__)"`)
* Python version (hint: `python -V`)
| andycasey/ads | diff --git a/ads/tests/test_search.py b/ads/tests/test_search.py
index 3213d42..f27f581 100644
--- a/ads/tests/test_search.py
+++ b/ads/tests/test_search.py
@@ -245,6 +245,10 @@ class TestSearchQuery(unittest.TestCase):
with six.assertRaisesRegex(self, AssertionError, ".+mutually exclusive.+"):
SearchQuery(q="start", start=0, cursorMark="*")
+ # test that bibtex/metrics is excluded from sq.query['fl']
+ sq = SearchQuery(q="star", fl=["f1", "bibtex", "f2", "metrics", "f3"])
+ self.assertEqual(sq.query['fl'], ["id", "f1", "f2", "f3"])
+
class TestSolrResponse(unittest.TestCase):
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/andycasey/ads.git@ce1dce7fb2695d6436c112926709fe1a63e881cd#egg=ads
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
httpretty==0.8.10
idna==3.10
iniconfig==2.1.0
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
Werkzeug==3.1.3
| name: ads
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- httpretty==0.8.10
- idna==3.10
- iniconfig==2.1.0
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
- werkzeug==3.1.3
prefix: /opt/conda/envs/ads
| [
"ads/tests/test_search.py::TestSearchQuery::test_init"
]
| [
"ads/tests/test_search.py::TestArticle::test_cached_properties",
"ads/tests/test_search.py::TestArticle::test_get_field",
"ads/tests/test_search.py::TestSearchQuery::test_iter",
"ads/tests/test_search.py::TestSearchQuery::test_rows_rewrite"
]
| [
"ads/tests/test_search.py::TestArticle::test_equals",
"ads/tests/test_search.py::TestArticle::test_init",
"ads/tests/test_search.py::TestArticle::test_print_methods",
"ads/tests/test_search.py::TestSolrResponse::test_articles",
"ads/tests/test_search.py::TestSolrResponse::test_default_article_fields",
"ads/tests/test_search.py::TestSolrResponse::test_init",
"ads/tests/test_search.py::TestSolrResponse::test_load_http_response",
"ads/tests/test_search.py::Testquery::test_init"
]
| []
| MIT License | 686 | [
"ads/search.py"
]
| [
"ads/search.py"
]
|
|
chimpler__pyhocon-92 | abac1214ebcda0634960c29a16fba9a533266043 | 2016-08-07 19:29:40 | 4683937b1d195ce2f53ca78987571e41bfe273e7 | diff --git a/pyhocon/config_parser.py b/pyhocon/config_parser.py
index 9e20236..27366a5 100644
--- a/pyhocon/config_parser.py
+++ b/pyhocon/config_parser.py
@@ -236,7 +236,7 @@ class ConfigParser(object):
value_expr = number_expr | true_expr | false_expr | null_expr | string_expr
- include_expr = (Keyword("include", caseless=True).suppress() - (
+ include_expr = (Keyword("include", caseless=True).suppress() + (
quoted_string | (
(Keyword('url') | Keyword('file')) - Literal('(').suppress() - quoted_string - Literal(')').suppress()))) \
.setParseAction(include_config)
| Syntax error while parsing variables of form include-blah-blah
Trying to parse the following config:
```
{
include-other-stuff = true
}
```
with `conf = pyhocon.ConfigFactory.parse_file('/tmp/foo.conf')` results in the following stacktrace:
```
File "/home/blah/blahblah.py", line 53, in <module>
conf = pyhocon.ConfigFactory.parse_file('/tmp/foo.conf')
File "/opt/anaconda3/lib/python3.4/site-packages/pyhocon/config_parser.py", line 48, in parse_file
return ConfigFactory.parse_string(content, os.path.dirname(filename), resolve)
File "/opt/anaconda3/lib/python3.4/site-packages/pyhocon/config_parser.py", line 87, in parse_string
return ConfigParser().parse(content, basedir, resolve)
File "/opt/anaconda3/lib/python3.4/site-packages/pyhocon/config_parser.py", line 269, in parse
config = config_expr.parseString(content, parseAll=True)[0]
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 1125, in parseString
raise exc
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 1115, in parseString
loc, tokens = self._parse( instring, 0 )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2378, in parseImpl
loc, exprtokens = e._parse( instring, loc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2483, in parseImpl
ret = e._parse( instring, loc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2624, in parseImpl
return self.expr._parse( instring, loc, doActions, callPreParse=False )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2361, in parseImpl
loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2369, in parseImpl
loc, exprtokens = e._parse( instring, loc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2624, in parseImpl
return self.expr._parse( instring, loc, doActions, callPreParse=False )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2739, in parseImpl
loc, tmptokens = self.expr._parse( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2483, in parseImpl
ret = e._parse( instring, loc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 989, in _parseNoCache
loc,tokens = self.parseImpl( instring, preloc, doActions )
File "/opt/anaconda3/lib/python3.4/site-packages/pyparsing.py", line 2374, in parseImpl
raise ParseSyntaxException(pe)
pyparsing.ParseSyntaxException: Expected Re:('".*?"[ \t]*') (at char 13), (line:2, col:12)
``` | chimpler/pyhocon | diff --git a/tests/test_config_parser.py b/tests/test_config_parser.py
index a0927f1..72d0114 100644
--- a/tests/test_config_parser.py
+++ b/tests/test_config_parser.py
@@ -1153,6 +1153,16 @@ class TestConfigParser(object):
assert config['x'] == 42
assert config['y'] == 42
+ def test_var_with_include_keyword(self):
+ config = ConfigFactory.parse_string(
+ """
+ include-database=true
+ """)
+
+ assert config == {
+ 'include-database': True
+ }
+
def test_substitution_override(self):
config = ConfigFactory.parse_string(
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///croot/attrs_1668696182826/work
certifi @ file:///croot/certifi_1671487769961/work/certifi
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mock==5.2.0
packaging @ file:///croot/packaging_1671697413597/work
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
-e git+https://github.com/chimpler/pyhocon.git@abac1214ebcda0634960c29a16fba9a533266043#egg=pyhocon
pyparsing==3.1.4
pytest==7.1.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
zipp @ file:///croot/zipp_1672387121353/work
| name: pyhocon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- mock==5.2.0
- pyparsing==3.1.4
prefix: /opt/conda/envs/pyhocon
| [
"tests/test_config_parser.py::TestConfigParser::test_var_with_include_keyword"
]
| []
| [
"tests/test_config_parser.py::TestConfigParser::test_parse_simple_value",
"tests/test_config_parser.py::TestConfigParser::test_parse_with_enclosing_brace",
"tests/test_config_parser.py::TestConfigParser::test_parse_with_enclosing_square_bracket",
"tests/test_config_parser.py::TestConfigParser::test_quoted_key_with_dots",
"tests/test_config_parser.py::TestConfigParser::test_dotted_notation_merge",
"tests/test_config_parser.py::TestConfigParser::test_comma_to_separate_expr",
"tests/test_config_parser.py::TestConfigParser::test_dict_merge",
"tests/test_config_parser.py::TestConfigParser::test_parse_with_comments",
"tests/test_config_parser.py::TestConfigParser::test_missing_config",
"tests/test_config_parser.py::TestConfigParser::test_parse_null",
"tests/test_config_parser.py::TestConfigParser::test_parse_override",
"tests/test_config_parser.py::TestConfigParser::test_concat_dict",
"tests/test_config_parser.py::TestConfigParser::test_concat_string",
"tests/test_config_parser.py::TestConfigParser::test_concat_list",
"tests/test_config_parser.py::TestConfigParser::test_bad_concat",
"tests/test_config_parser.py::TestConfigParser::test_string_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_string_substitutions_with_no_space",
"tests/test_config_parser.py::TestConfigParser::test_int_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_cascade_string_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_multiple_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_dict_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_unquoted_string_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_quoted_string_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_triple_quoted_string_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_int_noeol",
"tests/test_config_parser.py::TestConfigParser::test_dos_chars_with_float_noeol",
"tests/test_config_parser.py::TestConfigParser::test_list_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_list_element_substitution",
"tests/test_config_parser.py::TestConfigParser::test_substitution_list_with_append",
"tests/test_config_parser.py::TestConfigParser::test_substitution_list_with_append_substitution",
"tests/test_config_parser.py::TestConfigParser::test_non_existent_substitution",
"tests/test_config_parser.py::TestConfigParser::test_non_compatible_substitution",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_array",
"tests/test_config_parser.py::TestConfigParser::test_self_append_array",
"tests/test_config_parser.py::TestConfigParser::test_self_append_string",
"tests/test_config_parser.py::TestConfigParser::test_self_append_non_existent_string",
"tests/test_config_parser.py::TestConfigParser::test_self_append_nonexistent_array",
"tests/test_config_parser.py::TestConfigParser::test_self_append_object",
"tests/test_config_parser.py::TestConfigParser::test_self_append_nonexistent_object",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_array_to_dict",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitiotion_dict_in_array",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_path",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_path_hide",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_recurse",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_recurse2",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_merge",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_otherfield",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_otherfield_merged_in",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_otherfield_merged_in_mutual",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_string_opt_concat",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_dict_recurse_part",
"tests/test_config_parser.py::TestConfigParser::test_self_ref_substitution_object",
"tests/test_config_parser.py::TestConfigParser::test_concat_multi_line_string",
"tests/test_config_parser.py::TestConfigParser::test_concat_multi_line_list",
"tests/test_config_parser.py::TestConfigParser::test_concat_multi_line_dict",
"tests/test_config_parser.py::TestConfigParser::test_parse_URL_from_samples",
"tests/test_config_parser.py::TestConfigParser::test_parse_URL_from_invalid",
"tests/test_config_parser.py::TestConfigParser::test_include_dict_from_samples",
"tests/test_config_parser.py::TestConfigParser::test_list_of_dicts",
"tests/test_config_parser.py::TestConfigParser::test_list_of_lists",
"tests/test_config_parser.py::TestConfigParser::test_list_of_dicts_with_merge",
"tests/test_config_parser.py::TestConfigParser::test_list_of_lists_with_merge",
"tests/test_config_parser.py::TestConfigParser::test_invalid_assignment",
"tests/test_config_parser.py::TestConfigParser::test_invalid_dict",
"tests/test_config_parser.py::TestConfigParser::test_include_list",
"tests/test_config_parser.py::TestConfigParser::test_include_dict",
"tests/test_config_parser.py::TestConfigParser::test_include_substitution",
"tests/test_config_parser.py::TestConfigParser::test_substitution_override",
"tests/test_config_parser.py::TestConfigParser::test_substitution_flat_override",
"tests/test_config_parser.py::TestConfigParser::test_substitution_nested_override",
"tests/test_config_parser.py::TestConfigParser::test_optional_substitution",
"tests/test_config_parser.py::TestConfigParser::test_cascade_optional_substitution",
"tests/test_config_parser.py::TestConfigParser::test_substitution_cycle",
"tests/test_config_parser.py::TestConfigParser::test_assign_number_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_strings_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_list_numbers_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_list_strings_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_dict_strings_with_equal_sign_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_assign_dict_strings_no_equal_sign_with_eol",
"tests/test_config_parser.py::TestConfigParser::test_substitutions_overwrite",
"tests/test_config_parser.py::TestConfigParser::test_fallback_substitutions_overwrite",
"tests/test_config_parser.py::TestConfigParser::test_fallback_substitutions_overwrite_file",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_append",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_append_plus_equals",
"tests/test_config_parser.py::TestConfigParser::test_self_merge_ref_substitutions_object",
"tests/test_config_parser.py::TestConfigParser::test_self_merge_ref_substitutions_object2",
"tests/test_config_parser.py::TestConfigParser::test_self_merge_ref_substitutions_object3",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_merge",
"tests/test_config_parser.py::TestConfigParser::test_fallback_self_ref_substitutions_concat_string",
"tests/test_config_parser.py::TestConfigParser::test_object_field_substitution",
"tests/test_config_parser.py::TestConfigParser::test_one_line_quote_escape",
"tests/test_config_parser.py::TestConfigParser::test_multi_line_escape",
"tests/test_config_parser.py::TestConfigParser::test_multiline_with_backslash",
"tests/test_config_parser.py::TestConfigParser::test_from_dict_with_dict",
"tests/test_config_parser.py::TestConfigParser::test_from_dict_with_ordered_dict",
"tests/test_config_parser.py::TestConfigParser::test_from_dict_with_nested_dict",
"tests/test_config_parser.py::TestConfigParser::test_object_concat",
"tests/test_config_parser.py::TestConfigParser::test_issue_75",
"tests/test_config_parser.py::TestConfigParser::test_plain_ordered_dict",
"tests/test_config_parser.py::TestConfigParser::test_quoted_strings_with_ws",
"tests/test_config_parser.py::TestConfigParser::test_unquoted_strings_with_ws",
"tests/test_config_parser.py::TestConfigParser::test_quoted_unquoted_strings_with_ws",
"tests/test_config_parser.py::TestConfigParser::test_quoted_unquoted_strings_with_ws_substitutions",
"tests/test_config_parser.py::TestConfigParser::test_assign_next_line",
"tests/test_config_parser.py::TestConfigParser::test_string_from_environment",
"tests/test_config_parser.py::TestConfigParser::test_bool_from_environment",
"tests/test_config_parser.py::TestConfigParser::test_int_from_environment"
]
| []
| Apache License 2.0 | 687 | [
"pyhocon/config_parser.py"
]
| [
"pyhocon/config_parser.py"
]
|
|
mkdocs__mkdocs-1018 | 63558bffe33ccb59d10b939641ed546e48c95144 | 2016-08-08 02:49:00 | e7d8879d2b53d9e50bdfcf1cf29c48dc3f6bc87f | diff --git a/mkdocs/__main__.py b/mkdocs/__main__.py
index c9d519fc..016564b3 100644
--- a/mkdocs/__main__.py
+++ b/mkdocs/__main__.py
@@ -67,7 +67,7 @@ def common_options(f):
return f
-clean_help = "Remove old files from the site_dir before building"
+clean_help = "Remove old files from the site_dir before building (the default)."
config_help = "Provide a specific MkDocs config"
dev_addr_help = ("IP address and port to serve documentation locally (default: "
"localhost:8000)")
@@ -103,7 +103,7 @@ def cli():
@click.option('-s', '--strict', is_flag=True, help=strict_help)
@click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help)
@click.option('-e', '--theme-dir', type=click.Path(), help=theme_dir_help)
[email protected]('--livereload', 'livereload', flag_value='livereload', help=reload_help)
[email protected]('--livereload', 'livereload', flag_value='livereload', help=reload_help, default=True)
@click.option('--no-livereload', 'livereload', flag_value='no-livereload', help=no_reload_help)
@click.option('-d', '--dirtyreload', 'livereload', flag_value='dirty', help=dirty_reload_help)
@common_options
@@ -125,13 +125,13 @@ def serve_command(dev_addr, config_file, strict, theme, theme_dir, livereload):
theme_dir=theme_dir,
livereload=livereload
)
- except (exceptions.ConfigurationError, socket.error) as e:
+ except (exceptions.ConfigurationError, socket.error) as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="build")
[email protected]('-c', '--clean/--dirty', is_flag=True, help=clean_help)
[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-s', '--strict', is_flag=True, help=strict_help)
@click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help)
@@ -153,13 +153,13 @@ def build_command(clean, config_file, strict, theme, theme_dir, site_dir):
theme_dir=theme_dir,
site_dir=site_dir
), dirty=not clean)
- except exceptions.ConfigurationError as e:
+ except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="json")
[email protected]('-c', '--clean', is_flag=True, help=clean_help)
[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-s', '--strict', is_flag=True, help=strict_help)
@click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help)
@@ -187,13 +187,13 @@ def json_command(clean, config_file, strict, site_dir):
strict=strict,
site_dir=site_dir
), dump_json=True, dirty=not clean)
- except exceptions.ConfigurationError as e:
+ except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="gh-deploy")
[email protected]('-c', '--clean', is_flag=True, help=clean_help)
[email protected]('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-m', '--message', help=commit_message_help)
@click.option('-b', '--remote-branch', help=remote_branch_help)
@@ -209,7 +209,7 @@ def gh_deploy_command(config_file, clean, message, remote_branch, remote_name):
)
build.build(cfg, dirty=not clean)
gh_deploy.gh_deploy(cfg, message=message)
- except exceptions.ConfigurationError as e:
+ except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@@ -221,5 +221,5 @@ def new_command(project_directory):
"""Create a new MkDocs project"""
new.new(project_directory)
-if __name__ == '__main__':
+if __name__ == '__main__': # pragma: no cover
cli()
| Improve CLI tests.
The CLI tests use Mock objects (as they should), but all they do is confirm that the proper function was called once. There is no testing of the CLI options and what values are passed in (either the default values or custom values defined by the user). For example, when working on #997, both @aeslaughter and I missed that the default server was changed (see #1014). Also while working on #1009, I noticed that perhaps the default for `--clean` may not be what we think it is (we intended to change the default in #997, but I'm not sure we did).
We need tests for all the different options for each command to ensure the values passed from the CLI match expectations/documentation. Basically, we need more tests like the one I added in #1009. | mkdocs/mkdocs | diff --git a/mkdocs/tests/cli_tests.py b/mkdocs/tests/cli_tests.py
index a4da76e8..0014dc2a 100644
--- a/mkdocs/tests/cli_tests.py
+++ b/mkdocs/tests/cli_tests.py
@@ -4,11 +4,16 @@
from __future__ import unicode_literals
import unittest
import mock
+import logging
+import sys
+import io
from click.testing import CliRunner
from mkdocs import __main__ as cli
+PY3 = sys.version_info[0] == 3
+
class CLITests(unittest.TestCase):
@@ -16,10 +21,10 @@ class CLITests(unittest.TestCase):
self.runner = CliRunner()
@mock.patch('mkdocs.commands.serve.serve', autospec=True)
- def test_serve(self, mock_serve):
+ def test_serve_default(self, mock_serve):
result = self.runner.invoke(
- cli.cli, ["serve", ], catch_exceptions=False)
+ cli.cli, ["serve"], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
mock_serve.assert_called_once_with(
@@ -28,18 +33,149 @@ class CLITests(unittest.TestCase):
strict=None,
theme=None,
theme_dir=None,
- livereload=None
+ livereload='livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_config_file(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", "--config-file", "mkdocs.yml"], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_serve.call_count, 1)
+ args, kwargs = mock_serve.call_args
+ self.assertTrue('config_file' in kwargs)
+ if PY3:
+ self.assertIsInstance(kwargs['config_file'], io.BufferedReader)
+ else:
+ self.assertTrue(isinstance(kwargs['config_file'], file))
+ self.assertEqual(kwargs['config_file'].name, 'mkdocs.yml')
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_dev_addr(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--dev-addr', '0.0.0.0:80'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr='0.0.0.0:80',
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ livereload='livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_strict(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--strict'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=True,
+ theme=None,
+ theme_dir=None,
+ livereload='livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_theme(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--theme', 'readthedocs'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=None,
+ theme='readthedocs',
+ theme_dir=None,
+ livereload='livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_theme_dir(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--theme-dir', 'custom'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=None,
+ theme=None,
+ theme_dir='custom',
+ livereload='livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_livereload(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--livereload'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ livereload='livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_no_livereload(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--no-livereload'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ livereload='no-livereload'
+ )
+
+ @mock.patch('mkdocs.commands.serve.serve', autospec=True)
+ def test_serve_dirtyreload(self, mock_serve):
+
+ result = self.runner.invoke(
+ cli.cli, ["serve", '--dirtyreload'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ mock_serve.assert_called_once_with(
+ config_file=None,
+ dev_addr=None,
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ livereload='dirty'
)
@mock.patch('mkdocs.config.load_config', autospec=True)
@mock.patch('mkdocs.commands.build.build', autospec=True)
- def test_build(self, mock_build, mock_load_config):
+ def test_build_defaults(self, mock_build, mock_load_config):
result = self.runner.invoke(
- cli.cli, ["build", ], catch_exceptions=False)
+ cli.cli, ['build'], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
self.assertEqual(mock_build.call_count, 1)
+ args, kwargs = mock_build.call_args
+ self.assertTrue('dirty' in kwargs)
+ self.assertFalse(kwargs['dirty'])
mock_load_config.assert_called_once_with(
config_file=None,
strict=None,
@@ -47,15 +183,140 @@ class CLITests(unittest.TestCase):
theme_dir=None,
site_dir=None
)
+ logger = logging.getLogger('mkdocs')
+ self.assertEqual(logger.level, logging.INFO)
+
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_clean(self, mock_build):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--clean'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ args, kwargs = mock_build.call_args
+ self.assertTrue('dirty' in kwargs)
+ self.assertFalse(kwargs['dirty'])
+
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_dirty(self, mock_build):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--dirty'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ args, kwargs = mock_build.call_args
+ self.assertTrue('dirty' in kwargs)
+ self.assertTrue(kwargs['dirty'])
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_config_file(self, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--config-file', 'mkdocs.yml'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ self.assertEqual(mock_load_config.call_count, 1)
+ args, kwargs = mock_load_config.call_args
+ self.assertTrue('config_file' in kwargs)
+ if PY3:
+ self.assertIsInstance(kwargs['config_file'], io.BufferedReader)
+ else:
+ self.assertTrue(isinstance(kwargs['config_file'], file))
+ self.assertEqual(kwargs['config_file'].name, 'mkdocs.yml')
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_strict(self, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--strict'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ strict=True,
+ theme=None,
+ theme_dir=None,
+ site_dir=None
+ )
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_theme(self, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--theme', 'readthedocs'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ strict=None,
+ theme='readthedocs',
+ theme_dir=None,
+ site_dir=None
+ )
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_theme_dir(self, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--theme-dir', 'custom'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ strict=None,
+ theme=None,
+ theme_dir='custom',
+ site_dir=None
+ )
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_site_dir(self, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--site-dir', 'custom'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ strict=None,
+ theme=None,
+ theme_dir=None,
+ site_dir='custom'
+ )
@mock.patch('mkdocs.commands.build.build', autospec=True)
def test_build_verbose(self, mock_build):
result = self.runner.invoke(
- cli.cli, ["--verbose", "build"], catch_exceptions=False)
+ cli.cli, ['build', '--verbose'], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
self.assertEqual(mock_build.call_count, 1)
+ logger = logging.getLogger('mkdocs')
+ self.assertEqual(logger.level, logging.DEBUG)
+
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ def test_build_quiet(self, mock_build):
+
+ result = self.runner.invoke(
+ cli.cli, ['build', '--quiet'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_build.call_count, 1)
+ logger = logging.getLogger('mkdocs')
+ self.assertEqual(logger.level, logging.ERROR)
@mock.patch('mkdocs.commands.build.build', autospec=True)
def test_json(self, mock_build):
@@ -73,13 +334,125 @@ class CLITests(unittest.TestCase):
cli.cli, ["new", "project"], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
- self.assertEqual(mock_new.call_count, 1)
+ mock_new.assert_called_once_with('project')
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_defaults(self, mock_gh_deploy, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ g_args, g_kwargs = mock_gh_deploy.call_args
+ self.assertTrue('message' in g_kwargs)
+ self.assertEqual(g_kwargs['message'], None)
+ self.assertEqual(mock_build.call_count, 1)
+ b_args, b_kwargs = mock_build.call_args
+ self.assertTrue('dirty' in b_kwargs)
+ self.assertFalse(b_kwargs['dirty'])
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ remote_branch=None,
+ remote_name=None
+ )
+
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_clean(self, mock_gh_deploy, mock_build):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy', '--clean'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ self.assertEqual(mock_build.call_count, 1)
+ args, kwargs = mock_build.call_args
+ self.assertTrue('dirty' in kwargs)
+ self.assertFalse(kwargs['dirty'])
+
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_dirty(self, mock_gh_deploy, mock_build):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy', '--dirty'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ self.assertEqual(mock_build.call_count, 1)
+ args, kwargs = mock_build.call_args
+ self.assertTrue('dirty' in kwargs)
+ self.assertTrue(kwargs['dirty'])
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_config_file(self, mock_gh_deploy, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy', '--config-file', 'mkdocs.yml'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ self.assertEqual(mock_build.call_count, 1)
+ self.assertEqual(mock_load_config.call_count, 1)
+ args, kwargs = mock_load_config.call_args
+ self.assertTrue('config_file' in kwargs)
+ if PY3:
+ self.assertIsInstance(kwargs['config_file'], io.BufferedReader)
+ else:
+ self.assertTrue(isinstance(kwargs['config_file'], file))
+ self.assertEqual(kwargs['config_file'].name, 'mkdocs.yml')
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_message(self, mock_gh_deploy, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy', '--message', 'A commit message'], catch_exceptions=False)
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ g_args, g_kwargs = mock_gh_deploy.call_args
+ self.assertTrue('message' in g_kwargs)
+ self.assertEqual(g_kwargs['message'], 'A commit message')
+ self.assertEqual(mock_build.call_count, 1)
+ self.assertEqual(mock_load_config.call_count, 1)
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
@mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
- def test_gh_deploy(self, mock_gh_deploy):
+ def test_gh_deploy_remote_branch(self, mock_gh_deploy, mock_build, mock_load_config):
result = self.runner.invoke(
- cli.cli, ["gh-deploy"], catch_exceptions=False)
+ cli.cli, ['gh-deploy', '--remote-branch', 'foo'], catch_exceptions=False)
self.assertEqual(result.exit_code, 0)
self.assertEqual(mock_gh_deploy.call_count, 1)
+ self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ remote_branch='foo',
+ remote_name=None
+ )
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_remote_name(self, mock_gh_deploy, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy', '--remote-name', 'foo'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ self.assertEqual(mock_build.call_count, 1)
+ mock_load_config.assert_called_once_with(
+ config_file=None,
+ remote_branch=None,
+ remote_name='foo'
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/project.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
exceptiongroup==1.2.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
livereload==2.7.1
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@63558bffe33ccb59d10b939641ed546e48c95144#egg=mkdocs
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
tomli==2.2.1
tornado==6.4.2
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- exceptiongroup==1.2.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- livereload==2.7.1
- markdown==3.7
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- tomli==2.2.1
- tornado==6.4.2
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build_defaults",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_defaults",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_default",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_dev_addr",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_strict",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_theme",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_theme_dir"
]
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build_clean",
"mkdocs/tests/cli_tests.py::CLITests::test_build_dirty",
"mkdocs/tests/cli_tests.py::CLITests::test_build_quiet",
"mkdocs/tests/cli_tests.py::CLITests::test_build_verbose",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_clean",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_dirty",
"mkdocs/tests/cli_tests.py::CLITests::test_json"
]
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build_config_file",
"mkdocs/tests/cli_tests.py::CLITests::test_build_site_dir",
"mkdocs/tests/cli_tests.py::CLITests::test_build_strict",
"mkdocs/tests/cli_tests.py::CLITests::test_build_theme",
"mkdocs/tests/cli_tests.py::CLITests::test_build_theme_dir",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_config_file",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_message",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_remote_branch",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_remote_name",
"mkdocs/tests/cli_tests.py::CLITests::test_new",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_config_file",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_dirtyreload",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_livereload",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_no_livereload"
]
| []
| BSD 2-Clause "Simplified" License | 688 | [
"mkdocs/__main__.py"
]
| [
"mkdocs/__main__.py"
]
|
|
zalando-stups__senza-301 | 6bd1f85283e4252aa58aa38fd74936dd978dcded | 2016-08-08 12:43:52 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/auto_scaling_group.py b/senza/components/auto_scaling_group.py
index ebffa4b..17d8974 100644
--- a/senza/components/auto_scaling_group.py
+++ b/senza/components/auto_scaling_group.py
@@ -20,11 +20,9 @@ def component_auto_scaling_group(definition, configuration, args, info, force, a
}
}
- if 'BlockDeviceMappings' in configuration:
- definition['Resources'][config_name]['Properties']['BlockDeviceMappings'] = configuration['BlockDeviceMappings']
-
- if "IamInstanceProfile" in configuration:
- definition["Resources"][config_name]["Properties"]["IamInstanceProfile"] = configuration["IamInstanceProfile"]
+ for key in set(["BlockDeviceMappings", "IamInstanceProfile", "SpotPrice"]):
+ if key in configuration:
+ definition['Resources'][config_name]['Properties'][key] = configuration[key]
if 'IamRoles' in configuration:
logical_id = configuration['Name'] + 'InstanceProfile'
| Senza ignores "SpotPrice" property on auto scaling group
```
InstanceType: m4.xlarge
SpotPrice: 0.250
```
According to cloudformation specs this will create an ASG with spot instances. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 7989398..1191bae 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -504,6 +504,33 @@ def test_component_auto_scaling_group_configurable_properties():
assert result["Resources"]["FooCPUAlarmHigh"]["Properties"]["EvaluationPeriods"] == "1"
assert result["Resources"]["FooCPUAlarmLow"]["Properties"]["AlarmDescription"] == expected_desc
+def test_component_auto_scaling_group_configurable_properties():
+ definition = {"Resources": {}}
+ configuration = {
+ 'Name': 'Foo',
+ 'InstanceType': 't2.micro',
+ 'Image': 'foo',
+ 'SpotPrice': 0.250
+ }
+
+ args = MagicMock()
+ args.region = "foo"
+
+ info = {
+ 'StackName': 'FooStack',
+ 'StackVersion': 'FooVersion'
+ }
+
+ result = component_auto_scaling_group(definition, configuration, args, info, False, MagicMock())
+
+ assert result["Resources"]["FooConfig"]["Properties"]["SpotPrice"] == 0.250
+
+ del configuration["SpotPrice"]
+
+ result = component_auto_scaling_group(definition, configuration, args, info, False, MagicMock())
+
+ assert "SpotPrice" not in result["Resources"]["FooConfig"]["Properties"]
+
def test_component_auto_scaling_group_metric_type():
definition = {"Resources": {}}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@6bd1f85283e4252aa58aa38fd74936dd978dcded#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties"
]
| [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name"
]
| []
| Apache License 2.0 | 689 | [
"senza/components/auto_scaling_group.py"
]
| [
"senza/components/auto_scaling_group.py"
]
|
|
scikit-build__scikit-build-118 | dd9814474b79009b7ac6523b4c14bbedd418f33f | 2016-08-08 21:40:24 | 3484eb1047c9883a33d26838dc207df5526d7e18 | codecov-io: ## [Current coverage](https://codecov.io/gh/scikit-build/scikit-build/pull/118?src=pr) is 64.22% (diff: 54.00%)
> Merging [#118](https://codecov.io/gh/scikit-build/scikit-build/pull/118?src=pr) into [master](https://codecov.io/gh/scikit-build/scikit-build/branch/master?src=pr) will decrease coverage by **2.64%**
```diff
@@ master #118 diff @@
==========================================
Files 17 18 +1
Lines 486 506 +20
Methods 0 0
Messages 0 0
Branches 93 96 +3
==========================================
Hits 325 325
- Misses 124 141 +17
- Partials 37 40 +3
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [93aca7b...dac50f6](https://codecov.io/gh/scikit-build/scikit-build/compare/93aca7b67f854ab2e9dfdcaaf99cdeaea0280413...dac50f68dde49069c66abcab94512547f3ed14f3?src=pr)
AppVeyorBot: :white_check_mark: [Build scikit-build 0.0.1.59 completed](https://ci.appveyor.com/project/scikit-build/scikit-build/build/0.0.1.59) (commit https://github.com/scikit-build/scikit-build/commit/847f829829 by @msmolens)
AppVeyorBot: :white_check_mark: [Build scikit-build 0.0.1.63 completed](https://ci.appveyor.com/project/scikit-build/scikit-build/build/0.0.1.63) (commit https://github.com/scikit-build/scikit-build/commit/331ee1542f by @msmolens)
AppVeyorBot: :white_check_mark: [Build scikit-build 0.0.1.70 completed](https://ci.appveyor.com/project/scikit-build/scikit-build/build/0.0.1.70) (commit https://github.com/scikit-build/scikit-build/commit/2cc9425fc2 by @msmolens) | diff --git a/skbuild/cmaker.py b/skbuild/cmaker.py
index cdefc46..02a8e10 100644
--- a/skbuild/cmaker.py
+++ b/skbuild/cmaker.py
@@ -27,7 +27,7 @@ def pop_arg(arg, a, default=None):
"""Pops an arg(ument) from an argument list a and returns the new list
and the value of the argument if present and a default otherwise.
"""
- parser = argparse.ArgumentParser()
+ parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(arg)
ns, a = parser.parse_known_args(a)
ns = tuple(vars(ns).items())
@@ -365,7 +365,7 @@ class CMaker(object):
"make?").format(CMAKE_BUILD_DIR))
cmd = ["cmake", "--build", source_dir,
- "--target", "install", "--config", config]
+ "--target", "install", "--config", config, "--"]
cmd.extend(clargs)
cmd.extend(
filter(bool,
diff --git a/skbuild/command/egg_info.py b/skbuild/command/egg_info.py
new file mode 100644
index 0000000..3944aa3
--- /dev/null
+++ b/skbuild/command/egg_info.py
@@ -0,0 +1,14 @@
+
+import os
+
+from setuptools.command.egg_info import egg_info as _egg_info
+
+
+class egg_info(_egg_info):
+ def finalize_options(self):
+ if self.egg_base is not None:
+ script_path = os.path.abspath(self.distribution.script_name)
+ script_dir = os.path.dirname(script_path)
+ self.egg_base = os.path.join(script_dir, self.egg_base)
+
+ _egg_info.finalize_options(self)
diff --git a/skbuild/setuptools_wrap.py b/skbuild/setuptools_wrap.py
index 54efdb3..9a45d50 100644
--- a/skbuild/setuptools_wrap.py
+++ b/skbuild/setuptools_wrap.py
@@ -8,7 +8,7 @@ import sys
import argparse
from . import cmaker
-from .command import build, install, clean, bdist, bdist_wheel
+from .command import build, install, clean, bdist, bdist_wheel, egg_info
from .exceptions import SKBuildError
try:
@@ -17,24 +17,42 @@ except ImportError:
from distutils.core import setup as upstream_setup
-def move_arg(arg, a, b, newarg=None, f=lambda x: x, concatenate_value=False):
- """Moves an argument from a list to b list, possibly giving it a new name
- and/or performing a transformation on the value. Returns a and b. The arg
- need not be present in a.
+def create_skbuild_argparser():
+ """Create and return a scikit-build argument parser.
"""
- newarg = newarg or arg
- parser = argparse.ArgumentParser()
- parser.add_argument(arg)
- ns, a = parser.parse_known_args(a)
- ns = tuple(vars(ns).items())
- if len(ns) > 0 and ns[0][1] is not None:
- key, value = ns[0]
- newargs = [newarg, value]
- if concatenate_value:
- b.append("=".join(newargs))
- elif value is not None:
- b.extend(newargs)
- return a, b
+ parser = argparse.ArgumentParser(add_help=False)
+ parser.add_argument(
+ '--build-type', default='Release', metavar='',
+ help='specify the CMake build type (e.g. Debug or Release)')
+ parser.add_argument(
+ '-G', '--generator', metavar='',
+ help='specify the CMake build system generator')
+ parser.add_argument(
+ '-j', metavar='N', type=int, dest='jobs',
+ help='allow N build jobs at once')
+ return parser
+
+
+def parse_skbuild_args(args, cmake_args, build_tool_args):
+ """
+ Parse arguments in the scikit-build argument set. Convert specified
+ arguments to proper format and append to cmake_args and build_tool_args.
+ Returns remaining arguments.
+ """
+ parser = create_skbuild_argparser()
+ ns, remaining_args = parser.parse_known_args(args)
+
+ # Construct CMake argument list
+ cmake_args.append('-DCMAKE_BUILD_TYPE:STRING=' + ns.build_type)
+ if ns.generator is not None:
+ cmake_args.extend(['-G', ns.generator])
+
+ # Construct build tool argument list
+ build_tool_args.extend(['--config', ns.build_type])
+ if ns.jobs is not None:
+ build_tool_args.extend(['-j', str(ns.jobs)])
+
+ return remaining_args
def parse_args():
@@ -43,31 +61,20 @@ def parse_args():
make = []
argsets = [dutils, cmake, make]
i = 0
+ separator = '--'
- argv = list(sys.argv)
- try:
- argv.index("--build-type")
- except ValueError:
- argv.append("--build-type")
- argv.append("Release")
-
- for arg in argv:
- if arg == '--':
+ for arg in sys.argv:
+ if arg == separator:
i += 1
+ if i >= len(argsets):
+ sys.exit(
+ "ERROR: Too many \"{}\" separators provided "
+ "(expected at most {}).".format(separator,
+ len(argsets) - 1))
else:
argsets[i].append(arg)
- # handle argument transformations
- dutils, cmake = move_arg('--build-type', dutils, cmake,
- newarg='-DCMAKE_BUILD_TYPE:STRING',
- concatenate_value=True)
- dutils, cmake = move_arg('-G', dutils, cmake)
- dutils, make = move_arg('-j', dutils, make)
-
- def absappend(x):
- return os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), x)
-
- dutils, dutils = move_arg('--egg-base', dutils, dutils, f=absappend)
+ dutils = parse_skbuild_args(dutils, cmake, make)
return dutils, cmake, make
@@ -79,6 +86,30 @@ def setup(*args, **kw):
"""
sys.argv, cmake_args, make_args = parse_args()
+ # Skip running CMake when user requests help
+ help_parser = argparse.ArgumentParser(add_help=False)
+ help_parser.add_argument('-h', '--help', action='store_true')
+ help_parser.add_argument('--help-commands', action='store_true')
+ ns = help_parser.parse_known_args()[0]
+ if ns.help_commands:
+ return upstream_setup(*args, **kw)
+ if ns.help:
+ # Prepend scikit-build help. Generate option descriptions using
+ # argparse.
+ skbuild_parser = create_skbuild_argparser()
+ arg_descriptions = [line
+ for line in skbuild_parser.format_help().split('\n')
+ if line.startswith(' ')]
+ print('scikit-build options:')
+ print('\n'.join(arg_descriptions))
+ print()
+ print('Arguments following a "--" are passed directly to CMake '
+ '(e.g. -DMY_VAR:BOOL=TRUE).')
+ print('Arguments following a second "--" are passed directly to the '
+ 'build tool.')
+ print()
+ return upstream_setup(*args, **kw)
+
packages = kw.get('packages', [])
package_dir = kw.get('package_dir', {})
package_data = kw.get('package_data', {}).copy()
@@ -172,6 +203,7 @@ def setup(*args, **kw):
cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
cmdclass['bdist_wheel'] = cmdclass.get(
'bdist_wheel', bdist_wheel.bdist_wheel)
+ cmdclass['egg_info'] = cmdclass.get('egg_info', egg_info.egg_info)
kw['cmdclass'] = cmdclass
return upstream_setup(*args, **kw)
| Improve python setup.py build --help output
- [ ] Document the different build type option
- [ ] Document how to set CMake variables
- [ ] Document how to set the CMake build system generator | scikit-build/scikit-build | diff --git a/tests/test_command_line.py b/tests/test_command_line.py
new file mode 100644
index 0000000..f48b68b
--- /dev/null
+++ b/tests/test_command_line.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""test_command_line
+----------------------------------
+
+Tests for various command line functionality.
+"""
+
+from . import project_setup_py_test, push_dir
+
+
+@project_setup_py_test(("samples", "hello"), ["--help"])
+def test_help(capsys):
+ out, err = capsys.readouterr()
+ assert "scikit-build options" in out
+ assert "--build-type" in out
+ assert "Global options:" in out
+ assert "usage:" in out
+
+
+def test_no_command():
+ with push_dir():
+
+ @project_setup_py_test(("samples", "hello"), [""])
+ def run():
+ pass
+
+ failed = False
+ try:
+ run()
+ except SystemExit as e:
+ failed = e.args[0].startswith('invalid command name')
+
+ assert failed
+
+
+def test_too_many_separators():
+ with push_dir():
+
+ @project_setup_py_test(("samples", "hello"), ["--"] * 3)
+ def run():
+ pass
+
+ failed = False
+ try:
+ run()
+ except SystemExit as e:
+ failed = e.args[0].startswith('ERROR: Too many')
+
+ assert failed
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8",
"tox"
],
"pre_install": [
"apt-get update",
"apt-get install -y build-essential"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
-e git+https://github.com/scikit-build/scikit-build.git@dd9814474b79009b7ac6523b4c14bbedd418f33f#egg=scikit_build
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: scikit-build
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- setuptools==25.1.6
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.16.2
- wheel==0.29.0
- zipp==3.6.0
prefix: /opt/conda/envs/scikit-build
| [
"tests/test_command_line.py::test_help",
"tests/test_command_line.py::test_too_many_separators"
]
| [
"tests/test_command_line.py::test_no_command"
]
| []
| []
| MIT License | 690 | [
"skbuild/setuptools_wrap.py",
"skbuild/command/egg_info.py",
"skbuild/cmaker.py"
]
| [
"skbuild/setuptools_wrap.py",
"skbuild/command/egg_info.py",
"skbuild/cmaker.py"
]
|
Azure__WALinuxAgent-352 | 2de3f63e6f35e3c9ed51efae4949b3ccd0d2156c | 2016-08-08 23:54:57 | 2de3f63e6f35e3c9ed51efae4949b3ccd0d2156c | diff --git a/azurelinuxagent/pa/deprovision/default.py b/azurelinuxagent/pa/deprovision/default.py
index b570c318..3a916e21 100644
--- a/azurelinuxagent/pa/deprovision/default.py
+++ b/azurelinuxagent/pa/deprovision/default.py
@@ -74,6 +74,11 @@ class DeprovisionHandler(object):
files_to_del = ['/root/.bash_history', '/var/log/waagent.log']
actions.append(DeprovisionAction(fileutil.rm_files, files_to_del))
+ def del_resolv(self, warnings, actions):
+ warnings.append("WARNING! /etc/resolv.conf will be deleted.")
+ files_to_del = ["/etc/resolv.conf"]
+ actions.append(DeprovisionAction(fileutil.rm_files, files_to_del))
+
def del_dhcp_lease(self, warnings, actions):
warnings.append("WARNING! Cached DHCP leases will be deleted.")
dirs_to_del = ["/var/lib/dhclient", "/var/lib/dhcpcd", "/var/lib/dhcp"]
@@ -109,6 +114,7 @@ class DeprovisionHandler(object):
self.del_lib_dir(warnings, actions)
self.del_files(warnings, actions)
+ self.del_resolv(warnings, actions)
if deluser:
self.del_user(warnings, actions)
diff --git a/azurelinuxagent/pa/deprovision/ubuntu.py b/azurelinuxagent/pa/deprovision/ubuntu.py
index 14f90de0..b45d4154 100644
--- a/azurelinuxagent/pa/deprovision/ubuntu.py
+++ b/azurelinuxagent/pa/deprovision/ubuntu.py
@@ -18,30 +18,25 @@
#
import os
-import azurelinuxagent.common.logger as logger
import azurelinuxagent.common.utils.fileutil as fileutil
from azurelinuxagent.pa.deprovision.default import DeprovisionHandler, \
- DeprovisionAction
-
-def del_resolv():
- if os.path.realpath('/etc/resolv.conf') != '/run/resolvconf/resolv.conf':
- logger.info("resolvconf is not configured. Removing /etc/resolv.conf")
- fileutil.rm_files('/etc/resolv.conf')
- else:
- logger.info("resolvconf is enabled; leaving /etc/resolv.conf intact")
- fileutil.rm_files('/etc/resolvconf/resolv.conf.d/tail',
- '/etc/resolvconf/resolv.conf.d/originial')
+ DeprovisionAction
class UbuntuDeprovisionHandler(DeprovisionHandler):
def __init__(self):
super(UbuntuDeprovisionHandler, self).__init__()
- def setup(self, deluser):
- warnings, actions = super(UbuntuDeprovisionHandler, self).setup(deluser)
- warnings.append("WARNING! Nameserver configuration in "
- "/etc/resolvconf/resolv.conf.d/{tail,originial} "
- "will be deleted.")
- actions.append(DeprovisionAction(del_resolv))
- return warnings, actions
-
+ def del_resolv(self, warnings, actions):
+ if os.path.realpath(
+ '/etc/resolv.conf') != '/run/resolvconf/resolv.conf':
+ warnings.append("WARNING! /etc/resolv.conf will be deleted.")
+ files_to_del = ["/etc/resolv.conf"]
+ actions.append(DeprovisionAction(fileutil.rm_files, files_to_del))
+ else:
+ warnings.append("WARNING! /etc/resolvconf/resolv.conf.d/tail "
+ "and /etc/resolvconf/resolv.conf.d/original will "
+ "be deleted.")
+ files_to_del = ["/etc/resolvconf/resolv.conf.d/tail",
+ "/etc/resolvconf/resolv.conf.d/original"]
+ actions.append(DeprovisionAction(fileutil.rm_files, files_to_del))
| [2.1.5]waagent doesn't delete /etc/resolv.conf during deprovisioning
Description of problem:
After deprovisioning, the /etc/resolv.conf is not deleted.
Version-Release number of selected component (if applicable):
WALinuxAgent-2.1.5
RHEL Version:
RHEL-7.3 internal build
How reproducible:
100%
Steps to Reproduce:
1. Prepare a RHEL7.3 VM on Azure. Check /etc/resolv.conf:
\# ls -l /etc/resolv.conf
-rw-r--r--. 1 root root 125 Aug 8 10:18 /etc/resolv.conf
2. Deprovision the VM:
\# waagent -deprovision and input 'y' to execute deprovisioning.
3. Check the messages:
WARNING! The waagent service will be stopped.
WARNING! All SSH host key pairs will be deleted.
WARNING! Cached DHCP leases will be deleted.
WARNING! root password will be disabled. You will not be able to login as root.
Do you want to proceed (y/n)y
4. Check the /etc/resolv.conf
\# ls -l /etc/resolv.conf
-rw-r--r--. 1 root root 125 Aug 8 10:18 /etc/resolv.conf
Actual results:
There's no WARNING message to show that /etc/resolv.conf file will be deleted.
The /etc/resolv.conf file is not deleted after deprovisioning.
Expected results:
There's a WARNING message to show that /etc/resolv.conf file will be deleted.
The /etc/resolv.conf file is deleted after deprovisioning.
Additional info:
1. In WALinuxAgent-2.0.16 the /etc/resolv.conf is deleted during deprovisioning.
2. It seems that there's no code to delete /etc/resolv.conf in azurelinuxagent/pa/deprovision/default.py.
| Azure/WALinuxAgent | diff --git a/tests/pa/test_deprovision.py b/tests/pa/test_deprovision.py
new file mode 100644
index 00000000..be349153
--- /dev/null
+++ b/tests/pa/test_deprovision.py
@@ -0,0 +1,48 @@
+# Copyright 2016 Microsoft Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Requires Python 2.4+ and Openssl 1.0+
+#
+
+from tests.tools import *
+from azurelinuxagent.pa.deprovision import get_deprovision_handler
+
+
+class TestDeprovision(AgentTestCase):
+ @distros("redhat")
+ def test_deprovision(self,
+ distro_name,
+ distro_version,
+ distro_full_name):
+ deprovision_handler = get_deprovision_handler(distro_name,
+ distro_version,
+ distro_full_name)
+ warnings, actions = deprovision_handler.setup(deluser=False)
+ assert any("/etc/resolv.conf" in w for w in warnings)
+
+ @distros("ubuntu")
+ def test_deprovision_ubuntu(self,
+ distro_name,
+ distro_version,
+ distro_full_name):
+ deprovision_handler = get_deprovision_handler(distro_name,
+ distro_version,
+ distro_full_name)
+
+ with patch("os.path.realpath", return_value="/run/resolvconf/resolv.conf"):
+ warnings, actions = deprovision_handler.setup(deluser=False)
+ assert any("/etc/resolvconf/resolv.conf.d/tail" in w for w in warnings)
+
+if __name__ == '__main__':
+ unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pyasn1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@2de3f63e6f35e3c9ed51efae4949b3ccd0d2156c#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- pyasn1==0.5.1
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/pa/test_deprovision.py::TestDeprovision::test_deprovision",
"tests/pa/test_deprovision.py::TestDeprovision::test_deprovision_ubuntu"
]
| []
| []
| []
| Apache License 2.0 | 691 | [
"azurelinuxagent/pa/deprovision/ubuntu.py",
"azurelinuxagent/pa/deprovision/default.py"
]
| [
"azurelinuxagent/pa/deprovision/ubuntu.py",
"azurelinuxagent/pa/deprovision/default.py"
]
|
|
zalando-stups__senza-304 | 87feda79265966aa5d6a67f3a652e2f0d7961e64 | 2016-08-09 13:36:53 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/manaus/route53.py b/senza/manaus/route53.py
index 0075eb0..6aab215 100644
--- a/senza/manaus/route53.py
+++ b/senza/manaus/route53.py
@@ -112,8 +112,9 @@ class Route53HostedZone:
'ResourceRecordSet': record.boto_dict}
change_batch['Changes'].append(change)
- client.change_resource_record_sets(HostedZoneId=self.id,
- ChangeBatch=change_batch)
+ if change_batch['Changes']:
+ client.change_resource_record_sets(HostedZoneId=self.id,
+ ChangeBatch=change_batch)
return change_batch
| Only call API for Route53 config if there is changes to be made
Only call AWS API when there is actual changes to be made.
Users reporting this exception:
```
raise ParamValidationError(report=report.generate_report())
botocore.exceptions.ParamValidationError: Parameter validation failed:
Invalid length for parameter ChangeBatch.Changes, value: 0, valid range: 1-inf
```
Error comes from https://github.com/zalando-stups/senza/blob/master/senza/manaus/route53.py#L114-L115 | zalando-stups/senza | diff --git a/tests/test_manaus/test_route53.py b/tests/test_manaus/test_route53.py
index 2441ba1..24c5441 100644
--- a/tests/test_manaus/test_route53.py
+++ b/tests/test_manaus/test_route53.py
@@ -209,6 +209,12 @@ def test_hosted_zone_upsert(monkeypatch):
ChangeBatch={'Changes': expected_changes,
'Comment': 'test'})
+ m_client.change_resource_record_sets.reset_mock()
+ change_batch2 = hosted_zone.upsert([], comment="test")
+ assert change_batch2['Comment'] == "test"
+ assert change_batch2['Changes'] == []
+ m_client.change_resource_record_sets.assert_not_called()
+
def test_hosted_zone_create(monkeypatch):
m_client = MagicMock()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@87feda79265966aa5d6a67f3a652e2f0d7961e64#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_route53.py::test_hosted_zone_upsert"
]
| []
| [
"tests/test_manaus/test_route53.py::test_hosted_zone_from_boto_dict",
"tests/test_manaus/test_route53.py::test_record_from_boto_dict",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones",
"tests/test_manaus/test_route53.py::test_route53_hosted_zones_paginated",
"tests/test_manaus/test_route53.py::test_get_records",
"tests/test_manaus/test_route53.py::test_route53_record_boto_dict",
"tests/test_manaus/test_route53.py::test_hosted_zone_create",
"tests/test_manaus/test_route53.py::test_hosted_zone_delete",
"tests/test_manaus/test_route53.py::test_to_alias",
"tests/test_manaus/test_route53.py::test_convert_domain_records_to_alias",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_domain_name",
"tests/test_manaus/test_route53.py::test_hosted_zone_get_by_id",
"tests/test_manaus/test_route53.py::test_get_by_domain_name"
]
| []
| Apache License 2.0 | 692 | [
"senza/manaus/route53.py"
]
| [
"senza/manaus/route53.py"
]
|
|
zalando-stups__senza-305 | cdd45d357b6767742393d4b2aa9af68715e6dd5e | 2016-08-09 14:56:30 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/stups_auto_configuration.py b/senza/components/stups_auto_configuration.py
index 5969330..415e380 100644
--- a/senza/components/stups_auto_configuration.py
+++ b/senza/components/stups_auto_configuration.py
@@ -28,12 +28,13 @@ def find_taupage_image(region: str):
def component_stups_auto_configuration(definition, configuration, args, info, force, account_info):
ec2 = boto3.resource('ec2', args.region)
+ vpc_id = configuration.get('VpcId', account_info.VpcID)
availability_zones = configuration.get('AvailabilityZones')
server_subnets = []
lb_subnets = []
lb_internal_subnets = []
- for subnet in ec2.subnets.filter(Filters=[{'Name': 'vpc-id', 'Values': [account_info.VpcID]}]):
+ for subnet in ec2.subnets.filter(Filters=[{'Name': 'vpc-id', 'Values': [vpc_id]}]):
name = get_tag(subnet.tags, 'Name', '')
if availability_zones and subnet.availability_zone not in availability_zones:
# skip subnet as it's not in one of the given AZs
| Support multiple VPCs
Senza's STUPS components currently assume a single VPC per region (Seven Seconds only configures a single VPC). There might be valid reasons to have multiple VPCs (e.g. a special VPC with VPN tunnel to some 3rd party location), so Senza needs to support that, too. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 1191bae..eceb9da 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -220,6 +220,42 @@ def test_component_stups_auto_configuration(monkeypatch):
assert {'myregion': {'Subnets': ['sn-3']}} == result['Mappings']['ServerSubnets']
+def test_component_stups_auto_configuration_vpc_id(monkeypatch):
+ args = MagicMock()
+ args.region = 'myregion'
+
+ configuration = {
+ 'Name': 'Config',
+ 'VpcId': 'vpc-123'
+ }
+
+ sn1 = MagicMock()
+ sn1.id = 'sn-1'
+ sn1.tags = [{'Key': 'Name', 'Value': 'dmz-1'}]
+ sn1.availability_zone = 'az-1'
+ sn2 = MagicMock()
+ sn2.id = 'sn-2'
+ sn2.tags = [{'Key': 'Name', 'Value': 'dmz-2'}]
+ sn2.availability_zone = 'az-2'
+ sn3 = MagicMock()
+ sn3.id = 'sn-3'
+ sn3.tags = [{'Key': 'Name', 'Value': 'internal-3'}]
+ sn3.availability_zone = 'az-1'
+ ec2 = MagicMock()
+ def get_subnets(Filters):
+ assert Filters == [{'Name': 'vpc-id', 'Values': ['vpc-123']}]
+ return [sn1, sn2, sn3]
+ ec2.subnets.filter = get_subnets
+ image = MagicMock()
+ ec2.images.filter.return_value = [image]
+ monkeypatch.setattr('boto3.resource', lambda x, y: ec2)
+
+ result = component_stups_auto_configuration({}, configuration, args, MagicMock(), False, MagicMock())
+
+ assert {'myregion': {'Subnets': ['sn-1', 'sn-2']}} == result['Mappings']['LoadBalancerSubnets']
+ assert {'myregion': {'Subnets': ['sn-3']}} == result['Mappings']['ServerSubnets']
+
+
def test_component_redis_node(monkeypatch):
mock_string = "foo"
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@cdd45d357b6767742393d4b2aa9af68715e6dd5e#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id"
]
| [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name"
]
| []
| Apache License 2.0 | 693 | [
"senza/components/stups_auto_configuration.py"
]
| [
"senza/components/stups_auto_configuration.py"
]
|
|
mkdocs__mkdocs-1020 | 343b820f9949787943fa1e230635674a2b304cc7 | 2016-08-10 02:09:12 | e7d8879d2b53d9e50bdfcf1cf29c48dc3f6bc87f | diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md
index b8d9926b..fdb21e46 100644
--- a/docs/about/release-notes.md
+++ b/docs/about/release-notes.md
@@ -92,6 +92,20 @@ to support such customization.
[blocks]: ../user-guide/styling-your-docs/#overriding-template-blocks
+#### Auto-Populated `extra_css` and `extra_javascript` Deprecated. (#986)
+
+In previous versions of MkDocs, if the `extra_css` or `extra_javascript` config
+settings were empty, MkDocs would scan the `docs_dir` and auto-populate each
+setting with all of the CSS and JavaScript files found. This behavior is
+deprecated and a warning will be issued. In the next release, the auto-populate
+feature will stop working and any unlisted CSS and JavaScript files will not be
+included in the HTML templates. In other words, they will still be copied to the
+`site-dir`, but they will not have any effect on the theme if they are not
+explicitly listed.
+
+All CSS and javaScript files in the `docs_dir` should be explicitly listed in
+the `extra_css` or `extra_javascript` config settings going forward.
+
#### Support for dirty builds. (#990)
For large sites the build time required to create the pages can become problematic,
diff --git a/docs/user-guide/configuration.md b/docs/user-guide/configuration.md
index 6bc7bf11..1971d23f 100644
--- a/docs/user-guide/configuration.md
+++ b/docs/user-guide/configuration.md
@@ -221,9 +221,9 @@ directory path.
### extra_css
-Set a list of CSS files to be included by the theme. For example, the
-following example will include the the extra.css file within the css
-subdirectory in your [docs_dir](#docs_dir).
+Set a list of CSS files in your `docs_dir` to be included by the theme. For
+example, the following example will include the the extra.css file within the
+css subdirectory in your [docs_dir](#docs_dir).
```yaml
extra_css:
@@ -231,27 +231,23 @@ extra_css:
- css/second_extra.css
```
-**default**: By default `extra_css` will contain a list of all the CSS files
-found within the `docs_dir`, if none are found it will be `[]` (an empty list).
+**default**: `[]` (an empty list).
### extra_javascript
-Set a list of JavaScript files to be included by the theme. See the example
-in [extra_css](#extra_css) for usage.
+Set a list of JavaScript files in your `docs_dir` to be included by the theme.
+See the example in [extra_css](#extra_css) for usage.
-**default**: By default `extra_javascript` will contain a list of all the
-JavaScript files found within the `docs_dir`, if none are found it will be `[]`
-(an empty list).
+**default**: `[]` (an empty list).
### extra_templates
-Set a list of templates to be built by MkDocs. To see more about writing
-templates for MkDocs read the documentation about [custom themes] and
-specifically the section about the [variables that are available] to templates.
-See the example in [extra_css](#extra_css) for usage.
+Set a list of templates in your `docs_dir` to be built by MkDocs. To see more
+about writing templates for MkDocs read the documentation about [custom themes]
+and specifically the section about the [variables that are available] to
+templates. See the example in [extra_css](#extra_css) for usage.
-**default**: Unlike extra_css and extra_javascript, by default `extra_templates`
-will be `[]` (an empty list).
+**default**: `[]` (an empty list).
### extra
diff --git a/mkdocs.yml b/mkdocs.yml
index 26463482..1d4c020c 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -18,6 +18,9 @@ pages:
- Contributing: about/contributing.md
- License: about/license.md
+extra_css:
+ - css/extra.css
+
markdown_extensions:
- toc:
permalink:
diff --git a/mkdocs/config/base.py b/mkdocs/config/base.py
index 9f376a9b..afcec0a9 100644
--- a/mkdocs/config/base.py
+++ b/mkdocs/config/base.py
@@ -64,24 +64,47 @@ class Config(utils.UserDict):
def _pre_validate(self):
+ failed, warnings = [], []
+
for key, config_option in self._schema:
- config_option.pre_validation(self, key_name=key)
+ try:
+ config_option.pre_validation(self, key_name=key)
+ warnings.extend([(key, w) for w in config_option.warnings])
+ config_option.reset_warnings()
+ except ValidationError as e:
+ failed.append((key, e))
+
+ return failed, warnings
def _post_validate(self):
+ failed, warnings = [], []
+
for key, config_option in self._schema:
- config_option.post_validation(self, key_name=key)
+ try:
+ config_option.post_validation(self, key_name=key)
+ warnings.extend([(key, w) for w in config_option.warnings])
+ config_option.reset_warnings()
+ except ValidationError as e:
+ failed.append((key, e))
+
+ return failed, warnings
def validate(self):
- self._pre_validate()
+ failed, warnings = self._pre_validate()
+
+ run_failed, run_warnings = self._validate()
- failed, warnings = self._validate()
+ failed.extend(run_failed)
+ warnings.extend(run_warnings)
# Only run the post validation steps if there are no failures, warnings
# are okay.
if len(failed) == 0:
- self._post_validate()
+ post_failed, post_warnings = self._post_validate()
+ failed.extend(post_failed)
+ warnings.extend(post_warnings)
return failed, warnings
diff --git a/mkdocs/config/config_options.py b/mkdocs/config/config_options.py
index 70f610c4..78ea5455 100644
--- a/mkdocs/config/config_options.py
+++ b/mkdocs/config/config_options.py
@@ -393,6 +393,16 @@ class Extras(OptionallyRequired):
config[key_name] = extras
+ if not extras:
+ return
+
+ self.warnings.append((
+ 'The following files have been automatically included in the '
+ 'documentation build and will be added to the HTML: {0}. This '
+ 'behavior is deprecated. In version 1.0 and later they will '
+ "need to be explicitly listed in the '{1}' config setting."
+ ).format(','.join(extras), key_name))
+
class Pages(Extras):
"""
| Static resources (especially CSS/JS!) shouldn't be automatically included
(Pulling this out from #972.)
Currently, MkDocs automatically includes all static resources from the docs dir in a build. This can cause issues if some files aren't meant to be included but are located in the docs dir. For example, this could happen with files that get preprocessed by another tool into `.md` files for MkDocs (which was how I planned on doing auto-generation of docs from code for MkDocs if I ever get around to it).
However, there's a more-significant issue: all CSS and JS files are automatically included in the generated HTML itself (except when hosting on RTD, apparently). This means that any CSS/JS used for static HTML files will get erroneously included in the MkDocs-generated HTML by default. The inconsistency with RTD adds another wrinkle to this. I think the behavior should be the same no matter what, and as Python says, "Explicit is better than implicit."
At minimum, I think CSS and JS shouldn't be automatically added to the generated HTML, but I'd prefer to see MkDocs only include files that are explicitly listed in `mkdocs.yml`. Doing so would make things more flexible when people want to do unusual things with MkDocs, and with globbing or the ability to include whole subdirectories of static resources, the configuration wouldn't be much more complex than it is now. | mkdocs/mkdocs | diff --git a/mkdocs/tests/build_tests.py b/mkdocs/tests/build_tests.py
index aead1c85..607d4244 100644
--- a/mkdocs/tests/build_tests.py
+++ b/mkdocs/tests/build_tests.py
@@ -28,6 +28,8 @@ def load_config(cfg=None):
cfg['site_name'] = 'Example'
if 'config_file_path' not in cfg:
cfg['config_file_path'] = os.path.join(os.path.abspath('.'), 'mkdocs.yml')
+ if 'extra_css' not in cfg:
+ cfg['extra_css'] = ['css/extra.css']
conf = config.Config(schema=config.DEFAULT_SCHEMA)
conf.load_dict(cfg)
diff --git a/mkdocs/tests/config/base_tests.py b/mkdocs/tests/config/base_tests.py
index 28f42f0a..ce56cd93 100644
--- a/mkdocs/tests/config/base_tests.py
+++ b/mkdocs/tests/config/base_tests.py
@@ -5,6 +5,7 @@ import unittest
from mkdocs import exceptions
from mkdocs.config import base, defaults
+from mkdocs.config.config_options import BaseConfigOption
class ConfigBaseTests(unittest.TestCase):
@@ -125,3 +126,111 @@ class ConfigBaseTests(unittest.TestCase):
base.load_config, config_file=config_file.name)
finally:
os.remove(config_file.name)
+
+ def test_pre_validation_error(self):
+ class InvalidConfigOption(BaseConfigOption):
+ def pre_validation(self, config, key_name):
+ raise base.ValidationError('pre_validation error')
+
+ c = base.Config(schema=(('invalid_option', InvalidConfigOption()), ))
+
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 1)
+ self.assertEqual(errors[0][0], 'invalid_option')
+ self.assertEqual(str(errors[0][1]), 'pre_validation error')
+ self.assertTrue(isinstance(errors[0][1], base.ValidationError))
+ self.assertEqual(len(warnings), 0)
+
+ def test_run_validation_error(self):
+ class InvalidConfigOption(BaseConfigOption):
+ def run_validation(self, value):
+ raise base.ValidationError('run_validation error')
+
+ c = base.Config(schema=(('invalid_option', InvalidConfigOption()), ))
+
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 1)
+ self.assertEqual(errors[0][0], 'invalid_option')
+ self.assertEqual(str(errors[0][1]), 'run_validation error')
+ self.assertTrue(isinstance(errors[0][1], base.ValidationError))
+ self.assertEqual(len(warnings), 0)
+
+ def test_post_validation_error(self):
+ class InvalidConfigOption(BaseConfigOption):
+ def post_validation(self, config, key_name):
+ raise base.ValidationError('post_validation error')
+
+ c = base.Config(schema=(('invalid_option', InvalidConfigOption()), ))
+
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 1)
+ self.assertEqual(errors[0][0], 'invalid_option')
+ self.assertEqual(str(errors[0][1]), 'post_validation error')
+ self.assertTrue(isinstance(errors[0][1], base.ValidationError))
+ self.assertEqual(len(warnings), 0)
+
+ def test_pre_and_run_validation_errors(self):
+ """ A pre_validation error does not stop run_validation from running. """
+ class InvalidConfigOption(BaseConfigOption):
+ def pre_validation(self, config, key_name):
+ raise base.ValidationError('pre_validation error')
+
+ def run_validation(self, value):
+ raise base.ValidationError('run_validation error')
+
+ c = base.Config(schema=(('invalid_option', InvalidConfigOption()), ))
+
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 2)
+ self.assertEqual(errors[0][0], 'invalid_option')
+ self.assertEqual(str(errors[0][1]), 'pre_validation error')
+ self.assertTrue(isinstance(errors[0][1], base.ValidationError))
+ self.assertEqual(errors[1][0], 'invalid_option')
+ self.assertEqual(str(errors[1][1]), 'run_validation error')
+ self.assertTrue(isinstance(errors[1][1], base.ValidationError))
+ self.assertEqual(len(warnings), 0)
+
+ def test_run_and_post_validation_errors(self):
+ """ A run_validation error stops post_validation from running. """
+ class InvalidConfigOption(BaseConfigOption):
+ def run_validation(self, value):
+ raise base.ValidationError('run_validation error')
+
+ def post_validation(self, config, key_name):
+ raise base.ValidationError('post_validation error')
+
+ c = base.Config(schema=(('invalid_option', InvalidConfigOption()), ))
+
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 1)
+ self.assertEqual(errors[0][0], 'invalid_option')
+ self.assertEqual(str(errors[0][1]), 'run_validation error')
+ self.assertTrue(isinstance(errors[0][1], base.ValidationError))
+ self.assertEqual(len(warnings), 0)
+
+ def test_validation_warnings(self):
+ class InvalidConfigOption(BaseConfigOption):
+ def pre_validation(self, config, key_name):
+ self.warnings.append('pre_validation warning')
+
+ def run_validation(self, value):
+ self.warnings.append('run_validation warning')
+
+ def post_validation(self, config, key_name):
+ self.warnings.append('post_validation warning')
+
+ c = base.Config(schema=(('invalid_option', InvalidConfigOption()), ))
+
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 0)
+ self.assertEqual(warnings, [
+ ('invalid_option', 'pre_validation warning'),
+ ('invalid_option', 'run_validation warning'),
+ ('invalid_option', 'post_validation warning'),
+ ])
diff --git a/mkdocs/tests/config/config_options_tests.py b/mkdocs/tests/config/config_options_tests.py
index de9396bd..284dbdc6 100644
--- a/mkdocs/tests/config/config_options_tests.py
+++ b/mkdocs/tests/config/config_options_tests.py
@@ -289,7 +289,7 @@ class ExtrasTest(unittest.TestCase):
self.assertRaises(config_options.ValidationError,
option.validate, {})
- def test_talk(self):
+ def test_walk(self):
option = config_options.Extras(utils.is_markdown_file)
diff --git a/mkdocs/tests/config/config_tests.py b/mkdocs/tests/config/config_tests.py
index 497cfa6f..ff14478b 100644
--- a/mkdocs/tests/config/config_tests.py
+++ b/mkdocs/tests/config/config_tests.py
@@ -198,7 +198,6 @@ class ConfigTests(unittest.TestCase):
)
conf = {
- 'site_name': 'Example',
'config_file_path': j(os.path.abspath('..'), 'mkdocs.yml')
}
@@ -211,7 +210,11 @@ class ConfigTests(unittest.TestCase):
c = config.Config(schema=(
('docs_dir', config_options.Dir(default='docs')),
('site_dir', config_options.SiteDir(default='site')),
+ ('config_file_path', config_options.Type(utils.string_types))
))
c.load_dict(patch)
- self.assertRaises(config_options.ValidationError, c.validate)
+ errors, warnings = c.validate()
+
+ self.assertEqual(len(errors), 1)
+ self.assertEqual(warnings, [])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 5
} | 0.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/project.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
exceptiongroup==1.2.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
livereload==2.7.1
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@343b820f9949787943fa1e230635674a2b304cc7#egg=mkdocs
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
tomli==2.2.1
tornado==6.4.2
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- exceptiongroup==1.2.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- livereload==2.7.1
- markdown==3.7
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- tomli==2.2.1
- tornado==6.4.2
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_post_validation_error",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_pre_and_run_validation_errors",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_pre_validation_error",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_validation_warnings",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_doc_dir_in_site_dir"
]
| [
"mkdocs/tests/build_tests.py::BuildTests::test_absolute_link",
"mkdocs/tests/build_tests.py::BuildTests::test_anchor_only_link",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_asbolute_media",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_link",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_link_differing_directory",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_link_with_anchor",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_internal_media",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_markdown",
"mkdocs/tests/build_tests.py::BuildTests::test_convert_multiple_internal_links",
"mkdocs/tests/build_tests.py::BuildTests::test_copy_theme_files",
"mkdocs/tests/build_tests.py::BuildTests::test_copying_media",
"mkdocs/tests/build_tests.py::BuildTests::test_dont_convert_code_block_urls",
"mkdocs/tests/build_tests.py::BuildTests::test_empty_document",
"mkdocs/tests/build_tests.py::BuildTests::test_extension_config",
"mkdocs/tests/build_tests.py::BuildTests::test_extra_context",
"mkdocs/tests/build_tests.py::BuildTests::test_ignore_email_links",
"mkdocs/tests/build_tests.py::BuildTests::test_ignore_external_link",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_custom_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_duplicate_custom_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_fenced_code_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_markdown_table_extension",
"mkdocs/tests/build_tests.py::BuildTests::test_not_use_directory_urls",
"mkdocs/tests/build_tests.py::BuildTests::test_strict_mode_invalid",
"mkdocs/tests/build_tests.py::BuildTests::test_strict_mode_valid",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_load_from_closed_file",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_load_from_file",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_load_from_open_file",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_config_option",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_default_pages",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_default_pages_nested"
]
| [
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_load_from_deleted_file",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_load_from_missing_file",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_load_missing_required",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_missing_required",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_run_and_post_validation_errors",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_run_validation_error",
"mkdocs/tests/config/base_tests.py::ConfigBaseTests::test_unrecognised_keys",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_default",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_empty",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_replace_default",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_required",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_required_no_default",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_length",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_multiple_types",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_single_type",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_invalid",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_invalid_url",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_valid_url",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_bitbucket",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_custom",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_github",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_bitbucket",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_custom",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_github",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_doc_dir_is_config_dir",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_file",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_incorrect_type_attribute_error",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_incorrect_type_type_error",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_missing_dir",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_missing_dir_but_required",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_valid_dir",
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_doc_dir_in_site_dir",
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_site_dir_in_docs_dir",
"mkdocs/tests/config/config_options_tests.py::ThemeTest::test_theme",
"mkdocs/tests/config/config_options_tests.py::ThemeTest::test_theme_invalid",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_empty",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_invalid",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_walk",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_invalid_config",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_invalid_type",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided_dict",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided_empty",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_invalid_pages",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_many_pages",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_one_page",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::PrivateTest::test_defined",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_builtins",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_builtins_config",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_configkey",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_duplicates",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_config_item",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_config_option",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_dict_item",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_list_dicts",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_mixed_list",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_none",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_not_list",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_simple_list",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_empty_config",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_invalid_config",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_missing_config_file",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_missing_site_name",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_nonexistant_config",
"mkdocs/tests/config/config_tests.py::ConfigTests::test_theme"
]
| []
| BSD 2-Clause "Simplified" License | 694 | [
"docs/about/release-notes.md",
"docs/user-guide/configuration.md",
"mkdocs/config/base.py",
"mkdocs.yml",
"mkdocs/config/config_options.py"
]
| [
"docs/about/release-notes.md",
"docs/user-guide/configuration.md",
"mkdocs/config/base.py",
"mkdocs.yml",
"mkdocs/config/config_options.py"
]
|
|
fabiobatalha__chess_master-2 | 041556a8017679512fb37b2fe73ccb226eadf125 | 2016-08-10 04:17:25 | 041556a8017679512fb37b2fe73ccb226eadf125 | diff --git a/chess.py b/chess.py
index 8c57bc4..c9d7287 100644
--- a/chess.py
+++ b/chess.py
@@ -101,6 +101,19 @@ class Bishop(Pieces):
def __str__(self):
return self.NAME
+
+ def threatening_zone(self, max_size):
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
+ """
+
+ import pdb; pdb.set_trace()
+
+ self.position
class Kinight(Pieces):
@@ -110,6 +123,14 @@ class Kinight(Pieces):
return self.NAME
+ def threatening_zone():
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
+
class King(Pieces):
NAME = 'king'
@@ -118,6 +139,14 @@ class King(Pieces):
return self.NAME
+ def threatening_zone(self):
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
+
class Pawn(Pieces):
NAME = 'pawn'
@@ -126,6 +155,24 @@ class Pawn(Pieces):
return self.NAME
+ def threatening_zone(self, max_size):
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
+ """
+ zone = []
+
+ x, y = self.position
+
+ zone.append((x+1, y+1))
+ zone.append((x-1, y+1))
+
+ return [(x, y) for x, y in zone if x in range(max_size) and y in range(max_size)]
+
+
class Queen(Pieces):
NAME = 'queen'
@@ -134,6 +181,14 @@ class Queen(Pieces):
return self.NAME
+ def threatening_zone():
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
+
class Rook(Pieces):
NAME = 'rook'
@@ -141,3 +196,11 @@ class Rook(Pieces):
def __str__(self):
return self.NAME
+
+ def threatening_zone():
+ """
+ Get the current position of the piece and produce a list of threathening
+ places in the board.
+ """
+
+ pass
| Implement threatening zone for panws
Implement a method to delivery the threatening zone for panws | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index 24ff9a6..acb7e6d 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -89,6 +89,84 @@ class TestsChessMasterPiece(unittest.TestCase):
with self.assertRaises(ValueError):
pawn.set_position((1, 2, 4,))
+
+ def test_pawn_threatening_zone(self):
+ """
+ Testing pawn when the piece is able to threatening other piece in both
+ sides.
+ """
+
+ pawn = chess.Pawn((4,0))
+
+ expected = [
+ (3,1),
+ (5,1)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+
+ def test_pawn_threatening_x_boundary_left(self):
+ """
+ Testing boundary where the pawn can not move to the left
+ """
+
+ pawn = chess.Pawn((0,0))
+
+ expected = [
+ (1,1)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_pawn_threatening_x_boundary_right(self):
+ """
+ Testing boundary where the pawn can not move to the right
+ """
+
+ pawn = chess.Pawn((7,0))
+
+ expected = [
+ (6,1)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_pawn_threatening_zone_y_boundary(self):
+ """
+ Testing boundary where the pawn can not move forward
+ """
+
+ pawn = chess.Pawn((4,7))
+
+ expected = []
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_pawn_threatening_zone_y_boundary_last_move(self):
+ """
+ Testing boundary where the pawn can make your last move forward
+ """
+
+ pawn = chess.Pawn((4,6))
+
+ expected = [
+ (3,7),
+ (5,7)
+ ]
+
+ self.assertEqual(
+ sorted(pawn.threatening_zone(8)), sorted(expected)
+ )
+
class TestsChessMasterBoard(unittest.TestCase):
def test_put_1_piece(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@041556a8017679512fb37b2fe73ccb226eadf125#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move"
]
| []
| [
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
]
| []
| MIT License | 695 | [
"chess.py"
]
| [
"chess.py"
]
|
|
fabiobatalha__chess_master-4 | dabbee5c27437b805262f12e0181aceae7066bcb | 2016-08-10 21:26:01 | dabbee5c27437b805262f12e0181aceae7066bcb | diff --git a/masterchess/chess.py b/masterchess/chess.py
index 63f5aab..35ba524 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -79,6 +79,74 @@ class Pieces(object):
self.set_position(position or (0, 0))
+ def _se_positions(self, max_size):
+ """
+ Retrieve the south east positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x += 1
+ y -= 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _ne_positions(self, max_size):
+ """
+ Retrieve the north east positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x += 1
+ y += 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _nw_positions(self, max_size):
+ """
+ Retrieve the south weast positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x -= 1
+ y += 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _sw_positions(self, max_size):
+ """
+ Retrieve the south weast positions of as given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x -= 1
+ y -= 1
+ if x not in rg or y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
def set_position(self, position):
"""
Set the x,y position of the piece on the board.
@@ -113,7 +181,14 @@ class Bishop(Pieces):
Arguments:
max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ zone += self._se_positions(max_size)
+ zone += self._ne_positions(max_size)
+ zone += self._nw_positions(max_size)
+ zone += self._sw_positions(max_size)
+
+ return zone
class Kinight(Pieces):
| Implement threatening zone for bishops | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index c70e5aa..a6db443 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import unittest
from masterchess import chess
@@ -164,6 +163,92 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(pawn.threatening_zone(8)), sorted(expected)
)
+ def test_se_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (4, 3),
+ (5, 2),
+ (6, 1),
+ (7, 0),
+ ]
+
+ self.assertEqual(
+ sorted(piece._se_positions(8)), sorted(expected)
+ )
+
+ def test_ne_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (4, 5),
+ (5, 6),
+ (6, 7)
+ ]
+
+ self.assertEqual(
+ sorted(piece._ne_positions(8)), sorted(expected)
+ )
+
+ def test_nw_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (2, 5),
+ (1, 6),
+ (0, 7)
+ ]
+
+ self.assertEqual(
+ sorted(piece._nw_positions(8)), sorted(expected)
+ )
+
+ def test_sw_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (2, 3),
+ (1, 2),
+ (0, 1)
+ ]
+
+ self.assertEqual(
+ sorted(piece._sw_positions(8)), sorted(expected)
+ )
+
+
+ def test_bishop_threatening_zone(self):
+ """
+ Testing bishop moves when the piece is able to threatening other pieces
+ in all directions.
+ """
+
+ bishop = chess.Bishop((3, 4))
+
+ expected = [
+ (0, 1),
+ (0, 7),
+ (1, 2),
+ (1, 6),
+ (2, 3),
+ (2, 5),
+ (4, 3),
+ (4, 5),
+ (5, 2),
+ (5, 6),
+ (6, 1),
+ (6, 7),
+ (7, 0)
+ ]
+
+ self.assertEqual(
+ sorted(bishop.threatening_zone(8)), sorted(expected)
+ )
+
class TestsChessMasterBoard(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@dabbee5c27437b805262f12e0181aceae7066bcb#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions"
]
| []
| [
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
]
| []
| MIT License | 696 | [
"masterchess/chess.py"
]
| [
"masterchess/chess.py"
]
|
|
fabiobatalha__chess_master-6 | 9fe35b2b4029e1eeedeb69b941eba6cb955182a3 | 2016-08-10 21:46:26 | 9fe35b2b4029e1eeedeb69b941eba6cb955182a3 | diff --git a/masterchess/chess.py b/masterchess/chess.py
index 35ba524..91432b1 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -79,9 +79,73 @@ class Pieces(object):
self.set_position(position or (0, 0))
+ def _w_positions(self, max_size):
+ """
+ Retrieve the west positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x -= 1
+ if x not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _l_positions(self, max_size):
+ """
+ Retrieve the lest positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ x += 1
+ if x not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _n_positions(self, max_size):
+ """
+ Retrieve the south positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ y += 1
+ if y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
+ def _s_positions(self, max_size):
+ """
+ Retrieve the south positions of a given position
+ """
+ ne_positions = []
+
+ x, y = self.position
+ rg = range(max_size)
+ while True:
+ y -= 1
+ if y not in rg:
+ break
+ ne_positions.append((x, y))
+
+ return ne_positions
+
def _se_positions(self, max_size):
"""
- Retrieve the south east positions of as given position
+ Retrieve the south east positions of a given position
"""
ne_positions = []
@@ -98,7 +162,7 @@ class Pieces(object):
def _ne_positions(self, max_size):
"""
- Retrieve the north east positions of as given position
+ Retrieve the north east positions of a given position
"""
ne_positions = []
@@ -115,7 +179,7 @@ class Pieces(object):
def _nw_positions(self, max_size):
"""
- Retrieve the south weast positions of as given position
+ Retrieve the south weast positions of a given position
"""
ne_positions = []
@@ -132,7 +196,7 @@ class Pieces(object):
def _sw_positions(self, max_size):
"""
- Retrieve the south weast positions of as given position
+ Retrieve the south weast positions of a given position
"""
ne_positions = []
@@ -276,10 +340,21 @@ class Rook(Pieces):
return self.NAME
- def threatening_zone():
+ def threatening_zone(self, max_size):
"""
Get the current position of the piece and produce a list of threathening
places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ zone += self._s_positions(max_size)
+ zone += self._n_positions(max_size)
+ zone += self._l_positions(max_size)
+ zone += self._w_positions(max_size)
+
+ return zone
+
| Implement threatening zone for rooks | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index a6db443..e48b78b 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -163,6 +163,64 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(pawn.threatening_zone(8)), sorted(expected)
)
+ def test_w_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (2, 4),
+ (1, 4),
+ (0, 4)
+ ]
+
+ self.assertEqual(
+ sorted(piece._w_positions(8)), sorted(expected)
+ )
+
+ def test_l_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (4, 4),
+ (5, 4),
+ (6, 4),
+ (7, 4)
+ ]
+
+ self.assertEqual(
+ sorted(piece._l_positions(8)), sorted(expected)
+ )
+
+ def test_n_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (3, 5),
+ (3, 6),
+ (3, 7)
+ ]
+
+ self.assertEqual(
+ sorted(piece._n_positions(8)), sorted(expected)
+ )
+
+ def test_s_positions(self):
+
+ piece = chess.Pieces((3, 4))
+
+ expected = [
+ (3, 3),
+ (3, 2),
+ (3, 1),
+ (3, 0),
+ ]
+
+ self.assertEqual(
+ sorted(piece._s_positions(8)), sorted(expected)
+ )
+
def test_se_positions(self):
piece = chess.Pieces((3, 4))
@@ -223,8 +281,7 @@ class TestsChessMasterPiece(unittest.TestCase):
def test_bishop_threatening_zone(self):
"""
- Testing bishop moves when the piece is able to threatening other pieces
- in all directions.
+ Testing gather the bishop allowed moves
"""
bishop = chess.Bishop((3, 4))
@@ -249,6 +306,33 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(bishop.threatening_zone(8)), sorted(expected)
)
+ def test_rook_threatening_zone(self):
+ """
+ Testing gather the rook allowed moves
+ """
+
+ rook = chess.Rook((3, 4))
+
+ expected = [
+ (0, 4),
+ (1, 4),
+ (2, 4),
+ (4, 4),
+ (5, 4),
+ (6, 4),
+ (7, 4),
+ (3, 0),
+ (3, 1),
+ (3, 2),
+ (3, 3),
+ (3, 5),
+ (3, 6),
+ (3, 7)
+ ]
+
+ self.assertEqual(
+ sorted(rook.threatening_zone(8)), sorted(expected)
+ )
class TestsChessMasterBoard(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@9fe35b2b4029e1eeedeb69b941eba6cb955182a3#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
- pytest-cov==6.0.0
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_l_positions",
"tests/tests.py::TestsChessMasterPiece::test_n_positions",
"tests/tests.py::TestsChessMasterPiece::test_rook_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_s_positions",
"tests/tests.py::TestsChessMasterPiece::test_w_positions"
]
| []
| [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
]
| []
| MIT License | 697 | [
"masterchess/chess.py"
]
| [
"masterchess/chess.py"
]
|
|
fabiobatalha__chess_master-11 | 9843f1ab8accc5342d4f3712260fc4fe74640f09 | 2016-08-10 23:28:28 | 9843f1ab8accc5342d4f3712260fc4fe74640f09 | diff --git a/masterchess/chess.py b/masterchess/chess.py
index 069d27e..f2bd2ad 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -280,13 +280,29 @@ class King(Pieces):
return self.NAME
- def threatening_zone(self):
+ def threatening_zone(self, max_size):
"""
Get the current position of the piece and produce a list of threathening
places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ x, y = self.position
+ zone.append((x-1, y+1))
+ zone.append((x, y+1))
+ zone.append((x+1, y+1))
+ zone.append((x-1, y))
+ zone.append((x+1, y))
+ zone.append((x-1, y-1))
+ zone.append((x, y-1))
+ zone.append((x+1, y-1))
+
+ rg = range(max_size)
+ return [(x, y) for x, y in zone if x in rg and y in rg]
class Pawn(Pieces):
| Implement threatening zone for kings | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index 2702e57..71bb655 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -375,6 +375,134 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(queen.threatening_zone(8)), sorted(expected)
)
+ def test_king_threatening_zone(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((3, 4))
+
+ expected = [
+ (2, 5),
+ (3, 5),
+ (4, 5),
+ (2, 4),
+ (4, 4),
+ (2, 3),
+ (3, 3),
+ (4, 3)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_king_threatening_zone_boundary_bottom(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((3, 0))
+
+ expected = [
+ (2, 1),
+ (3, 1),
+ (4, 1),
+ (2, 0),
+ (4, 0)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_king_threatening_zone_boundary_top(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((3, 7))
+
+ expected = [
+ (2, 7),
+ (4, 7),
+ (2, 6),
+ (3, 6),
+ (4, 6)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_king_threatening_zone_boundary_top_left(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((0, 7))
+
+ expected = [
+ (1, 7),
+ (0, 6),
+ (1, 6)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_king_threatening_zone_boundary_top_right(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((7, 7))
+
+ expected = [
+ (6, 7),
+ (6, 6),
+ (7, 6)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_king_threatening_zone_boundary_bottom_left(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((0, 0))
+
+ expected = [
+ (0, 1),
+ (1, 1),
+ (1, 0)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_king_threatening_zone_boundary_bottom_right(self):
+ """
+ Testing gather the king allowed moves
+ """
+
+ king = chess.King((7, 0))
+
+ expected = [
+ (6, 0),
+ (6, 1),
+ (7, 1)
+ ]
+
+ self.assertEqual(
+ sorted(king.threatening_zone(8)), sorted(expected)
+ )
+
class TestsChessMasterBoard(unittest.TestCase):
def test_put_1_piece(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@9843f1ab8accc5342d4f3712260fc4fe74640f09#egg=chessmaster
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_right",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_right"
]
| []
| [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_e_positions",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_n_positions",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_queen_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_rook_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_s_positions",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions",
"tests/tests.py::TestsChessMasterPiece::test_w_positions",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
]
| []
| MIT License | 698 | [
"masterchess/chess.py"
]
| [
"masterchess/chess.py"
]
|
|
fabiobatalha__chess_master-12 | e1eefa4035d6d6963da2ea543fc4ecb6c919c2fb | 2016-08-11 00:33:22 | e1eefa4035d6d6963da2ea543fc4ecb6c919c2fb | diff --git a/masterchess/chess.py b/masterchess/chess.py
index f2bd2ad..146c8d9 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -263,13 +263,31 @@ class Kinight(Pieces):
return self.NAME
- def threatening_zone():
+ def threatening_zone(self, max_size):
"""
Get the current position of the piece and produce a list of threathening
places in the board.
+
+ Arguments:
+ max_size -- integer that defines de boundary limits of the board.
"""
- pass
+ zone = []
+
+ x, y = self.position
+
+ zone.append((x-1, y+2))
+ zone.append((x+1, y+2))
+ zone.append((x-2, y+1))
+ zone.append((x+2, y+1))
+ zone.append((x-2, y-1))
+ zone.append((x+2, y-1))
+ zone.append((x-1, y-2))
+ zone.append((x+1, y-2))
+
+ rg = range(max_size)
+
+ return [(x, y) for x, y in zone if x in rg and y in rg]
class King(Pieces):
@@ -292,6 +310,7 @@ class King(Pieces):
zone = []
x, y = self.position
+
zone.append((x-1, y+1))
zone.append((x, y+1))
zone.append((x+1, y+1))
| Implement threatening zone for kinights | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index 71bb655..2837ed0 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -375,6 +375,692 @@ class TestsChessMasterPiece(unittest.TestCase):
sorted(queen.threatening_zone(8)), sorted(expected)
)
+ def test_kinight_threatening_zone(self):
+ """
+ Testing gather the kinight allowed moves
+ """
+
+ kinight = chess.Kinight((3, 4))
+
+ expected = [
+ (2, 6),
+ (4, 6),
+ (1, 5),
+ (5, 5),
+ (1, 3),
+ (5, 3),
+ (2, 2),
+ (4, 2)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 0))
+
+ expected = [
+ (1, 2),
+ (2, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 0))
+
+ expected = [
+ (0, 2),
+ (2, 2),
+ (3, 1),
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 0))
+
+ expected = [
+ (1, 2),
+ (3, 2),
+ (0, 1),
+ (4, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 0))
+
+ expected = [
+ (6, 2),
+ (5, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 0))
+
+ expected = [
+ (5, 2),
+ (7, 2),
+ (4, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_0(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 0))
+
+ expected = [
+ (4, 2),
+ (6, 2),
+ (3, 1),
+ (7, 1)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 1))
+
+ expected = [
+ (1, 3),
+ (2, 2),
+ (2, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 1))
+
+ expected = [
+ (0, 3),
+ (2, 3),
+ (3, 2),
+ (3, 0),
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 1))
+
+ expected = [
+ (1, 3),
+ (3, 3),
+ (0, 2),
+ (4, 2),
+ (0, 0),
+ (4, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 1))
+
+ expected = [
+ (6, 3),
+ (5, 2),
+ (5, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 1))
+
+ expected = [
+ (5, 3),
+ (7, 3),
+ (4, 2),
+ (4, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_1(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 1))
+
+ expected = [
+ (4, 3),
+ (6, 3),
+ (3, 2),
+ (7, 2),
+ (3, 0),
+ (7, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 2))
+
+ expected = [
+ (1, 4),
+ (2, 3),
+ (2, 1),
+ (1, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 2))
+
+ expected = [
+ (0, 4),
+ (2, 4),
+ (3, 3),
+ (3, 1),
+ (2, 0),
+ (0, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 2))
+
+ expected = [
+ (1, 4),
+ (3, 4),
+ (0, 3),
+ (4, 3),
+ (0, 1),
+ (4, 1),
+ (1, 0),
+ (3, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 2))
+
+ expected = [
+ (6, 4),
+ (5, 3),
+ (5, 1),
+ (6, 0),
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 2))
+
+ expected = [
+ (5, 4),
+ (7, 4),
+ (4, 3),
+ (4, 1),
+ (5, 0),
+ (7, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_2(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 2))
+
+ expected = [
+ (4, 4),
+ (6, 4),
+ (3, 3),
+ (7, 3),
+ (3, 1),
+ (7, 1),
+ (4, 0),
+ (6, 0)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 7))
+
+ expected = [
+ (2, 6),
+ (1, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 7))
+
+ expected = [
+ (3, 6),
+ (0, 5),
+ (2, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 7))
+
+ expected = [
+ (0, 6),
+ (4, 6),
+ (1, 5),
+ (3, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 7))
+
+ expected = [
+ (5, 6),
+ (6, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 7))
+
+ expected = [
+ (4, 6),
+ (5, 5),
+ (7, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_7(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 7))
+
+ expected = [
+ (3, 6),
+ (7, 6),
+ (4, 5),
+ (6, 5)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 6))
+
+ expected = [
+ (2, 7),
+ (2, 5),
+ (1, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 6))
+
+ expected = [
+ (3, 7),
+ (3, 5),
+ (0, 4),
+ (2, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 6))
+
+ expected = [
+ (0, 7),
+ (4, 7),
+ (0, 5),
+ (4, 5),
+ (1, 4),
+ (3, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 6))
+
+ expected = [
+ (5, 7),
+ (5, 5),
+ (6, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 6))
+
+ expected = [
+ (4, 7),
+ (4, 5),
+ (5, 4),
+ (7, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_6(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 6))
+
+ expected = [
+ (3, 7),
+ (7, 7),
+ (3, 5),
+ (7, 5),
+ (4, 4),
+ (6, 4)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_0_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((0, 5))
+
+ expected = [
+ (1, 7),
+ (2, 6),
+ (2, 4),
+ (1, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_1_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((1, 5))
+
+ expected = [
+ (0, 7),
+ (2, 7),
+ (3, 6),
+ (3, 4),
+ (0, 3),
+ (2, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_2_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((2, 5))
+
+ expected = [
+ (1, 7),
+ (3, 7),
+ (0, 6),
+ (4, 6),
+ (0, 4),
+ (4, 4),
+ (1, 3),
+ (3, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_7_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((7, 5))
+
+ expected = [
+ (6, 7),
+ (5, 6),
+ (5, 4),
+ (6, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_6_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((6, 5))
+
+ expected = [
+ (5, 7),
+ (7, 7),
+ (4, 6),
+ (4, 4),
+ (5, 3),
+ (7, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
+ def test_kinight_threatening_zone_boundary_5_5(self):
+ """
+ Kinight Boundary Testing
+ """
+
+ kinight = chess.Kinight((5, 5))
+
+ expected = [
+ (4, 7),
+ (6, 7),
+ (3, 6),
+ (7, 6),
+ (3, 4),
+ (7, 4),
+ (4, 3),
+ (6, 3)
+ ]
+
+ self.assertEqual(
+ sorted(kinight.threatening_zone(8)), sorted(expected)
+ )
+
def test_king_threatening_zone(self):
"""
Testing gather the king allowed moves
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/fabiobatalha/chess_master.git@e1eefa4035d6d6963da2ea543fc4ecb6c919c2fb#egg=chessmaster
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_7"
]
| []
| [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_e_positions",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_right",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_right",
"tests/tests.py::TestsChessMasterPiece::test_n_positions",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_queen_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_rook_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_s_positions",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions",
"tests/tests.py::TestsChessMasterPiece::test_w_positions",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
]
| []
| MIT License | 699 | [
"masterchess/chess.py"
]
| [
"masterchess/chess.py"
]
|
|
fabiobatalha__chess_master-14 | 361087d1f6951733624962438d7db5158df503dc | 2016-08-11 02:16:29 | 361087d1f6951733624962438d7db5158df503dc | diff --git a/masterchess/chess.py b/masterchess/chess.py
index 146c8d9..6711231 100644
--- a/masterchess/chess.py
+++ b/masterchess/chess.py
@@ -10,12 +10,23 @@ class OccupiedSquare(BoardExceptions):
def __init__(self, value):
self.value = value
+class Threatening(BoardExceptions):
+
+ def __init__(self, value):
+ self.value = value
+
+class Threatened(BoardExceptions):
+
+ def __init__(self, value):
+ self.value = value
+
class Board(object):
def __init__(self, size):
self.board = None
self.pieces = []
+ self._size = size
self._setup_board(size)
def _setup_board(self, size):
@@ -27,10 +38,11 @@ class Board(object):
size -- integer
"""
- if not isinstance(size, int):
+ if not isinstance(self._size, int):
raise ValueError('board size must be integer')
- self.board = [[None for i in range(size)] for i in range(size)]
+ rg = range(self._size)
+ self.board = [[None for i in rg] for i in rg]
def _update_board(self):
"""
@@ -50,25 +62,58 @@ class Board(object):
"""
return [[(str(x) if x else x) for x in i] for i in self.board]
+ @property
+ def picture_threat(self):
+ """
+ Return a 2 dimension list with a picture of the current state of the
+ board and pieces position.
+
+ It will display T on places where pieces will be threatened and the
+ piece name where the pieces are allocated.
+ """
+
+ board = self.picture
+
+ for piece in self.pieces:
+ for threat in piece.threatening_zone(self._size):
+ x, y = threat
+ board[x][y] = 'T' if board[x][y] is None else board[x][y]
+
+ return board
+
def place_piece(self, piece):
"""
Put a given piece on the board.
- The piece must not threatening any other piece already available in
- the board. If so, it will raise Threatened Exception.
-
- The piece position must not match with the position of any other piece
+ (Rule 1) The piece position must not match with the position of any other piece
already available in the board. If so, it will raise OccupiedSquare
Exception.
+ (Rule 2) The piece position must not be threatened by other pieces already safe
+ disposed in the board. If so, it will raise Threatened Exception.
+ Exception.
+
+ (Rule 3) The piece must not threatening any other piece already available in
+ the board. If so, it will raise Threatening Exception.
+
Arguments:
piece -- a instance o Pieces (Pawn, King, Bishop, Queen, Rook, Kinight)
"""
x, y = piece.position
+ # Rule (1)
if self.picture[x][y] is not None:
raise OccupiedSquare(str(piece.position))
+ # Rule (2)
+ if self.picture_threat[x][y] is not None and self.picture_threat[x][y] == 'T':
+ raise Threatened(str(piece.position))
+
+ # Rule (3)
+ pieces_on_board = [i.position for i in self.pieces]
+ if len(set(piece.threatening_zone(self._size)).intersection(pieces_on_board)) >= 1:
+ raise Threatening(str(piece.position))
+
self.pieces.append(piece)
self._update_board()
@@ -406,4 +451,3 @@ class Rook(Pieces):
zone += self._w_positions(max_size)
return zone
-
| Implement the threatening and threatened validations
improve the Board.place_piece method to:
Put a given piece on the board.
(Rule 1) The piece position must not match with the position of any other piece
already available in the board. If so, it will raise OccupiedSquare
Exception.
(Rule 2) The piece position must not be threatened by other pieces already safe
disposed in the board. If so, it will raise Threatened Exception.
Exception.
(Rule 3) The piece must not threatening any other piece already available in
the board. If so, it will raise Threatening Exception. | fabiobatalha/chess_master | diff --git a/tests/tests.py b/tests/tests.py
index 2837ed0..4ebc049 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -1225,6 +1225,101 @@ class TestsChessMasterBoard(unittest.TestCase):
self.assertEqual(board.picture, expected)
+ def test_picture_threat_1(self):
+
+ board = chess.Board(8)
+ king = chess.King((1, 2))
+ board.place_piece(king)
+
+ expected =[
+ [None, 'T', 'T', 'T', None, None, None, None],
+ [None, 'T', 'king', 'T', None, None, None, None],
+ [None, 'T', 'T', 'T', None, None, None, None],
+ [None, None, None, None, None, None, None, None],
+ [None, None, None, None, None, None, None, None],
+ [None, None, None, None, None, None, None, None],
+ [None, None, None, None, None, None, None, None],
+ [None, None, None, None, None, None, None, None]
+ ]
+ self.assertEqual(board.picture_threat, expected)
+
+ def test_picture_threat_2(self):
+
+ board = chess.Board(4)
+ king = chess.King((0, 3))
+ board.place_piece(king)
+
+ expected = [
+ [None, None, 'T', 'king'],
+ [None, None, 'T', 'T'],
+ [None, None, None, None],
+ [None, None, None, None]
+ ]
+ self.assertEqual(board.picture_threat, expected)
+
+ def test_picture_threat_3(self):
+
+ board = chess.Board(8)
+ queen = chess.Queen((4, 3))
+ board.place_piece(queen)
+
+ expected = [
+ [None, None, None, 'T', None, None, None, 'T'],
+ ['T', None, None, 'T', None, None, 'T', None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ ['T', 'T', 'T', 'queen', 'T', 'T', 'T', 'T'],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ ['T', None, None, 'T', None, None, 'T', None]
+ ]
+
+ self.assertEqual(board.picture_threat, expected)
+
+ def test_picture_threat_4_with_2_pieces(self):
+
+ board = chess.Board(8)
+ queen = chess.Queen((4, 3))
+ king = chess.King((1, 1))
+ board.place_piece(queen)
+ board.place_piece(king)
+
+ expected = [
+ ['T', 'T', 'T', 'T', None, None, None, 'T'],
+ ['T', 'king', 'T', 'T', None, None, 'T', None],
+ ['T', 'T', 'T', 'T', None, 'T', None, None],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ ['T', 'T', 'T', 'queen', 'T', 'T', 'T', 'T'],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ ['T', None, None, 'T', None, None, 'T', None]
+ ]
+
+ self.assertEqual(board.picture_threat, expected)
+
+ def test_picture_threat_4_with_3_pieces(self):
+
+ board = chess.Board(8)
+ queen = chess.Queen((4, 3))
+ king = chess.King((1, 1))
+ kinight = chess.Kinight((7, 7))
+ board.place_piece(queen)
+ board.place_piece(king)
+ board.place_piece(kinight)
+
+ expected = [
+ ['T', 'T', 'T', 'T', None, None, None, 'T'],
+ ['T', 'king', 'T', 'T', None, None, 'T', None],
+ ['T', 'T', 'T', 'T', None, 'T', None, None],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ ['T', 'T', 'T', 'queen', 'T', 'T', 'T', 'T'],
+ [None, None, 'T', 'T', 'T', None, 'T', None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ ['T', None, None, 'T', None, None, 'T', 'kinight']
+ ]
+
+ self.assertEqual(board.picture_threat, expected)
+
def test_put_piece_in_occupied_square(self):
pawn1 = chess.Pawn((1, 2))
pawn2 = chess.Pawn((1, 2))
@@ -1236,6 +1331,56 @@ class TestsChessMasterBoard(unittest.TestCase):
with self.assertRaises(chess.OccupiedSquare):
board.place_piece(pawn2)
+ def test_put_piece_in_threatened_square(self):
+ """
+ Test ThretenedException while trying to put a rook in an place
+ threatened by a queen.
+ [
+ [None, None, None, 'T', None, None, None, 'T'],
+ ['T (trying to put rook here)', None, None, 'T', None, None, 'T', None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ ['T', 'T', 'T', 'queen', 'T', 'T', 'T', 'T'],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ ['T', None, None, 'T', None, None, 'T', None]
+ ]
+
+ """
+
+ board = chess.Board(8)
+ queen = chess.Queen((4, 3))
+ board.place_piece(queen)
+
+ with self.assertRaises(chess.Threatened):
+ rook = chess.Rook((1, 0))
+ board.place_piece(rook)
+
+ def test_put_piece_in_threatening_square(self):
+ """
+ Test ThreteningException while trying to put a kinight in an place that
+ will threatening queen.
+ [
+ [None, None, None, 'T', None, None, None, 'T'],
+ ['T', None, None, 'T', None, None, 'T', None],
+ [None, 'T', None (trying to put kinight here), 'T', None, 'T', None, None],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ ['T', 'T', 'T', 'queen', 'T', 'T', 'T', 'T'],
+ [None, None, 'T', 'T', 'T', None, None, None],
+ [None, 'T', None, 'T', None, 'T', None, None],
+ ['T', None, None, 'T', None, None, 'T', None]
+ ]
+
+ """
+
+ board = chess.Board(8)
+ queen = chess.Queen((4, 3))
+ board.place_piece(queen)
+
+ with self.assertRaises(chess.Threatening):
+ kinight = chess.Kinight((2, 2))
+ board.place_piece(kinight)
+
def test_instanciating_board_wrong_size(self):
with self.assertRaises(ValueError):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
-e git+https://github.com/fabiobatalha/chess_master.git@361087d1f6951733624962438d7db5158df503dc#egg=chessmaster
coverage==6.2
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: chess_master
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- nose==1.3.7
prefix: /opt/conda/envs/chess_master
| [
"tests/tests.py::TestsChessMasterBoard::test_picture_threat_1",
"tests/tests.py::TestsChessMasterBoard::test_picture_threat_2",
"tests/tests.py::TestsChessMasterBoard::test_picture_threat_3",
"tests/tests.py::TestsChessMasterBoard::test_picture_threat_4_with_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_picture_threat_4_with_3_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_threatened_square",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_threatening_square"
]
| []
| [
"tests/tests.py::TestsChessMasterPiece::test_bishop_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_e_positions",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_piece",
"tests/tests.py::TestsChessMasterPiece::test_instanciation_preset_position",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_bottom_right",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_left",
"tests/tests.py::TestsChessMasterPiece::test_king_threatening_zone_boundary_top_right",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_0_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_1_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_2_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_5_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_6_7",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_0",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_1",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_2",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_5",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_6",
"tests/tests.py::TestsChessMasterPiece::test_kinight_threatening_zone_boundary_7_7",
"tests/tests.py::TestsChessMasterPiece::test_n_positions",
"tests/tests.py::TestsChessMasterPiece::test_ne_positions",
"tests/tests.py::TestsChessMasterPiece::test_nw_positions",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_left",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_x_boundary_right",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary",
"tests/tests.py::TestsChessMasterPiece::test_pawn_threatening_zone_y_boundary_last_move",
"tests/tests.py::TestsChessMasterPiece::test_queen_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_rook_threatening_zone",
"tests/tests.py::TestsChessMasterPiece::test_s_positions",
"tests/tests.py::TestsChessMasterPiece::test_se_positions",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_1",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_2",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_3",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_4",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_5",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_6",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_7",
"tests/tests.py::TestsChessMasterPiece::test_set_invalid_position_8",
"tests/tests.py::TestsChessMasterPiece::test_set_position",
"tests/tests.py::TestsChessMasterPiece::test_sw_positions",
"tests/tests.py::TestsChessMasterPiece::test_w_positions",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_3",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_size_7",
"tests/tests.py::TestsChessMasterBoard::test_instanciating_board_wrong_size",
"tests/tests.py::TestsChessMasterBoard::test_put_1_piece",
"tests/tests.py::TestsChessMasterBoard::test_put_2_pieces",
"tests/tests.py::TestsChessMasterBoard::test_put_piece_in_occupied_square"
]
| []
| MIT License | 700 | [
"masterchess/chess.py"
]
| [
"masterchess/chess.py"
]
|
|
mkdocs__mkdocs-1023 | 582523bcd362cc689d5a229c304055fcb3c65a69 | 2016-08-11 14:22:36 | e7d8879d2b53d9e50bdfcf1cf29c48dc3f6bc87f | diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md
index fdb21e46..e52abf87 100644
--- a/docs/about/release-notes.md
+++ b/docs/about/release-notes.md
@@ -164,6 +164,8 @@ better conform with the documented [layout].
via the new [`edit_uri`](../user-guide/configuration.md#edit_uri) setting.
* Bugfix: Don't override config value for strict mode if not specified on CLI
(#738).
+* Add a `--force` flag to the `gh-deploy` command to force the push to the
+ repository (#973).
## Version 0.15.3 (2016-02-18)
diff --git a/mkdocs/__main__.py b/mkdocs/__main__.py
index 016564b3..de26cde9 100644
--- a/mkdocs/__main__.py
+++ b/mkdocs/__main__.py
@@ -86,6 +86,7 @@ remote_branch_help = ("The remote branch to commit to for Github Pages. This "
"overrides the value specified in config")
remote_name_help = ("The remote name to commit to for Github Pages. This "
"overrides the value specified in config")
+force_help = "Force the push to the repository."
@click.group(context_settings={'help_option_names': ['-h', '--help']})
@@ -198,8 +199,9 @@ def json_command(clean, config_file, strict, site_dir):
@click.option('-m', '--message', help=commit_message_help)
@click.option('-b', '--remote-branch', help=remote_branch_help)
@click.option('-r', '--remote-name', help=remote_name_help)
[email protected]('--force', is_flag=True, help=force_help)
@common_options
-def gh_deploy_command(config_file, clean, message, remote_branch, remote_name):
+def gh_deploy_command(config_file, clean, message, remote_branch, remote_name, force):
"""Deploy your documentation to GitHub Pages"""
try:
cfg = config.load_config(
@@ -208,7 +210,7 @@ def gh_deploy_command(config_file, clean, message, remote_branch, remote_name):
remote_name=remote_name
)
build.build(cfg, dirty=not clean)
- gh_deploy.gh_deploy(cfg, message=message)
+ gh_deploy.gh_deploy(cfg, message=message, force=force)
except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
diff --git a/mkdocs/commands/gh_deploy.py b/mkdocs/commands/gh_deploy.py
index 9a240006..0f504c7a 100644
--- a/mkdocs/commands/gh_deploy.py
+++ b/mkdocs/commands/gh_deploy.py
@@ -49,7 +49,7 @@ def _get_remote_url(remote_name):
return host, path
-def gh_deploy(config, message=None):
+def gh_deploy(config, message=None, force=False):
if not _is_cwd_git_repo():
log.error('Cannot deploy - this directory does not appear to be a git '
@@ -66,7 +66,7 @@ def gh_deploy(config, message=None):
config['site_dir'], config['remote_branch'])
result, error = ghp_import.ghp_import(config['site_dir'], message, remote_name,
- remote_branch)
+ remote_branch, force)
if not result:
log.error("Failed to deploy to GitHub with error: \n%s", error)
raise SystemExit(1)
diff --git a/mkdocs/utils/ghp_import.py b/mkdocs/utils/ghp_import.py
index 5339b216..62ef8c5c 100644
--- a/mkdocs/utils/ghp_import.py
+++ b/mkdocs/utils/ghp_import.py
@@ -158,7 +158,7 @@ def run_import(srcdir, branch, message, nojekyll):
sys.stdout.write(enc("Failed to process commit.\n"))
-def ghp_import(directory, message, remote='origin', branch='gh-pages'):
+def ghp_import(directory, message, remote='origin', branch='gh-pages', force=False):
if not try_rebase(remote, branch):
log.error("Failed to rebase %s branch.", branch)
@@ -167,8 +167,12 @@ def ghp_import(directory, message, remote='origin', branch='gh-pages'):
run_import(directory, branch, message, nojekyll)
- proc = sp.Popen(['git', 'push', remote, branch],
- stdout=sp.PIPE, stderr=sp.PIPE)
+ cmd = ['git', 'push', remote, branch]
+
+ if force:
+ cmd.insert(2, '--force')
+
+ proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
out, err = proc.communicate()
result = proc.wait() == 0
| Sometimes deploy to gh-pages fails
Encountered while fixing #966. Originally added `--force` in #967, but it makes more sense to do some more research on the root cause.
Possibly:
- GH token permissions
- Same GH token used on multiple machines(?)
- Some git call is failing silently
Will investigate repro steps and update here. | mkdocs/mkdocs | diff --git a/mkdocs/tests/cli_tests.py b/mkdocs/tests/cli_tests.py
index 0014dc2a..56b01e20 100644
--- a/mkdocs/tests/cli_tests.py
+++ b/mkdocs/tests/cli_tests.py
@@ -349,6 +349,8 @@ class CLITests(unittest.TestCase):
g_args, g_kwargs = mock_gh_deploy.call_args
self.assertTrue('message' in g_kwargs)
self.assertEqual(g_kwargs['message'], None)
+ self.assertTrue('force' in g_kwargs)
+ self.assertEqual(g_kwargs['force'], False)
self.assertEqual(mock_build.call_count, 1)
b_args, b_kwargs = mock_build.call_args
self.assertTrue('dirty' in b_kwargs)
@@ -456,3 +458,19 @@ class CLITests(unittest.TestCase):
remote_branch=None,
remote_name='foo'
)
+
+ @mock.patch('mkdocs.config.load_config', autospec=True)
+ @mock.patch('mkdocs.commands.build.build', autospec=True)
+ @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True)
+ def test_gh_deploy_force(self, mock_gh_deploy, mock_build, mock_load_config):
+
+ result = self.runner.invoke(
+ cli.cli, ['gh-deploy', '--force'], catch_exceptions=False)
+
+ self.assertEqual(result.exit_code, 0)
+ self.assertEqual(mock_gh_deploy.call_count, 1)
+ g_args, g_kwargs = mock_gh_deploy.call_args
+ self.assertTrue('force' in g_kwargs)
+ self.assertEqual(g_kwargs['force'], True)
+ self.assertEqual(mock_build.call_count, 1)
+ self.assertEqual(mock_load_config.call_count, 1)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 4
} | 0.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"nose",
"nose-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/project.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
cov-core==1.15.0
coverage==7.8.0
exceptiongroup==1.2.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
livereload==2.7.1
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@582523bcd362cc689d5a229c304055fcb3c65a69#egg=mkdocs
mock==5.2.0
nose==1.3.7
nose-cov==1.6
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
tomli==2.2.1
tornado==6.4.2
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- cov-core==1.15.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- livereload==2.7.1
- markdown==3.7
- markupsafe==3.0.2
- mock==5.2.0
- nose==1.3.7
- nose-cov==1.6
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- tomli==2.2.1
- tornado==6.4.2
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_defaults",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_force"
]
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build_clean",
"mkdocs/tests/cli_tests.py::CLITests::test_build_dirty",
"mkdocs/tests/cli_tests.py::CLITests::test_build_quiet",
"mkdocs/tests/cli_tests.py::CLITests::test_build_verbose",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_clean",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_dirty",
"mkdocs/tests/cli_tests.py::CLITests::test_json"
]
| [
"mkdocs/tests/cli_tests.py::CLITests::test_build_config_file",
"mkdocs/tests/cli_tests.py::CLITests::test_build_defaults",
"mkdocs/tests/cli_tests.py::CLITests::test_build_site_dir",
"mkdocs/tests/cli_tests.py::CLITests::test_build_strict",
"mkdocs/tests/cli_tests.py::CLITests::test_build_theme",
"mkdocs/tests/cli_tests.py::CLITests::test_build_theme_dir",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_config_file",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_message",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_remote_branch",
"mkdocs/tests/cli_tests.py::CLITests::test_gh_deploy_remote_name",
"mkdocs/tests/cli_tests.py::CLITests::test_new",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_config_file",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_default",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_dev_addr",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_dirtyreload",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_livereload",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_no_livereload",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_strict",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_theme",
"mkdocs/tests/cli_tests.py::CLITests::test_serve_theme_dir"
]
| []
| BSD 2-Clause "Simplified" License | 701 | [
"mkdocs/commands/gh_deploy.py",
"docs/about/release-notes.md",
"mkdocs/__main__.py",
"mkdocs/utils/ghp_import.py"
]
| [
"mkdocs/commands/gh_deploy.py",
"docs/about/release-notes.md",
"mkdocs/__main__.py",
"mkdocs/utils/ghp_import.py"
]
|
|
XD-embedded__xd-docker-47 | e402ad039360902a341fc7b77f369a885f7cbce8 | 2016-08-14 10:32:25 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/CHANGELOG b/CHANGELOG
index 8b6bf1b..7fdaf1f 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,7 @@
-0.1.0 (in development)
+0.2.0 (in development)
----------------------
-Initial release
+* Add container_remove() method.
+
+0.1.0 (2016-08-13)
+------------------
+First development release
diff --git a/xd/docker/client.py b/xd/docker/client.py
index c5ad43d..4e16fa9 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -428,3 +428,33 @@ class DockerClient(object):
headers=headers, data=json.dumps(json_params))
response_json = response.json()
return Container(self, id=response_json['Id'])
+
+ def container_remove(self, container: Union[Container, ContainerName, str],
+ force: Optional[bool]=None,
+ volumes: Optional[bool]=None):
+ """Remove a container.
+
+ Remove a container and (optionally) the associated volumes.
+
+ Arguments:
+ container: The container to remove (id or name).
+ force: Kill then remove the container.
+ volumes: Remove the volumes associated to the container.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ query_params = {}
+ if force is not None:
+ query_params['force'] = force
+ if volumes is not None:
+ query_params['v'] = volumes
+
+ self._delete('/containers/' + id_or_name, params=query_params)
+ return
| client.container_remove()
Client API command to remove a container. | XD-embedded/xd-docker | diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index c09777c..b23c92d 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -13,7 +13,7 @@ DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
- os.system("for c in `docker ps -a -q`;do docker rm $c;done")
+ os.system("for c in `docker ps -a -q`;do docker rm -f -v $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
diff --git a/tests/integration/container_remove_test.py b/tests/integration/container_remove_test.py
new file mode 100644
index 0000000..948a92f
--- /dev/null
+++ b/tests/integration/container_remove_test.py
@@ -0,0 +1,38 @@
+import pytest
+import os
+import re
+
+from xd.docker.client import *
+
+
+def test_ok(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest")
+ docker.container_remove('xd-docker-test')
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_remove('xd-docker-test')
+ assert clienterror.value.code == 404
+
+
+def test_container_running(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 600")
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_remove('xd-docker-test')
+ assert clienterror.value.code == 409
+
+
+def test_container_running_force_false(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 600")
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_remove('xd-docker-test', force=False)
+ assert clienterror.value.code == 409
+
+
+def test_container_running_force(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 600")
+ docker.container_remove('xd-docker-test', force=True)
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 19a8acf..55c1443 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1,5 +1,6 @@
import unittest
import mock
+import pytest
import io
import contextlib
import tempfile
@@ -292,7 +293,7 @@ class containers_tests(SimpleClientTestCase):
},
"Mounts": []
}]
-
+
@mock.patch('requests.get')
def test_containers_1(self, get_mock):
get_mock.return_value = requests_mock.Response(json.dumps(
@@ -1100,3 +1101,76 @@ class container_create_tests(ContextClientTestCase):
assert post_mock.call_count == 1
name, args, kwargs = post_mock.mock_calls[0]
assert args[0].endswith('/containers/create')
+
+
+class container_remove_tests(ContextClientTestCase):
+
+ @mock.patch('requests.delete')
+ def test_ok(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove('foobar')
+ params = delete_mock.call_args[1]['params']
+ assert 'force' not in params
+ assert 'v' not in params
+ assert delete_mock.call_args[0][0].endswith('/containers/foobar')
+
+ @mock.patch('requests.delete')
+ def test_no_such_container(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response(
+ "No such container", 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_remove('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.delete')
+ def test_containername(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove(ContainerName('foobar'))
+ assert delete_mock.call_args[0][0].endswith('/containers/foobar')
+
+ @mock.patch('requests.delete')
+ def test_container_with_name(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove(Container(self.client, name='foobar'))
+ assert delete_mock.call_args[0][0].endswith('/containers/foobar')
+
+ @mock.patch('requests.delete')
+ def test_container_with_id(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove(Container(self.client, id='dfeb03b02b41'))
+ assert delete_mock.call_args[0][0].endswith('/containers/dfeb03b02b41')
+
+ @mock.patch('requests.delete')
+ def test_container_with_id_and_name(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove(Container(self.client,
+ id='dfeb03b02b41', name='foo'))
+ assert delete_mock.call_args[0][0].endswith('/containers/dfeb03b02b41')
+
+ @mock.patch('requests.delete')
+ def test_force_true(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove('foobar', force=True)
+ params = delete_mock.call_args[1]['params']
+ assert 'force' in params and params['force']
+
+ @mock.patch('requests.delete')
+ def test_force_false(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove('foobar', force=False)
+ params = delete_mock.call_args[1]['params']
+ assert 'force' in params and not params['force']
+
+ @mock.patch('requests.delete')
+ def test_volumes_true(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove('foobar', volumes=True)
+ params = delete_mock.call_args[1]['params']
+ assert 'v' in params and params['v']
+
+ @mock.patch('requests.delete')
+ def test_volumes_false(self, delete_mock):
+ delete_mock.return_value = requests_mock.Response("OK", 200)
+ self.client.container_remove('foobar', volumes=False)
+ params = delete_mock.call_args[1]['params']
+ assert 'v' in params and not params['v']
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@e402ad039360902a341fc7b77f369a885f7cbce8#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true"
]
| []
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory"
]
| []
| MIT License | 702 | [
"CHANGELOG",
"xd/docker/client.py"
]
| [
"CHANGELOG",
"xd/docker/client.py"
]
|
|
XD-embedded__xd-docker-48 | ae237d02e27ae5cecdf3f766574865e7081abb02 | 2016-08-14 11:18:29 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/CHANGELOG b/CHANGELOG
index 7fdaf1f..6051fc5 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,6 +1,7 @@
0.2.0 (in development)
----------------------
* Add container_remove() method.
+* Add container_start() method.
0.1.0 (2016-08-13)
------------------
diff --git a/xd/docker/client.py b/xd/docker/client.py
index 4e16fa9..049e530 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -458,3 +458,30 @@ class DockerClient(object):
self._delete('/containers/' + id_or_name, params=query_params)
return
+
+ def container_start(self, container: Union[Container, ContainerName, str]):
+ """Start a container.
+
+ Arguments:
+ container: The container to start (id or name).
+
+ Returns:
+ True if container was started.
+ False if container was already started.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ try:
+ self._post('/containers/{}/start'.format(id_or_name))
+ except HTTPError as e:
+ if e.code == 304:
+ return False
+ raise e
+ return True
| client.container_start()
Client API command to start a container. | XD-embedded/xd-docker | diff --git a/tests/integration/container_start_test.py b/tests/integration/container_start_test.py
new file mode 100644
index 0000000..7321187
--- /dev/null
+++ b/tests/integration/container_start_test.py
@@ -0,0 +1,22 @@
+import pytest
+import os
+import re
+
+from xd.docker.client import *
+
+
+def test_ok(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest echo test")
+ assert docker.container_start('xd-docker-test') == True
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_start('xd-docker-test')
+ assert clienterror.value.code == 404
+
+
+def test_container_running(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 600")
+ assert docker.container_start('xd-docker-test') == False
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 55c1443..59bddf6 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1174,3 +1174,57 @@ class container_remove_tests(ContextClientTestCase):
self.client.container_remove('foobar', volumes=False)
params = delete_mock.call_args[1]['params']
assert 'v' in params and not params['v']
+
+
+class container_start_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_str(self, post_mock):
+ post_mock.return_value = requests_mock.Response("OK", 204)
+ assert self.client.container_start('foobar')
+ assert not post_mock.call_args[1]['params']
+ assert post_mock.call_args[0][0].endswith('/containers/foobar/start')
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response("OK", 204)
+ assert self.client.container_start(ContainerName('foobar'))
+ assert not post_mock.call_args[1]['params']
+ assert post_mock.call_args[0][0].endswith('/containers/foobar/start')
+
+ @mock.patch('requests.post')
+ def test_container_with_name(self, post_mock):
+ post_mock.return_value = requests_mock.Response("OK", 204)
+ assert self.client.container_start(
+ Container(self.client, name='foobar'))
+ assert post_mock.call_args[0][0].endswith('/containers/foobar/start')
+
+ @mock.patch('requests.post')
+ def test_container_with_id(self, post_mock):
+ post_mock.return_value = requests_mock.Response("OK", 204)
+ assert self.client.container_start(
+ Container(self.client, id='dfeb03b02b41'))
+ assert post_mock.call_args[0][0].endswith(
+ '/containers/dfeb03b02b41/start')
+
+ @mock.patch('requests.post')
+ def test_container_with_id_and_name(self, post_mock):
+ post_mock.return_value = requests_mock.Response("OK", 204)
+ assert self.client.container_start(
+ Container(self.client, id='dfeb03b02b41', name='foobar'))
+ assert post_mock.call_args[0][0].endswith(
+ '/containers/dfeb03b02b41/start')
+
+ @mock.patch('requests.post')
+ def test_already_running(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ "Container already started", 304)
+ assert not self.client.container_start('foobar')
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(
+ "No such container", 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_start('foobar')
+ assert clienterror.value.code == 404
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@ae237d02e27ae5cecdf3f766574865e7081abb02#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str"
]
| []
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true"
]
| []
| MIT License | 703 | [
"CHANGELOG",
"xd/docker/client.py"
]
| [
"CHANGELOG",
"xd/docker/client.py"
]
|
|
falconry__falcon-868 | fd5a0ba5874c68e2a61470180455c8e05c3d3d66 | 2016-08-15 21:11:54 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/868?src=pr) is 100% (diff: 100%)
> Merging [#868](https://codecov.io/gh/falconry/falcon/pull/868?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will not change coverage
```diff
@@ master #868 diff @@
====================================
Files 30 30
Lines 1864 1864
Methods 0 0
Messages 0 0
Branches 305 305
====================================
Hits 1864 1864
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [fd5a0ba...dbf09ee](https://codecov.io/gh/falconry/falcon/compare/fd5a0ba5874c68e2a61470180455c8e05c3d3d66...dbf09ee6361a2abaa9ceaae5e4fedc77fefad291?src=pr) | diff --git a/falcon/request.py b/falcon/request.py
index 30a2bd3..dd49159 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -243,6 +243,7 @@ class Request(object):
'options',
'_cookies',
'_cached_access_route',
+ '__dict__',
)
# Child classes may override this
diff --git a/falcon/response.py b/falcon/response.py
index 44d3d5d..fd25479 100644
--- a/falcon/response.py
+++ b/falcon/response.py
@@ -116,6 +116,7 @@ class Response(object):
'stream',
'stream_len',
'context',
+ '__dict__',
)
# Child classes may override this
| Make it easier to add custom attributes to slotted classes
Users of the framework sometimes want to assign additional variables to the `Request`, `Response`, or other "slotted" classes. This can be allowed without hurting efficiency by adding a `'__dict__'` string to the `__slots__` declarations.
From the Python [data model docs](https://docs.python.org/2/reference/datamodel.html#slots):
> Without a __dict__ variable, instances cannot be assigned new variables not listed in the __slots__ definition. Attempts to assign to an unlisted variable name raises AttributeError. If dynamic assignment of new variables is desired, then add '__dict__' to the sequence of strings in the __slots__ declaration.
I did a quick test and adding '__dict__' does not decrease performance for accessing the other slotted variables in 2.7 and 3.5.
Question: Does this enable an anti-pattern? Would we rather that apps use `req.context`? Would it be better to have a context type that supports attribute-based access? (e.g., `req.context.foo` == `req.context['foo']`)? | falconry/falcon | diff --git a/tests/test_slots.py b/tests/test_slots.py
new file mode 100644
index 0000000..665ea32
--- /dev/null
+++ b/tests/test_slots.py
@@ -0,0 +1,22 @@
+from falcon import Request, Response
+import falcon.testing as testing
+
+
+class TestSlots(testing.TestBase):
+
+ def test_slots_request(self):
+ env = testing.create_environ()
+ req = Request(env)
+
+ try:
+ req.doesnt = 'exist'
+ except AttributeError:
+ self.fail('Unable to add additional variables dynamically')
+
+ def test_slots_response(self):
+ resp = Response()
+
+ try:
+ resp.doesnt = 'exist'
+ except AttributeError:
+ self.fail('Unable to add additional variables dynamically')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"ddt",
"testtools",
"requests",
"pyyaml",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@fd5a0ba5874c68e2a61470180455c8e05c3d3d66#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_slots.py::TestSlots::test_slots_request",
"tests/test_slots.py::TestSlots::test_slots_response"
]
| []
| []
| []
| Apache License 2.0 | 704 | [
"falcon/request.py",
"falcon/response.py"
]
| [
"falcon/request.py",
"falcon/response.py"
]
|
falconry__falcon-869 | e0f059378b113dcc40e7de7fb16b4f4a104a74f1 | 2016-08-16 01:20:09 | 67d61029847cbf59e4053c8a424df4f9f87ad36f | codecov-io: ## [Current coverage](https://codecov.io/gh/falconry/falcon/pull/869?src=pr) is 100% (diff: 100%)
> Merging [#869](https://codecov.io/gh/falconry/falcon/pull/869?src=pr) into [master](https://codecov.io/gh/falconry/falcon/branch/master?src=pr) will not change coverage
```diff
@@ master #869 diff @@
====================================
Files 30 30
Lines 1864 1874 +10
Methods 0 0
Messages 0 0
Branches 305 309 +4
====================================
+ Hits 1864 1874 +10
Misses 0 0
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [9499207...901718d](https://codecov.io/gh/falconry/falcon/compare/94992071b1b9c6f828bb827b8be092146a514200...901718d0d5f4618a704f341a9bb9b923187c4b3a?src=pr)
kgriffs: [rebased]
kgriffs: [rebased]
fxfitz: LGTM
jmvrbanac: :+1: | diff --git a/falcon/util/uri.py b/falcon/util/uri.py
index fc1acd9..47726da 100644
--- a/falcon/util/uri.py
+++ b/falcon/util/uri.py
@@ -60,6 +60,7 @@ def _create_char_encoder(allowed_chars):
def _create_str_encoder(is_value):
allowed_chars = _UNRESERVED if is_value else _ALL_ALLOWED
+ allowed_chars_plus_percent = allowed_chars + '%'
encode_char = _create_char_encoder(allowed_chars)
def encoder(uri):
@@ -67,10 +68,32 @@ def _create_str_encoder(is_value):
if not uri.rstrip(allowed_chars):
return uri
+ if not uri.rstrip(allowed_chars_plus_percent):
+ # NOTE(kgriffs): There's a good chance the string has already
+ # been escaped. Do one more check to increase our certainty.
+ tokens = uri.split('%')
+ for token in tokens[1:]:
+ hex_octet = token[:2]
+
+ if not len(hex_octet) == 2:
+ break
+
+ if not (hex_octet[0] in _HEX_DIGITS and
+ hex_octet[1] in _HEX_DIGITS):
+ break
+ else:
+ # NOTE(kgriffs): All percent-encoded sequences were
+ # valid, so assume that the string has already been
+ # encoded.
+ return uri
+
+ # NOTE(kgriffs): At this point we know there is at least
+ # one unallowed percent character. We are going to assume
+ # that everything should be encoded. If the string is
+ # partially encoded, the caller will need to normalize it
+ # before passing it in here.
+
# Convert to a byte array if it is not one already
- #
- # NOTE(kgriffs): Code coverage disabled since in Py3K the uri
- # is always a text type, so we get a failure for that tox env.
if isinstance(uri, six.text_type):
uri = uri.encode('utf-8')
| resp.location double-encodes urlencoded strings
I've got a URL which contains a url-encoded URL as a parameter, and when setting resp.location, that results in it being double-encoded:
resp.location = "http://something?redirect_uri=http%3A%2F%2Fsite"
This should result in:
Location: http://something?redirect_uri=http%3A%2F%2Fsite...
But what actually happens is:
Location: http://something?redirect_uri=http%253A%252F%252Fsite
I worked around by raising a HTTPStatus() object, but this doesn't seem like the ideal situation. | falconry/falcon | diff --git a/tests/test_utils.py b/tests/test_utils.py
index 6b5f75d..de32f0a 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -177,6 +177,35 @@ class TestFalconUtils(testtools.TestCase):
'?limit=3&e%C3%A7ho=true')
self.assertEqual(uri.encode(url), expected)
+ def test_uri_encode_double(self):
+ url = 'http://example.com/v1/fiz bit/messages'
+ expected = 'http://example.com/v1/fiz%20bit/messages'
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ url = u'http://example.com/v1/fizbit/messages?limit=3&e\u00e7ho=true'
+ expected = ('http://example.com/v1/fizbit/messages'
+ '?limit=3&e%C3%A7ho=true')
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ url = 'http://example.com/v1/fiz%bit/mess%ages/%'
+ expected = 'http://example.com/v1/fiz%25bit/mess%25ages/%25'
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ url = 'http://example.com/%%'
+ expected = 'http://example.com/%25%25'
+ self.assertEqual(uri.encode(uri.encode(url)), expected)
+
+ # NOTE(kgriffs): Specific example cited in GH issue
+ url = 'http://something?redirect_uri=http%3A%2F%2Fsite'
+ self.assertEqual(uri.encode(url), url)
+
+ hex_digits = 'abcdefABCDEF0123456789'
+ for c1 in hex_digits:
+ for c2 in hex_digits:
+ url = 'http://example.com/%' + c1 + c2
+ encoded = uri.encode(uri.encode(url))
+ self.assertEqual(encoded, url)
+
def test_uri_encode_value(self):
self.assertEqual(uri.encode_value('abcd'), 'abcd')
self.assertEqual(uri.encode_value(u'abcd'), u'abcd')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"ddt",
"pyyaml",
"requests",
"testtools",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"tools/test-requires"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
ddt==1.7.2
-e git+https://github.com/falconry/falcon.git@e0f059378b113dcc40e7de7fb16b4f4a104a74f1#egg=falcon
fixtures==4.0.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
nose==1.3.7
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-mimeparse==1.6.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: falcon
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- fixtures==4.0.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/falcon
| [
"tests/test_utils.py::TestFalconUtils::test_uri_encode_double"
]
| [
"tests/test_utils.py::TestFalconUtils::test_deprecated_decorator"
]
| [
"tests/test_utils.py::TestFalconUtils::test_dt_to_http",
"tests/test_utils.py::TestFalconUtils::test_get_http_status",
"tests/test_utils.py::TestFalconUtils::test_http_date_to_dt",
"tests/test_utils.py::TestFalconUtils::test_http_now",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_none",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_one",
"tests/test_utils.py::TestFalconUtils::test_pack_query_params_several",
"tests/test_utils.py::TestFalconUtils::test_parse_host",
"tests/test_utils.py::TestFalconUtils::test_parse_query_string",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_decode_models_stdlib_unquote_plus",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_models_stdlib_quote",
"tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_value_models_stdlib_quote_safe_tilde",
"tests/test_utils.py::TestFalconUtils::test_uri_decode",
"tests/test_utils.py::TestFalconUtils::test_uri_encode",
"tests/test_utils.py::TestFalconUtils::test_uri_encode_value",
"tests/test_utils.py::TestFalconTesting::test_decode_empty_result",
"tests/test_utils.py::TestFalconTesting::test_httpnow_alias_for_backwards_compat",
"tests/test_utils.py::TestFalconTesting::test_none_header_value_in_create_environ",
"tests/test_utils.py::TestFalconTesting::test_path_escape_chars_in_create_environ",
"tests/test_utils.py::TestFalconTestCase::test_cached_text_in_result",
"tests/test_utils.py::TestFalconTestCase::test_path_must_start_with_slash",
"tests/test_utils.py::TestFalconTestCase::test_query_string",
"tests/test_utils.py::TestFalconTestCase::test_query_string_in_path",
"tests/test_utils.py::TestFalconTestCase::test_query_string_no_question",
"tests/test_utils.py::TestFalconTestCase::test_simple_resource_body_json_xor",
"tests/test_utils.py::TestFalconTestCase::test_status",
"tests/test_utils.py::TestFalconTestCase::test_wsgi_iterable_not_closeable",
"tests/test_utils.py::FancyTestCase::test_something"
]
| []
| Apache License 2.0 | 705 | [
"falcon/util/uri.py"
]
| [
"falcon/util/uri.py"
]
|
andir__isc-dhcp-filter-2 | efc868102f47329f7280b87a21b5fd9e9defcd64 | 2016-08-16 14:13:07 | efc868102f47329f7280b87a21b5fd9e9defcd64 | diff --git a/isc_dhcp_filter/__init__.py b/isc_dhcp_filter/__init__.py
index d769b6d..70524c3 100644
--- a/isc_dhcp_filter/__init__.py
+++ b/isc_dhcp_filter/__init__.py
@@ -1,6 +1,6 @@
+from isc_dhcp_leases import IscDhcpLeases
from isc_dhcp_leases import Lease
from isc_dhcp_leases import Lease6
-from isc_dhcp_leases import IscDhcpLeases
def parse(*files):
@@ -119,6 +119,13 @@ class Leases:
return Leases(g)
+ def count(self):
+ """
+ Returns the count of leases in the current set of leases
+ :return: int count of leases
+ """
+ return len(self)
+
def __iter__(self):
"""
Returns an iterator for the current set of leases
@@ -128,3 +135,16 @@ class Leases:
yield from iter(self._leases)
elif self._iter:
yield from self._iter()
+
+ def __len__(self):
+ """
+ Implements __len__
+ If we are dealing with a generator we will expand it into `_leases`
+ :return:
+ """
+ if type(self._leases) is list:
+ return len(self._leases)
+ else:
+ l = list(iter(self))
+ self._leases = l
+ return len(l)
| Add `.count()` method as shortcut for len(list(leases))
Sometime the only intresting part about the lease db is how many are actually in a given state. Currently you've to write `len(list(leases.active))` to get the count of active leases. `leases.active.count()` and also implementing `__len__` would probably be handy. | andir/isc-dhcp-filter | diff --git a/tests/__init__.py b/tests/__init__.py
index e924baf..426d90b 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,11 +1,11 @@
import os
from unittest import TestCase
+from freezegun import freeze_time
from isc_dhcp_leases import Lease
from isc_dhcp_leases.iscdhcpleases import BaseLease
from isc_dhcp_filter import parse, Leases
-from freezegun import freeze_time
class LeaseLoaderMixin:
@@ -26,6 +26,15 @@ class BaseLeaseTester:
self.assertEqual(len(active_valid), len(valid_active))
self.assertEqual(len(active_valid), len(list(self.leases.current)))
+ def test_list_twice(self):
+ a = list(self.leases)
+ b = list(self.leases)
+
+ self.assertEqual(a, b)
+
+ def test_len(self):
+ self.assertEqual(len(self.leases), self.leases.count())
+
def test_v4_filter(self):
for lease in self.leases.v4:
self.assertIsInstance(lease, Lease)
@@ -65,7 +74,7 @@ class BaseLeaseTester:
def test_filter_combine(self):
combined = Leases(self.leases.v4, self.leases.v6)
l = len(list(combined))
- self.assertEqual(l, len(list(self.leases)))
+ self.assertEqual(l, len(self.leases))
class TestDhcpd6(LeaseLoaderMixin, BaseLeaseTester, TestCase):
@@ -74,18 +83,20 @@ class TestDhcpd6(LeaseLoaderMixin, BaseLeaseTester, TestCase):
def test_dhcpv6_active(self):
leases = self.leases
- self.assertEqual(len(list(leases)), 4)
- self.assertEqual(len(list(leases.active)), 4)
+ self.assertEqual(len(leases), 4)
+ self.assertEqual(len(leases.active), 4)
+ self.assertEqual(len(leases.active), 4)
@freeze_time("2015-07-6 8:15:0")
def test_dhcpv6_active_valid(self):
leases = self.leases
- active_valid = list(leases.active.valid)
- valid_active = list(leases.valid.active)
+ active_valid = leases.active.valid
+ valid_active = leases.valid.active
self.assertEqual(len(active_valid), len(valid_active))
- self.assertEqual(len(active_valid), len(list(leases.current)))
+ self.assertEqual(len(active_valid), len(leases.current))
+ self.assertEqual(sorted(active_valid, key=id), sorted(valid_active, key=id))
def test_dhcpv6_invalid(self):
leases = self.leases
@@ -115,6 +126,11 @@ class TestDebian7(LeaseLoaderMixin, BaseLeaseTester, TestCase):
leases2 = list(self.leases.where_eq('vendor-class-identifier', 'Some Vendor Identifier'))
self.assertEqual(leases1, leases2)
+
class TestEmptyLease(BaseLeaseTester, TestCase):
def setUp(self):
- self.leases = Leases()
\ No newline at end of file
+ self.leases = Leases()
+
+ def test_lease_count_zero(self):
+ self.assertEqual(self.leases.count(), 0)
+ self.assertEqual(len(self.leases), 0)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"freezegun"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
freezegun==1.5.1
iniconfig==2.1.0
-e git+https://github.com/andir/isc-dhcp-filter.git@efc868102f47329f7280b87a21b5fd9e9defcd64#egg=isc_dhcp_filter
isc-dhcp-leases==0.10.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
tomli==2.2.1
| name: isc-dhcp-filter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- freezegun==1.5.1
- iniconfig==2.1.0
- isc-dhcp-leases==0.10.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/isc-dhcp-filter
| [
"tests/__init__.py::TestDhcpd6::test_dhcpv6_active",
"tests/__init__.py::TestDhcpd6::test_dhcpv6_active_valid",
"tests/__init__.py::TestDhcpd6::test_filter_combine",
"tests/__init__.py::TestDhcpd6::test_len",
"tests/__init__.py::TestDebian7::test_filter_combine",
"tests/__init__.py::TestDebian7::test_len",
"tests/__init__.py::TestEmptyLease::test_filter_combine",
"tests/__init__.py::TestEmptyLease::test_lease_count_zero",
"tests/__init__.py::TestEmptyLease::test_len"
]
| []
| [
"tests/__init__.py::TestDhcpd6::test_active_filter",
"tests/__init__.py::TestDhcpd6::test_active_valid_current",
"tests/__init__.py::TestDhcpd6::test_current_filter",
"tests/__init__.py::TestDhcpd6::test_dhcpv6_invalid",
"tests/__init__.py::TestDhcpd6::test_filter_func",
"tests/__init__.py::TestDhcpd6::test_inactive_filter",
"tests/__init__.py::TestDhcpd6::test_invalid_filter",
"tests/__init__.py::TestDhcpd6::test_list_twice",
"tests/__init__.py::TestDhcpd6::test_v4_filter",
"tests/__init__.py::TestDhcpd6::test_v6_filter",
"tests/__init__.py::TestDhcpd6::test_valid_filter",
"tests/__init__.py::TestDhcpd6::test_where_eq",
"tests/__init__.py::TestDebian7::test_active_filter",
"tests/__init__.py::TestDebian7::test_active_valid_current",
"tests/__init__.py::TestDebian7::test_current_filter",
"tests/__init__.py::TestDebian7::test_filter_func",
"tests/__init__.py::TestDebian7::test_inactive_filter",
"tests/__init__.py::TestDebian7::test_invalid_filter",
"tests/__init__.py::TestDebian7::test_list_twice",
"tests/__init__.py::TestDebian7::test_v4_filter",
"tests/__init__.py::TestDebian7::test_v6_filter",
"tests/__init__.py::TestDebian7::test_valid_filter",
"tests/__init__.py::TestDebian7::test_vendor_class_identifier",
"tests/__init__.py::TestEmptyLease::test_active_filter",
"tests/__init__.py::TestEmptyLease::test_active_valid_current",
"tests/__init__.py::TestEmptyLease::test_current_filter",
"tests/__init__.py::TestEmptyLease::test_filter_func",
"tests/__init__.py::TestEmptyLease::test_inactive_filter",
"tests/__init__.py::TestEmptyLease::test_invalid_filter",
"tests/__init__.py::TestEmptyLease::test_list_twice",
"tests/__init__.py::TestEmptyLease::test_v4_filter",
"tests/__init__.py::TestEmptyLease::test_v6_filter",
"tests/__init__.py::TestEmptyLease::test_valid_filter"
]
| []
| MIT License | 706 | [
"isc_dhcp_filter/__init__.py"
]
| [
"isc_dhcp_filter/__init__.py"
]
|
|
grabbles__grabbit-6 | afe361809ca5c040a46caa9f8a9bae017bcc706e | 2016-08-18 05:30:39 | afe361809ca5c040a46caa9f8a9bae017bcc706e | diff --git a/grabbit/core.py b/grabbit/core.py
index 84009db..a2a87be 100644
--- a/grabbit/core.py
+++ b/grabbit/core.py
@@ -197,7 +197,7 @@ class Layout(object):
return_type (str): Type of result to return. Valid values:
'tuple': returns a list of namedtuples containing file name as
well as attribute/value pairs for all named entities.
- 'file': returns a list of File instances.
+ 'file': returns a list of matching filenames.
'dir': returns a list of directories.
'id': returns a list of unique IDs. Must be used together with
a valid target.
@@ -222,7 +222,7 @@ class Layout(object):
result.append(file)
if return_type == 'file':
- return result
+ return natural_sort([f.path for f in result])
if return_type == 'tuple':
result = [r.as_named_tuple() for r in result]
| Redefining File class is confusing
Returning File objects which are something different that python build in [file object](https://docs.python.org/3/glossary.html#term-file-object) | grabbles/grabbit | diff --git a/grabbit/tests/test_core.py b/grabbit/tests/test_core.py
index 0c92377..11da286 100644
--- a/grabbit/tests/test_core.py
+++ b/grabbit/tests/test_core.py
@@ -127,6 +127,8 @@ class TestLayout:
result = layout.get(target='subject', return_type='dir')
assert os.path.exists(result[0])
assert os.path.isdir(result[0])
+ result = layout.get(target='subject', type='phasediff', return_type='file')
+ assert all([os.path.exists(f) for f in result])
def test_unique_and_count(self, layout):
result = layout.unique('subject')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/grabbles/grabbit.git@afe361809ca5c040a46caa9f8a9bae017bcc706e#egg=grabbit
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: grabbit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/grabbit
| [
"grabbit/tests/test_core.py::TestLayout::test_querying"
]
| []
| [
"grabbit/tests/test_core.py::TestFile::test_init",
"grabbit/tests/test_core.py::TestFile::test_matches",
"grabbit/tests/test_core.py::TestFile::test_named_tuple",
"grabbit/tests/test_core.py::TestEntity::test_init",
"grabbit/tests/test_core.py::TestEntity::test_matches",
"grabbit/tests/test_core.py::TestEntity::test_unique_and_count",
"grabbit/tests/test_core.py::TestEntity::test_add_file",
"grabbit/tests/test_core.py::TestLayout::test_init",
"grabbit/tests/test_core.py::TestLayout::test_absolute_paths",
"grabbit/tests/test_core.py::TestLayout::test_dynamic_getters",
"grabbit/tests/test_core.py::TestLayout::test_unique_and_count"
]
| []
| MIT License | 707 | [
"grabbit/core.py"
]
| [
"grabbit/core.py"
]
|
|
pre-commit__pre-commit-400 | f11338ccfa612e36a6c1f2dc688080ec08fd66b0 | 2016-08-18 14:37:30 | f11338ccfa612e36a6c1f2dc688080ec08fd66b0 | diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py
index a2978b9..c8ee9c2 100644
--- a/pre_commit/staged_files_only.py
+++ b/pre_commit/staged_files_only.py
@@ -45,7 +45,7 @@ def staged_files_only(cmd_runner):
finally:
# Try to apply the patch we saved
try:
- cmd_runner.run(['git', 'apply', patch_filename])
+ cmd_runner.run(('git', 'apply', patch_filename), encoding=None)
except CalledProcessError:
logger.warning(
'Stashed changes conflicted with hook auto-fixes... '
@@ -55,7 +55,7 @@ def staged_files_only(cmd_runner):
# by hooks.
# Roll back the changes made by hooks.
cmd_runner.run(['git', 'checkout', '--', '.'])
- cmd_runner.run(['git', 'apply', patch_filename])
+ cmd_runner.run(('git', 'apply', patch_filename), encoding=None)
logger.info('Restored changes from {0}.'.format(patch_filename))
else:
# There weren't any staged files so we don't need to do anything
| Stashed changes lost if hook fails with non-UTF-8 diff containing trailing whitespace
Hi,
A colleague almost lost all the changes she was working on after launching a `git commit` (with zero file added) and `pre-commit` crashing without restoring its [patch](https://github.com/pre-commit/pre-commit/blob/master/pre_commit/staged_files_only.py#L15).
Here is the terminal message she got:
```
[WARNING] Stashed changes conflicted with hook auto-fixes... Rolling back fixes...
An unexpected error has occurred: CalledProcessError: Command: ['git', 'apply', 'C:\\Users\\toto\\.pre-commit\\patch1471341002']
```
This seems very similar to a past solved issue:
https://github.com/pre-commit/pre-commit/issues/176
I think it had to do with CRLF conversion.
I'm going to try to reproduce this.
| pre-commit/pre-commit | diff --git a/tests/staged_files_only_test.py b/tests/staged_files_only_test.py
index 00f4cca..993d33d 100644
--- a/tests/staged_files_only_test.py
+++ b/tests/staged_files_only_test.py
@@ -280,3 +280,28 @@ def test_stage_non_utf8_changes(foo_staged, cmd_runner):
with staged_files_only(cmd_runner):
_test_foo_state(foo_staged)
_test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
+
+
+def test_non_utf8_conflicting_diff(foo_staged, cmd_runner):
+ """Regression test for #397"""
+ # The trailing whitespace is important here, this triggers git to produce
+ # an error message which looks like:
+ #
+ # ...patch1471530032:14: trailing whitespace.
+ # [[unprintable character]][[space character]]
+ # error: patch failed: foo:1
+ # error: foo: patch does not apply
+ #
+ # Previously, the error message (though discarded immediately) was being
+ # decoded with the UTF-8 codec (causing a crash)
+ contents = 'ú \n'
+ with io.open('foo', 'w', encoding='latin-1') as foo_file:
+ foo_file.write(contents)
+
+ _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
+ with staged_files_only(cmd_runner):
+ _test_foo_state(foo_staged)
+ # Create a conflicting diff that will need to be rolled back
+ with io.open('foo', 'w') as foo_file:
+ foo_file.write('')
+ _test_foo_state(foo_staged, contents, 'AM', encoding='latin-1')
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aspy.yaml==1.3.0
attrs==25.3.0
cached-property==2.0.1
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
flake8==7.2.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
mock==5.2.0
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
-e git+https://github.com/pre-commit/pre-commit.git@f11338ccfa612e36a6c1f2dc688080ec08fd66b0#egg=pre_commit
pycodestyle==2.13.0
pyflakes==3.3.1
pyterminalsize==0.1.0
pytest==8.3.5
PyYAML==6.0.2
referencing==0.36.2
rpds-py==0.24.0
tomli==2.2.1
typing_extensions==4.13.0
virtualenv==20.29.3
| name: pre-commit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aspy-yaml==1.3.0
- attrs==25.3.0
- cached-property==2.0.1
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==7.2.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- mock==5.2.0
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pyterminalsize==0.1.0
- pytest==8.3.5
- pyyaml==6.0.2
- referencing==0.36.2
- rpds-py==0.24.0
- setuptools==18.4
- tomli==2.2.1
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/pre-commit
| [
"tests/staged_files_only_test.py::test_non_utf8_conflicting_diff"
]
| []
| [
"tests/staged_files_only_test.py::test_foo_staged",
"tests/staged_files_only_test.py::test_foo_nothing_unstaged",
"tests/staged_files_only_test.py::test_foo_something_unstaged",
"tests/staged_files_only_test.py::test_foo_something_unstaged_diff_color_always",
"tests/staged_files_only_test.py::test_foo_both_modify_non_conflicting",
"tests/staged_files_only_test.py::test_foo_both_modify_conflicting",
"tests/staged_files_only_test.py::test_img_staged",
"tests/staged_files_only_test.py::test_img_nothing_unstaged",
"tests/staged_files_only_test.py::test_img_something_unstaged",
"tests/staged_files_only_test.py::test_img_conflict",
"tests/staged_files_only_test.py::test_diff_returns_1_no_diff_though",
"tests/staged_files_only_test.py::test_stage_utf8_changes",
"tests/staged_files_only_test.py::test_stage_non_utf8_changes"
]
| []
| MIT License | 708 | [
"pre_commit/staged_files_only.py"
]
| [
"pre_commit/staged_files_only.py"
]
|
|
pre-commit__pre-commit-hooks-135 | 09d1747840bfb1457e9b8876b4c1f577d00a2a37 | 2016-08-18 14:46:54 | c8a1c91c762b8e24fdc5a33455ec10662f523328 | asottile: Needs a test, also I'm not the biggest fan of fileinput, I think it might be easier just using `io.open` instead? (that'd remove the awkward temporary file that may clobber other files in the working directory).
Lucas-C: Test added
Lucas-C: I'll do the move to `io.open` in a 2nd commit, when implementing the "option 2" we discussed.
asottile: New test seems falling :(
coveralls:
[](https://coveralls.io/builds/7522605)
Coverage remained the same at 100.0% when pulling **ab1afc7680c9210bb1782e2cce1ebf814068d341 on Lucas-C:master** into **775a7906cd6004bec2401a85ed16d5b3540d2f94 on pre-commit:master**.
coveralls:
[](https://coveralls.io/builds/7522605)
Coverage remained the same at 100.0% when pulling **ab1afc7680c9210bb1782e2cce1ebf814068d341 on Lucas-C:master** into **775a7906cd6004bec2401a85ed16d5b3540d2f94 on pre-commit:master**.
coveralls:
[](https://coveralls.io/builds/7524557)
Coverage remained the same at 100.0% when pulling **211beb11487534c0462201ad1fc6964ed92a0a8b on Lucas-C:master** into **775a7906cd6004bec2401a85ed16d5b3540d2f94 on pre-commit:master**.
coveralls:
[](https://coveralls.io/builds/7525502)
Coverage remained the same at 100.0% when pulling **3cf15fdbfec70f379265a2152852249909af6daa on Lucas-C:master** into **775a7906cd6004bec2401a85ed16d5b3540d2f94 on pre-commit:master**.
coveralls:
[](https://coveralls.io/builds/7525502)
Coverage remained the same at 100.0% when pulling **3cf15fdbfec70f379265a2152852249909af6daa on Lucas-C:master** into **775a7906cd6004bec2401a85ed16d5b3540d2f94 on pre-commit:master**.
coveralls:
[](https://coveralls.io/builds/7525795)
Coverage remained the same at 100.0% when pulling **ad38d9bfc490e34409415eba305e1cbd3f21757f on Lucas-C:master** into **775a7906cd6004bec2401a85ed16d5b3540d2f94 on pre-commit:master**.
| diff --git a/.travis.yml b/.travis.yml
index 41f9e7d..de7c294 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,9 +1,8 @@
language: python
-python: 3.5
env: # These should match the tox env list
- TOXENV=py27
+ - TOXENV=py33
- TOXENV=py34
- - TOXENV=py35
- TOXENV=pypy
- TOXENV=pypy3
install: pip install coveralls tox
diff --git a/README.md b/README.md
index ee77b25..6c3a3ec 100644
--- a/README.md
+++ b/README.md
@@ -52,9 +52,8 @@ Add this to your `.pre-commit-config.yaml`
- Use `args: ['--django']` to match `test*.py` instead.
- `pyflakes` - Run pyflakes on your python files.
- `pretty-format-json` - Checks that all your JSON files are pretty
- - `--autofix` - automatically format json files
- - `--no-sort-keys` - when autofixing, retain the original key ordering (instead of sorting the keys)
- - `--indent ...` - Control the indentation (either a number for a number of spaces or a string of whitespace).
+ - Use `args: ['--autofix']` to automatically fixing the encountered not-pretty-formatted files and
+ `args: ['--no-sort-keys']` to disable the sort on the keys.
- `requirements-txt-fixer` - Sorts entries in requirements.txt
- `trailing-whitespace` - Trims trailing whitespace.
- Markdown linebreak trailing spaces preserved for `.md` and`.markdown`;
diff --git a/appveyor.yml b/appveyor.yml
index 30eab46..cf8f927 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,10 +1,10 @@
environment:
matrix:
- TOXENV: py27
- - TOXENV: py35
+ - TOXENV: py34
install:
- - "SET PATH=C:\\Python35;C:\\Python35\\Scripts;%PATH%"
+ - "SET PATH=C:\\Python34;C:\\Python34\\Scripts;%PATH%"
- pip install tox
# Not a C# project
diff --git a/pre_commit_hooks/detect_aws_credentials.py b/pre_commit_hooks/detect_aws_credentials.py
index 4c51546..f727078 100644
--- a/pre_commit_hooks/detect_aws_credentials.py
+++ b/pre_commit_hooks/detect_aws_credentials.py
@@ -4,7 +4,7 @@ from __future__ import unicode_literals
import argparse
import os
-from six.moves import configparser
+from six.moves import configparser # pylint: disable=import-error
def get_your_keys(credentials_file):
diff --git a/pre_commit_hooks/trailing_whitespace_fixer.py b/pre_commit_hooks/trailing_whitespace_fixer.py
index c159071..fa9b7dd 100644
--- a/pre_commit_hooks/trailing_whitespace_fixer.py
+++ b/pre_commit_hooks/trailing_whitespace_fixer.py
@@ -1,24 +1,27 @@
from __future__ import print_function
import argparse
-import fileinput
import os
import sys
from pre_commit_hooks.util import cmd_output
-def _fix_file(filename, markdown=False):
- for line in fileinput.input([filename], inplace=True):
- # preserve trailing two-space for non-blank lines in markdown files
- if markdown and (not line.isspace()) and (line.endswith(" \n")):
- line = line.rstrip(' \n')
- # only preserve if there are no trailing tabs or unusual whitespace
- if not line[-1].isspace():
- print(line + " ")
- continue
+def _fix_file(filename, is_markdown):
+ with open(filename, mode='rb') as file_processed:
+ lines = file_processed.readlines()
+ lines = [_process_line(line, is_markdown) for line in lines]
+ with open(filename, mode='wb') as file_processed:
+ for line in lines:
+ file_processed.write(line)
- print(line.rstrip())
+
+def _process_line(line, is_markdown):
+ # preserve trailing two-space for non-blank lines in markdown files
+ eol = b'\r\n' if line[-2:] == b'\r\n' else b'\n'
+ if is_markdown and (not line.isspace()) and line.endswith(b' ' + eol):
+ return line.rstrip() + b' ' + eol
+ return line.rstrip() + eol
def fix_trailing_whitespace(argv=None):
@@ -64,14 +67,13 @@ def fix_trailing_whitespace(argv=None):
.format(ext)
)
- if bad_whitespace_files:
- for bad_whitespace_file in bad_whitespace_files:
- print('Fixing {0}'.format(bad_whitespace_file))
- _, extension = os.path.splitext(bad_whitespace_file.lower())
- _fix_file(bad_whitespace_file, all_markdown or extension in md_exts)
- return 1
- else:
- return 0
+ return_code = 0
+ for bad_whitespace_file in bad_whitespace_files:
+ print('Fixing {0}'.format(bad_whitespace_file))
+ _, extension = os.path.splitext(bad_whitespace_file.lower())
+ _fix_file(bad_whitespace_file, all_markdown or extension in md_exts)
+ return_code = 1
+ return return_code
if __name__ == '__main__':
diff --git a/pylintrc b/pylintrc
new file mode 100644
index 0000000..c905a37
--- /dev/null
+++ b/pylintrc
@@ -0,0 +1,22 @@
+[MESSAGES CONTROL]
+disable=bad-open-mode,invalid-name,missing-docstring,redefined-outer-name,star-args,locally-disabled,locally-enabled
+
+[REPORTS]
+output-format=colorized
+reports=no
+
+[BASIC]
+const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
+
+[FORMAT]
+max-line-length=131
+
+[TYPECHECK]
+ignored-classes=pytest
+
+[DESIGN]
+min-public-methods=0
+
+[SIMILARITIES]
+min-similarity-lines=5
+ignore-imports=yes
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 2922ef5..97343d5 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,7 +1,9 @@
-e .
+astroid<1.3.3
coverage
flake8
mock
pre-commit
+pylint<1.4
pytest
diff --git a/setup.py b/setup.py
index f92f821..e8c5000 100644
--- a/setup.py
+++ b/setup.py
@@ -11,19 +11,20 @@ setup(
author='Anthony Sottile',
author_email='[email protected]',
+ platforms='linux',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
- packages=find_packages(exclude=('tests*', 'testing*')),
+ packages=find_packages('.', exclude=('tests*', 'testing*')),
install_requires=[
# quickfix to prevent pep8 conflicts
'flake8!=2.5.3',
diff --git a/tox.ini b/tox.ini
index f2ac125..a9b17c0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
[tox]
project = pre_commit_hooks
# These should match the travis env list
-envlist = py27,py34,py35,pypy,pypy3
+envlist = py27,py33,py34,pypy,pypy3
[testenv]
deps = -rrequirements-dev.txt
@@ -12,6 +12,7 @@ commands =
coverage report --show-missing --fail-under 100
pre-commit install -f --install-hooks
pre-commit run --all-files
+ pylint {[tox]project} testing tests setup.py
[testenv:venv]
envdir = venv-{[tox]project}
| Non-utf8 file will cause trailing-whitespace to truncate the file contents
Hi,
(I started investigating this in https://github.com/pre-commit/pre-commit/issues/397)
Bug reproductions steps, after setting up a git repository with the "trailing-whitespace" pre-commit hook installed (with the latest sha: 775a7906cd6004bec2401a85ed16d5b3540d2f94):
```
echo -en '\x3c\x61\x3e\xe9\x20\x0a\x3c\x2f\x61\x3e' > minimal_crasher.xml
git add minimal_crasher.xml
pre-commit run trailing-whitespace --files minimal_crasher.xml
```
Results in:
````
Trim Trailing Whitespace...............................................................................................................................................................Failed
hookid: trailing-whitespace
Files were modified by this hook. Additional output:
Fixing minimal_crasher.xml
Traceback (most recent call last):
File "/home/lucas_cimon/.pre-commit/repok3wjweh4/py_env-default/bin/trailing-whitespace-fixer", line 9, in <module>
load_entry_point('pre-commit-hooks==0.6.0', 'console_scripts', 'trailing-whitespace-fixer')()
File "/home/lucas_cimon/.pre-commit/repok3wjweh4/py_env-default/lib/python3.4/site-packages/pre_commit_hooks/trailing_whitespace_fixer.py", line 71, in fix_trailing_whitespace
_fix_file(bad_whitespace_file, all_markdown or extension in md_exts)
File "/home/lucas_cimon/.pre-commit/repok3wjweh4/py_env-default/lib/python3.4/site-packages/pre_commit_hooks/trailing_whitespace_fixer.py", line 12, in _fix_file
for line in fileinput.input([filename], inplace=True):
File "/usr/lib/python3.4/fileinput.py", line 263, in __next__
line = self.readline()
File "/usr/lib/python3.4/fileinput.py", line 363, in readline
self._buffer = self._file.readlines(self._bufsize)
File "/home/lucas_cimon/.pre-commit/repok3wjweh4/py_env-default/lib/python3.4/codecs.py", line 319, in decode
(result, consumed) = self._buffer_decode(data, self.errors, final)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe9 in position 3: invalid continuation byte
```
And **all the content of `minimal_crasher.xml` lost** !!
In this test the xml file is very small, but it can happen with any file size nor number of staged modifications !
| pre-commit/pre-commit-hooks | diff --git a/tests/check_merge_conflict_test.py b/tests/check_merge_conflict_test.py
index f1528b2..8141ade 100644
--- a/tests/check_merge_conflict_test.py
+++ b/tests/check_merge_conflict_test.py
@@ -12,6 +12,9 @@ from testing.util import get_resource_path
from testing.util import write_file
+# pylint:disable=unused-argument
+
+
@pytest.yield_fixture
def f1_is_a_conflict_file(tmpdir):
# Make a merge conflict
diff --git a/tests/trailing_whitespace_fixer_test.py b/tests/trailing_whitespace_fixer_test.py
index 6f4fdfd..3a72ccb 100644
--- a/tests/trailing_whitespace_fixer_test.py
+++ b/tests/trailing_whitespace_fixer_test.py
@@ -42,7 +42,7 @@ def test_fixes_trailing_markdown_whitespace(filename, input_s, output, tmpdir):
MD_TESTS_2 = (
('foo.txt', 'foo \nbar \n \n', 'foo \nbar\n\n'),
('bar.Markdown', 'bar \nbaz\t\n\t\n', 'bar \nbaz\n\n'),
- ('bar.MD', 'bar \nbaz\t \n\t\n', 'bar \nbaz\n\n'),
+ ('bar.MD', 'bar \nbaz\t \n\t\n', 'bar \nbaz \n\n'),
('.txt', 'baz \nquux \t\n\t\n', 'baz\nquux\n\n'),
('txt', 'foo \nbaz \n\t\n', 'foo\nbaz\n\n'),
)
@@ -103,3 +103,12 @@ def test_no_markdown_linebreak_ext_opt(filename, input_s, output, tmpdir):
def test_returns_zero_for_no_changes():
assert fix_trailing_whitespace([__file__]) == 0
+
+
+def test_preserve_non_utf8_file(tmpdir):
+ non_utf8_bytes_content = b'<a>\xe9 \n</a>\n'
+ path = tmpdir.join('file.txt')
+ path.write_binary(non_utf8_bytes_content)
+ ret = fix_trailing_whitespace([path.strpath])
+ assert ret == 1
+ assert path.size() == (len(non_utf8_bytes_content) - 1)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_git_commit_hash",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 8
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | autopep8==2.3.2
cfgv==3.4.0
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
flake8==7.2.0
identify==2.6.9
iniconfig==2.1.0
mccabe==0.7.0
mock==5.2.0
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
-e git+https://github.com/pre-commit/pre-commit-hooks.git@09d1747840bfb1457e9b8876b4c1f577d00a2a37#egg=pre_commit_hooks
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
PyYAML==6.0.2
simplejson==3.20.1
six==1.17.0
tomli==2.2.1
virtualenv==20.29.3
| name: pre-commit-hooks
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- autopep8==2.3.2
- cfgv==3.4.0
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- flake8==7.2.0
- identify==2.6.9
- iniconfig==2.1.0
- mccabe==0.7.0
- mock==5.2.0
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- pyyaml==6.0.2
- simplejson==3.20.1
- six==1.17.0
- tomli==2.2.1
- virtualenv==20.29.3
prefix: /opt/conda/envs/pre-commit-hooks
| [
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt[bar.MD-bar",
"tests/trailing_whitespace_fixer_test.py::test_preserve_non_utf8_file"
]
| []
| [
"tests/check_merge_conflict_test.py::test_does_not_care_when_not_in_a_merge",
"tests/trailing_whitespace_fixer_test.py::test_fixes_trailing_whitespace[foo",
"tests/trailing_whitespace_fixer_test.py::test_fixes_trailing_whitespace[bar\\t\\nbaz\\t\\n-bar\\nbaz\\n]",
"tests/trailing_whitespace_fixer_test.py::test_fixes_trailing_markdown_whitespace[foo.md-foo",
"tests/trailing_whitespace_fixer_test.py::test_fixes_trailing_markdown_whitespace[bar.Markdown-bar",
"tests/trailing_whitespace_fixer_test.py::test_fixes_trailing_markdown_whitespace[.md-baz",
"tests/trailing_whitespace_fixer_test.py::test_fixes_trailing_markdown_whitespace[txt-foo",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt[foo.txt-foo",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt[bar.Markdown-bar",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt[.txt-baz",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt[txt-foo",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt_all[foo.baz-foo",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_opt_all[bar-bar",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_badopt[--]",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_badopt[a.b]",
"tests/trailing_whitespace_fixer_test.py::test_markdown_linebreak_ext_badopt[a/b]",
"tests/trailing_whitespace_fixer_test.py::test_no_markdown_linebreak_ext_opt[bar.md-bar",
"tests/trailing_whitespace_fixer_test.py::test_no_markdown_linebreak_ext_opt[bar.markdown-baz",
"tests/trailing_whitespace_fixer_test.py::test_returns_zero_for_no_changes"
]
| []
| MIT License | 709 | [
"setup.py",
".travis.yml",
"appveyor.yml",
"README.md",
"tox.ini",
"pre_commit_hooks/detect_aws_credentials.py",
"pre_commit_hooks/trailing_whitespace_fixer.py",
"requirements-dev.txt",
"pylintrc"
]
| [
"setup.py",
".travis.yml",
"appveyor.yml",
"README.md",
"tox.ini",
"pre_commit_hooks/detect_aws_credentials.py",
"pre_commit_hooks/trailing_whitespace_fixer.py",
"requirements-dev.txt",
"pylintrc"
]
|
googleapis__gax-python-125 | e8f6daaecfcbbf01fc78a54542108295aaf7f356 | 2016-08-18 22:57:35 | 7b3172b9e8ebd1e513955bf60b42aa5f63d37d4a | codecov-io: ## [Current coverage](https://codecov.io/gh/googleapis/gax-python/pull/125?src=pr) is 97.19% (diff: 100%)
> Merging [#125](https://codecov.io/gh/googleapis/gax-python/pull/125?src=pr) into [master](https://codecov.io/gh/googleapis/gax-python/branch/master?src=pr) will not change coverage
```diff
@@ master #125 diff @@
==========================================
Files 8 8
Lines 607 607
Methods 0 0
Messages 0 0
Branches 0 0
==========================================
Hits 590 590
Misses 17 17
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [e8f6daa...009253d](https://codecov.io/gh/googleapis/gax-python/compare/e8f6daaecfcbbf01fc78a54542108295aaf7f356...009253dc3736831b9b7dd2f3cf55f72dbbae9d14?src=pr)
jmuk: LGTM | diff --git a/google/gax/__init__.py b/google/gax/__init__.py
index 64eea72..09c38a5 100644
--- a/google/gax/__init__.py
+++ b/google/gax/__init__.py
@@ -48,7 +48,7 @@ CallOptions belongs will attempt to inherit that field from its default
settings."""
-class CallSettings(object):
+class _CallSettings(object):
"""Encapsulates the call settings for an API call."""
# pylint: disable=too-few-public-methods
def __init__(self, timeout=30, retry=None, page_descriptor=None,
@@ -94,10 +94,10 @@ class CallSettings(object):
return self.page_token is None
def merge(self, options):
- """Returns a new CallSettings merged from this and a CallOptions object.
+ """Returns new _CallSettings merged from this and a CallOptions object.
Note that passing if the CallOptions instance specifies a page_token,
- the merged CallSettings will have ``flatten_pages`` disabled. This
+ the merged _CallSettings will have ``flatten_pages`` disabled. This
permits toggling per-resource/per-page page streaming.
Args:
@@ -106,10 +106,10 @@ class CallSettings(object):
object
Returns:
- A :class:`CallSettings` object.
+ A :class:`_CallSettings` object.
"""
if not options:
- return CallSettings(
+ return _CallSettings(
timeout=self.timeout, retry=self.retry,
page_descriptor=self.page_descriptor,
page_token=self.page_token,
@@ -142,7 +142,7 @@ class CallSettings(object):
kwargs = self.kwargs.copy()
kwargs.update(options.kwargs)
- return CallSettings(
+ return _CallSettings(
timeout=timeout, retry=retry,
page_descriptor=self.page_descriptor, page_token=page_token,
bundler=bundler, bundle_descriptor=self.bundle_descriptor,
diff --git a/google/gax/api_callable.py b/google/gax/api_callable.py
index 955882e..622c08e 100644
--- a/google/gax/api_callable.py
+++ b/google/gax/api_callable.py
@@ -35,7 +35,7 @@ import time
from future import utils
-from . import (BackoffSettings, BundleOptions, bundling, CallSettings, config,
+from . import (BackoffSettings, BundleOptions, bundling, _CallSettings, config,
PageIterator, ResourceIterator, RetryOptions)
from .errors import GaxError, RetryError
@@ -301,7 +301,7 @@ def construct_settings(
service_name, client_config, config_override,
retry_names, bundle_descriptors=None, page_descriptors=None,
kwargs=None):
- """Constructs a dictionary mapping method names to CallSettings.
+ """Constructs a dictionary mapping method names to _CallSettings.
The ``client_config`` parameter is parsed from a client configuration JSON
file of the form:
@@ -406,7 +406,7 @@ def construct_settings(
_construct_retry(overriding_method, overrides.get('retry_codes'),
overrides.get('retry_params'), retry_names))
- defaults[snake_name] = CallSettings(
+ defaults[snake_name] = _CallSettings(
timeout=timeout, retry=retry,
page_descriptor=page_descriptors.get(snake_name),
bundler=bundler, bundle_descriptor=bundle_descriptor,
@@ -452,7 +452,7 @@ def create_api_call(func, settings):
Args:
func (callable[[object], object]): is used to make a bare rpc call
- settings (:class:`CallSettings`): provides the settings for this call
+ settings (:class:`_CallSettings`): provides the settings for this call
Returns:
func (callable[[object], object]): a bound method on a request stub used
| Rename and refactor CallSettings
Currently we have CallSettings and CallOptions, and it's hard to say their roles and the differences.
Actually CallSettings is the settings (or configurations) for an API call (or a method), such as the page descriptors or bundling descriptors, retry parameters.
On the other hand, CallOptions is the optional data to modify the behavior of individual invocation of the methods.
To me,
- CallSettings isn't a very clear name. 'Call-' prefix sounds like per-call (per-invocation) thing. Maybe, `MethodSettings`?
- the 'merge' method to create a new call settings doesn't look a good design after the redesign of create_api_call. I feel like CallSettings (or MethodSettings) should hold the default CallOptions instance, and CallOptions instance should have the `merge()` method to create the actual options for the invocation.
Thoughts? | googleapis/gax-python | diff --git a/test/test_api_callable.py b/test/test_api_callable.py
index 5621619..f45c5f4 100644
--- a/test/test_api_callable.py
+++ b/test/test_api_callable.py
@@ -36,7 +36,7 @@ import unittest2
from google.gax import (
api_callable, bundling, BackoffSettings, BundleDescriptor, BundleOptions,
- CallSettings, CallOptions, INITIAL_PAGE, PageDescriptor, RetryOptions)
+ _CallSettings, CallOptions, INITIAL_PAGE, PageDescriptor, RetryOptions)
from google.gax.errors import GaxError, RetryError
@@ -116,19 +116,19 @@ class AnotherException(Exception):
class TestCreateApiCallable(unittest2.TestCase):
def test_call_api_call(self):
- settings = CallSettings()
+ settings = _CallSettings()
my_callable = api_callable.create_api_call(
lambda _req, _timeout: 42, settings)
self.assertEqual(my_callable(None), 42)
def test_call_override(self):
- settings = CallSettings(timeout=10)
+ settings = _CallSettings(timeout=10)
my_callable = api_callable.create_api_call(
lambda _req, timeout: timeout, settings)
self.assertEqual(my_callable(None, CallOptions(timeout=20)), 20)
def test_call_kwargs(self):
- settings = CallSettings(kwargs={'key': 'value'})
+ settings = _CallSettings(kwargs={'key': 'value'})
my_callable = api_callable.create_api_call(
lambda _req, _timeout, **kwargs: kwargs['key'], settings)
self.assertEqual(my_callable(None), 'value')
@@ -150,7 +150,7 @@ class TestCreateApiCallable(unittest2.TestCase):
(to_attempt - 1) + [mock.DEFAULT])
mock_call.return_value = 1729
mock_time.return_value = 0
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(mock_call, settings)
self.assertEqual(my_callable(None), 1729)
self.assertEqual(mock_call.call_count, to_attempt)
@@ -163,7 +163,7 @@ class TestCreateApiCallable(unittest2.TestCase):
mock_call.side_effect = CustomException('', _FAKE_STATUS_CODE_1)
mock_time.return_value = 0
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(mock_call, settings)
self.assertRaises(CustomException, my_callable, None)
self.assertEqual(mock_call.call_count, 1)
@@ -179,7 +179,7 @@ class TestCreateApiCallable(unittest2.TestCase):
BackoffSettings(0, 0, 0, 0, 0, 0, 1))
mock_time.side_effect = [0, 2]
mock_exc_to_code.side_effect = lambda e: e.code
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(fake_call, settings)
try:
@@ -198,7 +198,7 @@ class TestCreateApiCallable(unittest2.TestCase):
mock_call = mock.Mock()
mock_call.side_effect = CustomException('', _FAKE_STATUS_CODE_1)
mock_time.side_effect = ([0] * to_attempt + [2])
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(mock_call, settings)
try:
@@ -219,7 +219,7 @@ class TestCreateApiCallable(unittest2.TestCase):
mock_call = mock.Mock()
mock_call.side_effect = CustomException('', _FAKE_STATUS_CODE_2)
mock_time.return_value = 0
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(mock_call, settings)
self.assertRaises(Exception, my_callable, None)
self.assertEqual(mock_call.call_count, 1)
@@ -230,7 +230,7 @@ class TestCreateApiCallable(unittest2.TestCase):
retry = RetryOptions(
[_FAKE_STATUS_CODE_1],
BackoffSettings(0, 0, 0, 0, 0, 0, 0))
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(lambda: None, settings)
self.assertRaises(RetryError, my_callable, None)
@@ -258,7 +258,7 @@ class TestCreateApiCallable(unittest2.TestCase):
params = BackoffSettings(3, 2, 24, 5, 2, 80, 2500)
retry = RetryOptions([_FAKE_STATUS_CODE_1], params)
- settings = CallSettings(timeout=0, retry=retry)
+ settings = _CallSettings(timeout=0, retry=retry)
my_callable = api_callable.create_api_call(mock_call, settings)
try:
@@ -315,7 +315,7 @@ class TestCreateApiCallable(unittest2.TestCase):
with mock.patch('grpc.UnaryUnaryMultiCallable') as mock_grpc:
mock_grpc.side_effect = grpc_return_value
- settings = CallSettings(
+ settings = _CallSettings(
page_descriptor=fake_grpc_func_descriptor, timeout=0)
my_callable = api_callable.create_api_call(
mock_grpc, settings=settings)
@@ -344,7 +344,7 @@ class TestCreateApiCallable(unittest2.TestCase):
expected)
def test_bundling_page_streaming_error(self):
- settings = CallSettings(
+ settings = _CallSettings(
page_descriptor=object(), bundle_descriptor=object(),
bundler=object())
with self.assertRaises(ValueError):
@@ -362,7 +362,7 @@ class TestCreateApiCallable(unittest2.TestCase):
def my_func(request, dummy_timeout):
return len(request.elements)
- settings = CallSettings(
+ settings = _CallSettings(
bundler=bundler, bundle_descriptor=fake_grpc_func_descriptor,
timeout=0)
my_callable = api_callable.create_api_call(my_func, settings)
@@ -478,9 +478,9 @@ class TestCreateApiCallable(unittest2.TestCase):
raise AnotherException
gax_error_callable = api_callable.create_api_call(
- abortion_error_func, CallSettings())
+ abortion_error_func, _CallSettings())
self.assertRaises(GaxError, gax_error_callable, None)
other_error_callable = api_callable.create_api_call(
- other_error_func, CallSettings())
+ other_error_func, _CallSettings())
self.assertRaises(AnotherException, other_error_callable, None)
diff --git a/test/test_gax.py b/test/test_gax.py
index 77220f8..74ea365 100644
--- a/test/test_gax.py
+++ b/test/test_gax.py
@@ -35,7 +35,7 @@ from __future__ import absolute_import
import unittest2
from google.gax import (
- BundleOptions, CallOptions, CallSettings, INITIAL_PAGE, OPTION_INHERIT,
+ BundleOptions, CallOptions, _CallSettings, INITIAL_PAGE, OPTION_INHERIT,
RetryOptions)
@@ -72,7 +72,7 @@ class TestCallSettings(unittest2.TestCase):
def test_settings_merge_options1(self):
options = CallOptions(timeout=46)
- settings = CallSettings(timeout=9, page_descriptor=None, retry=None)
+ settings = _CallSettings(timeout=9, page_descriptor=None, retry=None)
final = settings.merge(options)
self.assertEqual(final.timeout, 46)
self.assertIsNone(final.retry)
@@ -81,7 +81,7 @@ class TestCallSettings(unittest2.TestCase):
def test_settings_merge_options2(self):
retry = RetryOptions(None, None)
options = CallOptions(retry=retry)
- settings = CallSettings(
+ settings = _CallSettings(
timeout=9, page_descriptor=None, retry=RetryOptions(None, None))
final = settings.merge(options)
self.assertEqual(final.timeout, 9)
@@ -92,8 +92,8 @@ class TestCallSettings(unittest2.TestCase):
retry = RetryOptions(None, None)
page_descriptor = object()
options = CallOptions(timeout=46, page_token=INITIAL_PAGE)
- settings = CallSettings(timeout=9, retry=retry,
- page_descriptor=page_descriptor)
+ settings = _CallSettings(timeout=9, retry=retry,
+ page_descriptor=page_descriptor)
final = settings.merge(options)
self.assertEqual(final.timeout, 46)
self.assertEqual(final.page_descriptor, page_descriptor)
@@ -102,7 +102,7 @@ class TestCallSettings(unittest2.TestCase):
self.assertEqual(final.retry, retry)
def test_settings_merge_none(self):
- settings = CallSettings(
+ settings = _CallSettings(
timeout=23, page_descriptor=object(), bundler=object(),
retry=object())
final = settings.merge(None)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
exceptiongroup==1.2.2
future==1.0.0
-e git+https://github.com/googleapis/gax-python.git@e8f6daaecfcbbf01fc78a54542108295aaf7f356#egg=google_gax
grpcio==1.62.3
grpcio-tools==1.62.3
httplib2==0.22.0
importlib-metadata==6.7.0
iniconfig==2.0.0
linecache2==1.0.0
mock==5.2.0
oauth2client==4.1.3
packaging==24.0
pluggy==1.2.0
ply==3.8
protobuf==4.24.4
pyasn1==0.5.1
pyasn1-modules==0.3.0
pyparsing==3.1.4
pytest==7.4.4
pytest-cov==4.1.0
pytest-timeout==2.3.1
rsa==4.9
six==1.17.0
tomli==2.0.1
traceback2==1.4.0
typing_extensions==4.7.1
unittest2==1.1.0
zipp==3.15.0
| name: gax-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- coverage==7.2.7
- exceptiongroup==1.2.2
- future==1.0.0
- grpcio==1.62.3
- grpcio-tools==1.62.3
- httplib2==0.22.0
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- linecache2==1.0.0
- mock==5.2.0
- oauth2client==4.1.3
- packaging==24.0
- pluggy==1.2.0
- ply==3.8
- protobuf==4.24.4
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pyparsing==3.1.4
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-timeout==2.3.1
- rsa==4.9
- six==1.17.0
- tomli==2.0.1
- traceback2==1.4.0
- typing-extensions==4.7.1
- unittest2==1.1.0
- zipp==3.15.0
prefix: /opt/conda/envs/gax-python
| [
"test/test_api_callable.py::TestCreateApiCallable::test_bundling",
"test/test_api_callable.py::TestCreateApiCallable::test_bundling_page_streaming_error",
"test/test_api_callable.py::TestCreateApiCallable::test_call_api_call",
"test/test_api_callable.py::TestCreateApiCallable::test_call_kwargs",
"test/test_api_callable.py::TestCreateApiCallable::test_call_override",
"test/test_api_callable.py::TestCreateApiCallable::test_catch_error",
"test/test_api_callable.py::TestCreateApiCallable::test_construct_settings",
"test/test_api_callable.py::TestCreateApiCallable::test_construct_settings_override",
"test/test_api_callable.py::TestCreateApiCallable::test_construct_settings_override2",
"test/test_api_callable.py::TestCreateApiCallable::test_no_retry_if_no_codes",
"test/test_api_callable.py::TestCreateApiCallable::test_page_streaming",
"test/test_api_callable.py::TestCreateApiCallable::test_retry",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_aborts_on_unexpected_exception",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_aborts_simple",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_exponential_backoff",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_times_out_no_response",
"test/test_api_callable.py::TestCreateApiCallable::test_retry_times_out_simple",
"test/test_gax.py::TestBundleOptions::test_cannot_construct_with_bad_options",
"test/test_gax.py::TestBundleOptions::test_cannot_construct_with_noarg_options",
"test/test_gax.py::TestCallSettings::test_call_options_simple",
"test/test_gax.py::TestCallSettings::test_cannot_construct_bad_options",
"test/test_gax.py::TestCallSettings::test_settings_merge_none",
"test/test_gax.py::TestCallSettings::test_settings_merge_options1",
"test/test_gax.py::TestCallSettings::test_settings_merge_options2",
"test/test_gax.py::TestCallSettings::test_settings_merge_options_page_streaming"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 710 | [
"google/gax/__init__.py",
"google/gax/api_callable.py"
]
| [
"google/gax/__init__.py",
"google/gax/api_callable.py"
]
|
laterpay__laterpay-client-python-82 | 3744237922be5c422568b701a6bf930418581be8 | 2016-08-22 06:03:06 | 3744237922be5c422568b701a6bf930418581be8 | coveralls:
[](https://coveralls.io/builds/7544833)
Coverage decreased (-0.8%) to 95.984% when pulling **77d70e7cc4f9e64280da7b9ea47521632be54062 on bugfix/requests-string-headers** into **3744237922be5c422568b701a6bf930418581be8 on develop**.
| diff --git a/laterpay/__init__.py b/laterpay/__init__.py
index c92f556..f2df4a4 100644
--- a/laterpay/__init__.py
+++ b/laterpay/__init__.py
@@ -421,10 +421,7 @@ class LaterPayClient(object):
"""
params = self._sign_and_encode(params=params, url=url, method=method)
- headers = {
- 'X-LP-APIVersion': 2,
- 'User-Agent': 'LaterPay Client - Python - v0.2'
- }
+ headers = self.get_request_headers()
if method == 'POST':
req = Request(url, data=params, headers=headers)
@@ -503,7 +500,7 @@ class LaterPayClient(object):
Return a ``dict`` of request headers to be sent to the API.
"""
return {
- 'X-LP-APIVersion': 2,
+ 'X-LP-APIVersion': '2',
# TODO: Add client version information.
'User-Agent': 'LaterPay Client Python',
}
| python-requests requires string only headers
As of python-requests 2.11 headers that are neither string not bytes are not accepted anymore. See kennethreitz/requests#3477 | laterpay/laterpay-client-python | diff --git a/tests/test_client.py b/tests/test_client.py
index e6ebc1c..f3069e7 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -302,7 +302,7 @@ class TestLaterPayClient(unittest.TestCase):
call = responses.calls[0]
- self.assertEqual(call.request.headers['X-LP-APIVersion'], 2)
+ self.assertEqual(call.request.headers['X-LP-APIVersion'], '2')
qd = parse_qs(urlparse(call.request.url).query)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio",
"flake8",
"coverage",
"pydocstyle",
"furl",
"mock",
"responses"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
execnet==1.9.0
flake8==5.0.4
furl==2.1.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
-e git+https://github.com/laterpay/laterpay-client-python.git@3744237922be5c422568b701a6bf930418581be8#egg=laterpay_client
mccabe==0.7.0
mock==5.2.0
orderedmultidict==1.0.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pydocstyle==6.3.0
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
requests==2.27.1
responses==0.17.0
six==1.17.0
snowballstemmer==2.2.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: laterpay-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- execnet==1.9.0
- flake8==5.0.4
- furl==2.1.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- orderedmultidict==1.0.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pydocstyle==6.3.0
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- requests==2.27.1
- responses==0.17.0
- six==1.17.0
- snowballstemmer==2.2.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/laterpay-client-python
| [
"tests/test_client.py::TestLaterPayClient::test_get_access_data_success"
]
| []
| [
"tests/test_client.py::TestItemDefinition::test_item_definition",
"tests/test_client.py::TestLaterPayClient::test_failure_url_param",
"tests/test_client.py::TestLaterPayClient::test_get_access_params",
"tests/test_client.py::TestLaterPayClient::test_get_add_url_no_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_add_url_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_add_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_buy_url_no_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_buy_url_product_key_param",
"tests/test_client.py::TestLaterPayClient::test_get_buy_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_controls_balance_url_all_defaults",
"tests/test_client.py::TestLaterPayClient::test_get_controls_balance_url_all_set",
"tests/test_client.py::TestLaterPayClient::test_get_controls_links_url_all_defaults",
"tests/test_client.py::TestLaterPayClient::test_get_controls_links_url_all_set_long",
"tests/test_client.py::TestLaterPayClient::test_get_controls_links_url_all_set_short",
"tests/test_client.py::TestLaterPayClient::test_get_gettoken_redirect",
"tests/test_client.py::TestLaterPayClient::test_get_login_dialog_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_login_dialog_url_without_use_dialog_api",
"tests/test_client.py::TestLaterPayClient::test_get_logout_dialog_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_logout_dialog_url_without_use_dialog_api",
"tests/test_client.py::TestLaterPayClient::test_get_signup_dialog_url_with_use_dialog_api_false",
"tests/test_client.py::TestLaterPayClient::test_get_signup_dialog_url_without_use_dialog_api",
"tests/test_client.py::TestLaterPayClient::test_get_web_url_extra_kwargs",
"tests/test_client.py::TestLaterPayClient::test_get_web_url_has_no_none_params",
"tests/test_client.py::TestLaterPayClient::test_has_token",
"tests/test_client.py::TestLaterPayClient::test_log_warning_for_skip_add_to_invoice_deprecation",
"tests/test_client.py::TestLaterPayClient::test_transaction_reference"
]
| []
| MIT License | 712 | [
"laterpay/__init__.py"
]
| [
"laterpay/__init__.py"
]
|
6si__shipwright-79 | 7d3ccf39acc79bb6d33a787e773227358764dd2c | 2016-08-22 09:51:49 | 7d3ccf39acc79bb6d33a787e773227358764dd2c | diff --git a/CHANGES.rst b/CHANGES.rst
index f034d37..89cf5f1 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,7 +1,8 @@
0.5.1 (unreleased)
------------------
-- Nothing changed yet.
+- Add --pull-cache to pull images from repository before building.
+ (`Issue #49 <https://github.com/6si/shipwright/issues/49>`_).
0.5.0 (2016-08-19)
diff --git a/shipwright/base.py b/shipwright/base.py
index 213d597..421f1af 100644
--- a/shipwright/base.py
+++ b/shipwright/base.py
@@ -4,10 +4,11 @@ from . import build, dependencies, docker, push
class Shipwright(object):
- def __init__(self, source_control, docker_client, tags):
+ def __init__(self, source_control, docker_client, tags, pull_cache=False):
self.source_control = source_control
self.docker_client = docker_client
self.tags = tags
+ self._pull_cache = pull_cache
def targets(self):
return self.source_control.targets()
@@ -18,7 +19,10 @@ class Shipwright(object):
return self._build(this_ref_str, targets)
def _build(self, this_ref_str, targets):
- for evt in build.do_build(self.docker_client, this_ref_str, targets):
+ client = self.docker_client
+ pull_cache = self._pull_cache
+ ref = this_ref_str
+ for evt in build.do_build(client, ref, targets, pull_cache):
yield evt
# now that we're built and tagged all the images.
diff --git a/shipwright/build.py b/shipwright/build.py
index 707d4f9..4ee1558 100644
--- a/shipwright/build.py
+++ b/shipwright/build.py
@@ -13,7 +13,7 @@ def _merge(d1, d2):
return d
-def do_build(client, build_ref, targets):
+def do_build(client, build_ref, targets, pull_cache):
"""
Generic function for building multiple images while
notifying a callback function with output produced.
@@ -39,11 +39,11 @@ def do_build(client, build_ref, targets):
parent_ref = None
if target.parent:
parent_ref = build_index.get(target.parent)
- for evt in build(client, parent_ref, target):
+ for evt in build(client, parent_ref, target, pull_cache):
yield evt
-def build(client, parent_ref, image):
+def build(client, parent_ref, image, pull_cache):
"""
builds the given image tagged with <build_ref> and ensures that
it depends on it's parent if it's part of this build group (shares
@@ -62,7 +62,25 @@ def build(client, parent_ref, image):
built_tags = docker.last_built_from_docker(client, image.name)
if image.ref in built_tags:
- return []
+ return
+
+ if pull_cache:
+ pull_evts = client.pull(
+ repository=image.name,
+ tag=image.ref,
+ stream=True,
+ )
+
+ failed = False
+ for evt in pull_evts:
+ event = process_event_(evt)
+ if 'error' in event:
+ failed = True
+ else:
+ yield event
+
+ if not failed:
+ return
build_evts = client.build(
fileobj=mkcontext(parent_ref, image.path),
@@ -73,4 +91,5 @@ def build(client, parent_ref, image):
dockerfile=os.path.basename(image.path),
)
- return (process_event_(evt) for evt in build_evts)
+ for evt in build_evts:
+ yield process_event_(evt)
diff --git a/shipwright/cli.py b/shipwright/cli.py
index 24f6f78..82eaf50 100644
--- a/shipwright/cli.py
+++ b/shipwright/cli.py
@@ -109,6 +109,11 @@ def argparser():
help='Build working tree, including uncommited and untracked changes',
action='store_true',
)
+ common.add_argument(
+ '--pull-cache',
+ help='When building try to pull previously built images',
+ action='store_true',
+ )
a_arg(
common, '-d', '--dependants',
help='Build DEPENDANTS and all its dependants',
@@ -157,7 +162,6 @@ def old_style_arg_dict(namespace):
'--exclude': _flatten(ns.exclude),
'--help': False,
'--no-build': getattr(ns, 'no_build', False),
- '--dirty': getattr(ns, 'dirty', False),
'--upto': _flatten(ns.upto),
'--x-assert-hostname': ns.x_assert_hostname,
'-H': ns.docker_host,
@@ -237,8 +241,10 @@ def run(path, arguments, client_cfg, environ, new_style_args=None):
if new_style_args is None:
dirty = False
+ pull_cache = False
else:
dirty = new_style_args.dirty
+ pull_cache = new_style_args.pull_cache
namespace = config['namespace']
name_map = config.get('names', {})
@@ -249,7 +255,7 @@ def run(path, arguments, client_cfg, environ, new_style_args=None):
'to commit these changes, re-run with the --dirty flag.'
)
- sw = Shipwright(scm, client, arguments['tags'])
+ sw = Shipwright(scm, client, arguments['tags'], pull_cache)
command = getattr(sw, command_name)
show_progress = sys.stdout.isatty()
| docker pull all images for current branch and master before building
Because our buildserver forgets the docker cache between builds we pull the previous build for all the images.
it would be great if we could get shipwright to do it.
Otherwise a command like "shipright images" which lists all the images that shipwright *would* build would let us write our own command to do this. | 6si/shipwright | diff --git a/tests/integration/test_docker_builds.py b/tests/integration/test_docker_builds.py
index 00aa6be..3a22616 100644
--- a/tests/integration/test_docker_builds.py
+++ b/tests/integration/test_docker_builds.py
@@ -12,7 +12,7 @@ from .utils import commit_untracked, create_repo, get_defaults
def default_args():
- return argparse.Namespace(dirty=False)
+ return argparse.Namespace(dirty=False, pull_cache=False)
def test_sample(tmpdir, docker_client):
@@ -734,3 +734,85 @@ def test_build_with_repo_digest(tmpdir, docker_client, registry):
)
for image in old_images:
cli.remove_image(image, force=True)
+
+
+def test_docker_buld_pull_cache(tmpdir, docker_client, registry):
+ path = str(tmpdir.join('shipwright-localhost-sample'))
+ source = pkg_resources.resource_filename(
+ __name__,
+ 'examples/shipwright-localhost-sample',
+ )
+ repo = create_repo(path, source)
+ tag = repo.head.ref.commit.hexsha[:12]
+
+ client_cfg = docker_utils.kwargs_from_env()
+ cli = docker_client
+
+ defaults = get_defaults()
+ defaults['push'] = True
+ try:
+ shipw_cli.run(
+ path=path,
+ client_cfg=client_cfg,
+ arguments=defaults,
+ environ={},
+ )
+
+ # Remove the build images:
+ old_images = (
+ cli.images(name='localhost:5000/service1', quiet=True) +
+ cli.images(name='localhost:5000/shared', quiet=True) +
+ cli.images(name='localhost:5000/base', quiet=True)
+ )
+ for image in old_images:
+ cli.remove_image(image, force=True)
+
+ images_after_delete = (
+ cli.images(name='localhost:5000/service1') +
+ cli.images(name='localhost:5000/shared') +
+ cli.images(name='localhost:5000/base')
+ )
+ assert images_after_delete == []
+
+ args = default_args()
+ args.pull_cache = True
+
+ shipw_cli.run(
+ path=path,
+ client_cfg=client_cfg,
+ arguments=defaults,
+ environ={},
+ new_style_args=args,
+ )
+
+ service1, shared, base = (
+ cli.images(name='localhost:5000/service1') +
+ cli.images(name='localhost:5000/shared') +
+ cli.images(name='localhost:5000/base')
+ )
+
+ assert set(service1['RepoTags']) == {
+ 'localhost:5000/service1:master',
+ 'localhost:5000/service1:latest',
+ 'localhost:5000/service1:' + tag,
+ }
+
+ assert set(shared['RepoTags']) == {
+ 'localhost:5000/shared:master',
+ 'localhost:5000/shared:latest',
+ 'localhost:5000/shared:' + tag,
+ }
+
+ assert set(base['RepoTags']) == {
+ 'localhost:5000/base:master',
+ 'localhost:5000/base:latest',
+ 'localhost:5000/base:' + tag,
+ }
+ finally:
+ old_images = (
+ cli.images(name='localhost:5000/service1', quiet=True) +
+ cli.images(name='localhost:5000/shared', quiet=True) +
+ cli.images(name='localhost:5000/base', quiet=True)
+ )
+ for image in old_images:
+ cli.remove_image(image, force=True)
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 260eb92..064f931 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -16,7 +16,6 @@ def get_defaults():
'--exclude': [],
'--help': False,
'--no-build': False,
- '--dirty': False,
'--upto': [],
'--x-assert-hostname': False,
'-H': None,
@@ -90,7 +89,6 @@ def test_args():
'--exclude': [],
'--help': False,
'--no-build': False,
- '--dirty': False,
'--upto': [],
'--x-assert-hostname': True,
'-H': None,
@@ -105,7 +103,7 @@ def test_args_2():
args = [
'--account=x', '--x-assert-hostname', 'build',
'-d', 'foo', 'bar',
- '-t', 'foo', '--dirty',
+ '-t', 'foo', '--dirty', '--pull-cache',
]
parser = cli.argparser()
arguments = cli.old_style_arg_dict(parser.parse_args(args))
@@ -118,7 +116,6 @@ def test_args_2():
'--exclude': [],
'--help': False,
'--no-build': False,
- '--dirty': True,
'--upto': [],
'--x-assert-hostname': True,
'-H': None,
@@ -142,7 +139,6 @@ def test_args_base():
'--exclude': [],
'--help': False,
'--no-build': False,
- '--dirty': False,
'--upto': [],
'--x-assert-hostname': False,
'-H': None,
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 4
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docker-py==1.10.6
docker-pycreds==0.4.0
exceptiongroup==1.2.2
gitdb2==2.0.6
GitPython==2.1.15
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
-e git+https://github.com/6si/shipwright.git@7d3ccf39acc79bb6d33a787e773227358764dd2c#egg=shipwright
six==1.17.0
smmap==5.0.2
smmap2==3.0.1
tomli==2.2.1
urllib3==2.3.0
websocket-client==1.8.0
| name: shipwright
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docker-py==1.10.6
- docker-pycreds==0.4.0
- exceptiongroup==1.2.2
- gitdb2==2.0.6
- gitpython==2.1.15
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- smmap==5.0.2
- smmap2==3.0.1
- tomli==2.2.1
- urllib3==2.3.0
- websocket-client==1.8.0
prefix: /opt/conda/envs/shipwright
| [
"tests/test_cli.py::test_args",
"tests/test_cli.py::test_args_2",
"tests/test_cli.py::test_args_base"
]
| [
"tests/integration/test_docker_builds.py::test_sample",
"tests/integration/test_docker_builds.py::test_multi_dockerfile",
"tests/integration/test_docker_builds.py::test_clean_tree_avoids_rebuild",
"tests/integration/test_docker_builds.py::test_clean_tree_avoids_rebuild_new_image_definition",
"tests/integration/test_docker_builds.py::test_dump_file",
"tests/integration/test_docker_builds.py::test_exclude",
"tests/integration/test_docker_builds.py::test_exact",
"tests/integration/test_docker_builds.py::test_dirty_flag",
"tests/integration/test_docker_builds.py::test_exit_on_failure_but_build_completes",
"tests/integration/test_docker_builds.py::test_short_name_target",
"tests/integration/test_docker_builds.py::test_child_inherits_parents_build_tag"
]
| [
"tests/integration/test_docker_builds.py::test_dirty_fails_without_flag",
"tests/test_cli.py::test_without_json_manifest",
"tests/test_cli.py::test_push_also_builds",
"tests/test_cli.py::test_assert_hostname"
]
| []
| Apache License 2.0 | 713 | [
"shipwright/base.py",
"shipwright/cli.py",
"shipwright/build.py",
"CHANGES.rst"
]
| [
"shipwright/base.py",
"shipwright/cli.py",
"shipwright/build.py",
"CHANGES.rst"
]
|
|
napjon__krisk-21 | 22d261e50cfac645255a4b6569633633a0b6d52e | 2016-08-23 10:46:13 | 22d261e50cfac645255a4b6569633633a0b6d52e | diff --git a/README.md b/README.md
index 0ac2848..2392b25 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,5 @@
[](https://circleci.com/gh/napjon/krisk)
[](https://pypi.python.org/pypi/krisk/)
-[](https://codecov.io/gh/napjon/krisk)

# Overview
diff --git a/krisk/make_chart.py b/krisk/make_chart.py
index 3e73504..9d4656b 100644
--- a/krisk/make_chart.py
+++ b/krisk/make_chart.py
@@ -19,34 +19,27 @@ def round_list(arr):
def make_chart(df,**kwargs):
- def insert_series_on(f):
-
+ def insert_series_on(f,df=df):
+ data = f(df)
if category:
#Iterate and append Data for every category
- for cat, subset in df.groupby(category):
+ for cat in data.columns:
cat = str(cat)
- insert_data_to_series(f,subset,cat)
+ insert_data_to_series(data[cat], cat)
c._option['legend']['data'].append(cat)
else:
- insert_data_to_series(f,df)
+ insert_data_to_series(data)
- def insert_data_to_series(f,df,cat=None):
- data = f(df)
+ def insert_data_to_series(data, cat=None):
series = deepcopy(elem_series)
series['data'] = round_list(data)
series['type'] = kwargs['type']
series['name'] = cat if cat else x
c._option['series'].append(series)
-
c = Chart(**kwargs)
-
- elem_series = {
- 'name': '',
- 'type': kwargs['type'],
- 'data': []}
-
+ elem_series = {'name': '', 'type': kwargs['type'], 'data': []}
x = kwargs['x']
y = kwargs.get('y')
category = kwargs['category']
@@ -57,8 +50,7 @@ def make_chart(df,**kwargs):
"""Provide stacked,annotate, area for bar line hist"""
series = c._option['series']
- d_annotate = {'normal':{'show':True,
- 'position':'top'}}
+ d_annotate = {'normal':{'show': True, 'position': 'top'}}
if category and kwargs['stacked'] == True:
for s in series:
@@ -81,75 +73,68 @@ def make_chart(df,**kwargs):
# TODO: make annotate receive all kinds supported in echarts.
if kwargs['type'] in ['bar','line']:
-
-
-
+
+ @insert_series_on
def get_bar_line_data(df):
-# c._option['yAxis']['scale'] = True #TODO: Still need to be evaluated
-
- if y is None:
- data = df[x].value_counts()
+ if category:
+ if y is None:
+ data = pd.crosstab(df[x], df[category])
+ else:
+ data = df.pivot_table(index=x,values=y,columns=category,
+ aggfunc=kwargs['how'],fill_value=0)
else:
- data = (df[y]
- if kwargs['how'] is None else
- df.groupby(x)[y].aggregate(kwargs['how']))
-
- c._option['xAxis']['data'] = data.index.values.tolist()
+ if y is None:
+ data = df[x].value_counts()
+ else:
+ raise AssertionError('Use y in category instead')
- # TODO: Still need to be evaluated
-# data = (df[x].value_counts()
-# if y is None else
-# df.groupby(x)[y].aggregate(kwargs['how']))
+ c._option['xAxis']['data'] = data.index.values.tolist()
return data
-
- insert_series_on(get_bar_line_data)
bar_line_hist_condition()
-
+
+
elif kwargs['type'] == 'hist':
kwargs['type'] = 'bar'
-
+ @insert_series_on
def get_hist_data(df):
- y_val,x_val = np.histogram(df[x],
- bins=kwargs['bins'],
+
+ y_val,x_val = np.histogram(df[x],bins=kwargs['bins'],
normed=kwargs['normed'])
- data = pd.Series(y_val)
+ if category:
+ data = pd.DataFrame()
+ for cat, sub in df.groupby(category):
+ data[cat] = (pd.cut(sub[x], x_val)
+ .value_counts(sort=False, normalize=kwargs['normed']))
+ else:
+ data = pd.Series(y_val)
+
bins = x_val.astype(int).tolist()
c._option['xAxis']['data'] = bins
return data
- insert_series_on(get_hist_data)
bar_line_hist_condition()
-
+
elif kwargs['type'] == 'scatter':
- c._option['xAxis'] = {'type': 'value',
- 'name': x,
- 'max': int(df[x].max())}
- c._option['yAxis'] = {'type': 'value',
- 'name': y,
- 'max': int(df[y].max())}
+ c._option['xAxis'] = {'type': 'value', 'name': x, 'max': int(df[x].max())}
+ c._option['yAxis'] = {'type': 'value', 'name': y, 'max': int(df[y].max())}
c._option['visualMap'] = []
-
-
cols = [x,y]
size = kwargs['size']
if size is not None:
- vmap_template_size = {
- 'show': False,
+ vmap_template_size = {'show': False,
'dimension': 2,
'min': 0,
'max': 250,
'precision': 0.1,
- 'inRange': {
- 'symbolSize': [10, 70]
- }
- }
+ 'inRange': {'symbolSize': [10, 70]}
+ }
vmap_size = deepcopy(vmap_template_size)
vmap_size['min'] = df[size].min()
vmap_size['max'] = df[size].max()
@@ -168,16 +153,20 @@ def make_chart(df,**kwargs):
# c._option['visualMap'].append(vmap_saturate)
# cols.append(saturate)
- columns = cols+df.columns.difference(cols).tolist()
+ columns = cols + df.columns.difference(cols).tolist()
c._kwargs_chart_['columns'] = columns
- def get_scatter_data(df):
+ def insert_scatter_data(df):
data = df[columns]
-# print(columns)
- return data
-
- insert_series_on(get_scatter_data)
-
-
-
- return c
+ if category:
+ #Iterate and append Data for every category
+ for cat, subset in data.groupby(category):
+ cat = str(cat)
+ insert_data_to_series(subset,cat)
+ c._option['legend']['data'].append(cat)
+ else:
+ insert_data_to_series(data)
+
+ insert_scatter_data(df)
+
+ return c
\ No newline at end of file
diff --git a/krisk/plot.py b/krisk/plot.py
index fb5c50c..6de3d18 100644
--- a/krisk/plot.py
+++ b/krisk/plot.py
@@ -1,8 +1,7 @@
from krisk.make_chart import make_chart
-def bar(df,x,y=None,category=None,how='count',stacked=False,
- annotate=None,**kwargs):
+def bar(df,x,y=None,category=None,how='count',stacked=False,annotate=None):
"""
Parameters
----------
@@ -29,7 +28,7 @@ def bar(df,x,y=None,category=None,how='count',stacked=False,
"""
# TODO: add optional argument trendline
-
+ kwargs = {}
kwargs['x'] = x
kwargs['y'] = y
kwargs['category'] = category
@@ -40,8 +39,7 @@ def bar(df,x,y=None,category=None,how='count',stacked=False,
return make_chart(df,**kwargs)
-def line(df,x,y=None,category=None,how=None,stacked=False,area=False,
- annotate=None,**kwargs):
+def line(df,x,y=None,category=None,how=None,stacked=False,area=False,annotate=None):
"""
Parameters
----------
@@ -66,7 +64,7 @@ def line(df,x,y=None,category=None,how=None,stacked=False,area=False,
-------
Chart Object
"""
-
+ kwargs = {}
kwargs['x'] = x
kwargs['y'] = y
kwargs['category'] = category
@@ -78,8 +76,7 @@ def line(df,x,y=None,category=None,how=None,stacked=False,area=False,
return make_chart(df,**kwargs)
-def hist(df,x,category=None,bins=10,normed=False,stacked=False,
- annotate=None,**kwargs):
+def hist(df,x,category=None,bins=10,normed=False,stacked=False,annotate=None):
"""
Parameters
----------
@@ -103,7 +100,7 @@ def hist(df,x,category=None,bins=10,normed=False,stacked=False,
-------
Chart Object
"""
-
+ kwargs = {}
kwargs['x'] = x
kwargs['category'] = category
kwargs['bins'] = bins
@@ -114,7 +111,7 @@ def hist(df,x,category=None,bins=10,normed=False,stacked=False,
return make_chart(df,**kwargs)
-def scatter(df,x,y,size=None,category=None,size_px=(10,70),**kwargs):
+def scatter(df,x,y,size=None,category=None,size_px=(10,70)):
"""
Parameters
----------
@@ -134,6 +131,7 @@ def scatter(df,x,y,size=None,category=None,size_px=(10,70),**kwargs):
Chart Object
"""
+ kwargs = {}
kwargs['x'] = x
kwargs['y'] = y
kwargs['category'] = category
| if category, bar plot not shown all index values
See http://nbviewer.jupyter.org/github/napjon/krisk/blob/master/notebooks/themes-colors.ipynb
Not all year index values shown | napjon/krisk | diff --git a/krisk/tests/data/bar.json b/krisk/tests/data/bar.json
index 32e0928..4d92a24 100644
--- a/krisk/tests/data/bar.json
+++ b/krisk/tests/data/bar.json
@@ -1,1 +1,139 @@
-{"xAxis": {"data": [69.12, 70.33, 70.93, 71.1, 71.93, 73.49, 74.74, 76.32, 77.56, 78.83, 80.37, 81.235]}, "series": [{"type": "bar", "data": [9279525.0, 10270856.0, 11000948.0, 12760499.0, 14760787.0, 17152804.0, 20033753.0, 23254956.0, 26298373.0, 29072015.0, 31287142.0, 33333216.0], "stack": "continent", "name": "Africa"}, {"type": "bar", "data": [17876956.0, 19610538.0, 21283783.0, 22934225.0, 24779799.0, 26983828.0, 29341374.0, 31620918.0, 33958947.0, 36203463.0, 38331121.0, 40301927.0], "stack": "continent", "name": "Americas"}, {"type": "bar", "data": [8425333.0, 9240934.0, 10267083.0, 11537966.0, 13079460.0, 14880372.0, 12881816.0, 13867957.0, 16317921.0, 22227415.0, 25268405.0, 31889923.0], "stack": "continent", "name": "Asia"}, {"type": "bar", "data": [1282697.0, 1476505.0, 1728137.0, 1984060.0, 2263554.0, 2509048.0, 2780097.0, 3326498.0, 3075321.0, 3428038.0, 3508512.0, 3600523.0], "stack": "continent", "name": "Europe"}, {"type": "bar", "data": [8691212.0, 9712569.0, 10794968.0, 11872264.0, 13177000.0, 14074100.0, 15184200.0, 16257249.0, 17481977.0, 18565243.0, 19546792.0, 20434176.0], "stack": "continent", "name": "Oceania"}], "title": {"text": ""}, "tooltip": {"axisPointer": {"type": ""}}, "yAxis": {}, "legend": {"data": ["Africa", "Americas", "Asia", "Europe", "Oceania"]}}
\ No newline at end of file
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
+ "series": [
+ {
+ "name": "Africa",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 9279525,
+ 10270856,
+ 11000948,
+ 12760499,
+ 14760787,
+ 17152804,
+ 20033753,
+ 23254956,
+ 26298373,
+ 29072015,
+ 31287142,
+ 33333216
+ ]
+ },
+ {
+ "name": "Americas",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 17876956,
+ 19610538,
+ 21283783,
+ 22934225,
+ 24779799,
+ 26983828,
+ 29341374,
+ 31620918,
+ 33958947,
+ 36203463,
+ 38331121,
+ 40301927
+ ]
+ },
+ {
+ "name": "Asia",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 8425333,
+ 9240934,
+ 10267083,
+ 11537966,
+ 13079460,
+ 14880372,
+ 12881816,
+ 13867957,
+ 16317921,
+ 22227415,
+ 25268405,
+ 31889923
+ ]
+ },
+ {
+ "name": "Europe",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 1282697,
+ 1476505,
+ 1728137,
+ 1984060,
+ 2263554,
+ 2509048,
+ 2780097,
+ 3075321,
+ 3326498,
+ 3428038,
+ 3508512,
+ 3600523
+ ]
+ },
+ {
+ "name": "Oceania",
+ "type": "bar",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "top"
+ }
+ },
+ "stack": "continent",
+ "data": [
+ 8691212,
+ 9712569,
+ 10794968,
+ 11872264,
+ 13177000,
+ 14074100,
+ 15184200,
+ 16257249,
+ 17481977,
+ 18565243,
+ 19546792,
+ 20434176
+ ]
+ }
+ ],
+ "yAxis": {},
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/bar_ann_all.json b/krisk/tests/data/bar_ann_all.json
new file mode 100644
index 0000000..83dcc8d
--- /dev/null
+++ b/krisk/tests/data/bar_ann_all.json
@@ -0,0 +1,163 @@
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
+ "series": [
+ {
+ "name": "Africa",
+ "type": "bar",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "inside"
+ }
+ },
+ "stack": "continent",
+ "data": [
+ 9279525,
+ 10270856,
+ 11000948,
+ 12760499,
+ 14760787,
+ 17152804,
+ 20033753,
+ 23254956,
+ 26298373,
+ 29072015,
+ 31287142,
+ 33333216
+ ]
+ },
+ {
+ "name": "Americas",
+ "type": "bar",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "inside"
+ }
+ },
+ "stack": "continent",
+ "data": [
+ 17876956,
+ 19610538,
+ 21283783,
+ 22934225,
+ 24779799,
+ 26983828,
+ 29341374,
+ 31620918,
+ 33958947,
+ 36203463,
+ 38331121,
+ 40301927
+ ]
+ },
+ {
+ "name": "Asia",
+ "type": "bar",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "inside"
+ }
+ },
+ "stack": "continent",
+ "data": [
+ 8425333,
+ 9240934,
+ 10267083,
+ 11537966,
+ 13079460,
+ 14880372,
+ 12881816,
+ 13867957,
+ 16317921,
+ 22227415,
+ 25268405,
+ 31889923
+ ]
+ },
+ {
+ "name": "Europe",
+ "type": "bar",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "inside"
+ }
+ },
+ "stack": "continent",
+ "data": [
+ 1282697,
+ 1476505,
+ 1728137,
+ 1984060,
+ 2263554,
+ 2509048,
+ 2780097,
+ 3075321,
+ 3326498,
+ 3428038,
+ 3508512,
+ 3600523
+ ]
+ },
+ {
+ "name": "Oceania",
+ "type": "bar",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "inside"
+ }
+ },
+ "stack": "continent",
+ "data": [
+ 8691212,
+ 9712569,
+ 10794968,
+ 11872264,
+ 13177000,
+ 14074100,
+ 15184200,
+ 16257249,
+ 17481977,
+ 18565243,
+ 19546792,
+ 20434176
+ ]
+ }
+ ],
+ "yAxis": {},
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/hist.json b/krisk/tests/data/hist.json
index 97fe107..5259b2d 100644
--- a/krisk/tests/data/hist.json
+++ b/krisk/tests/data/hist.json
@@ -1,1 +1,182 @@
-{"xAxis": {"data": [69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81]}, "series": [{"type": "bar", "data": [0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.285, 0.0, 0.0, 0.0, 0.285], "stack": "continent", "name": "Africa"}, {"type": "bar", "data": [0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.649], "stack": "continent", "name": "Americas"}, {"type": "bar", "data": [0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555, 0.555, 0.0, 0.555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555], "stack": "continent", "name": "Asia"}, {"type": "bar", "data": [0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.393, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.393, 0.0, 0.0, 0.393], "stack": "continent", "name": "Europe"}, {"type": "bar", "data": [0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.688], "stack": "continent", "name": "Oceania"}], "title": {"text": ""}, "tooltip": {"axisPointer": {"type": ""}}, "yAxis": {}, "legend": {"data": ["Africa", "Americas", "Asia", "Europe", "Oceania"]}}
\ No newline at end of file
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "xAxis": {
+ "data": [
+ 28,
+ 31,
+ 34,
+ 36,
+ 39,
+ 41,
+ 44,
+ 47,
+ 49,
+ 52,
+ 55,
+ 57,
+ 60,
+ 62,
+ 65,
+ 68,
+ 70,
+ 73,
+ 75,
+ 78,
+ 81
+ ]
+ },
+ "series": [
+ {
+ "name": "Africa",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.083,
+ 0.083,
+ 0.083,
+ 0.083,
+ 0.083,
+ 0.0,
+ 0.083,
+ 0.083,
+ 0.0,
+ 0.167,
+ 0.083,
+ 0.167,
+ 0.0,
+ 0.0,
+ 0.0
+ ]
+ },
+ {
+ "name": "Americas",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.083,
+ 0.167,
+ 0.167,
+ 0.167,
+ 0.25,
+ 0.167,
+ 0.0,
+ 0.0
+ ]
+ },
+ {
+ "name": "Asia",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 0.091,
+ 0.182,
+ 0.091,
+ 0.091,
+ 0.364,
+ 0.182,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ]
+ },
+ {
+ "name": "Europe",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.083,
+ 0.083,
+ 0.0,
+ 0.083,
+ 0.167,
+ 0.167,
+ 0.25,
+ 0.083,
+ 0.083,
+ 0.0
+ ]
+ },
+ {
+ "name": "Oceania",
+ "type": "bar",
+ "stack": "continent",
+ "data": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.167,
+ 0.25,
+ 0.167,
+ 0.167,
+ 0.25
+ ]
+ }
+ ],
+ "yAxis": {},
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/line.json b/krisk/tests/data/line.json
index e04d100..13a96b8 100644
--- a/krisk/tests/data/line.json
+++ b/krisk/tests/data/line.json
@@ -1,1 +1,178 @@
-{"xAxis": {"data": [1952, 1957, 1962, 1967, 1972, 1977, 1982, 1987, 1992, 1997, 2002, 2007]}, "series": [{"areaStyle": {"normal": {}}, "type": "line", "data": [43.077, 45.685, 48.303, 51.407, 54.518, 58.014, 61.368, 65.799, 67.744, 69.152, 70.994, 72.301], "stack": "continent", "name": "Africa"}, {"areaStyle": {"normal": {}}, "type": "line", "data": [62.485, 64.399, 65.142, 65.634, 67.065, 68.481, 69.942, 70.774, 71.868, 73.275, 74.34, 75.32], "stack": "continent", "name": "Americas"}, {"areaStyle": {"normal": {}}, "type": "line", "data": [28.801, 30.332, 31.997, 34.02, 36.088, 38.438, 39.854, 40.822, 41.674, 41.763, 42.129, 43.828], "stack": "continent", "name": "Asia"}, {"areaStyle": {"normal": {}}, "type": "line", "data": [55.23, 59.28, 64.82, 66.22, 67.69, 68.93, 70.42, 72.0, 71.581, 72.95, 75.651, 76.423], "stack": "continent", "name": "Europe"}, {"areaStyle": {"normal": {}}, "type": "line", "data": [69.12, 70.33, 70.93, 71.1, 71.93, 73.49, 74.74, 76.32, 77.56, 78.83, 80.37, 81.235], "stack": "continent", "name": "Oceania"}], "title": {"text": ""}, "tooltip": {"axisPointer": {"type": ""}}, "yAxis": {}, "legend": {"data": ["Africa", "Americas", "Asia", "Europe", "Oceania"]}}
\ No newline at end of file
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
+ "series": [
+ {
+ "name": "Africa",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "top"
+ }
+ },
+ "type": "line",
+ "data": [
+ 43.077,
+ 45.685,
+ 48.303,
+ 51.407,
+ 54.518,
+ 58.014,
+ 61.368,
+ 65.799,
+ 67.744,
+ 69.152,
+ 70.994,
+ 72.301
+ ],
+ "areaStyle": {
+ "normal": {}
+ },
+ "stack": "continent"
+ },
+ {
+ "name": "Americas",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "top"
+ }
+ },
+ "type": "line",
+ "data": [
+ 62.485,
+ 64.399,
+ 65.142,
+ 65.634,
+ 67.065,
+ 68.481,
+ 69.942,
+ 70.774,
+ 71.868,
+ 73.275,
+ 74.34,
+ 75.32
+ ],
+ "areaStyle": {
+ "normal": {}
+ },
+ "stack": "continent"
+ },
+ {
+ "name": "Asia",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "top"
+ }
+ },
+ "type": "line",
+ "data": [
+ 28.801,
+ 30.332,
+ 31.997,
+ 34.02,
+ 36.088,
+ 38.438,
+ 39.854,
+ 40.822,
+ 41.674,
+ 41.763,
+ 42.129,
+ 43.828
+ ],
+ "areaStyle": {
+ "normal": {}
+ },
+ "stack": "continent"
+ },
+ {
+ "name": "Europe",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "top"
+ }
+ },
+ "type": "line",
+ "data": [
+ 55.23,
+ 59.28,
+ 64.82,
+ 66.22,
+ 67.69,
+ 68.93,
+ 70.42,
+ 72.0,
+ 71.581,
+ 72.95,
+ 75.651,
+ 76.423
+ ],
+ "areaStyle": {
+ "normal": {}
+ },
+ "stack": "continent"
+ },
+ {
+ "name": "Oceania",
+ "label": {
+ "normal": {
+ "show": true,
+ "position": "top"
+ }
+ },
+ "type": "line",
+ "data": [
+ 69.12,
+ 70.33,
+ 70.93,
+ 71.1,
+ 71.93,
+ 73.49,
+ 74.74,
+ 76.32,
+ 77.56,
+ 78.83,
+ 80.37,
+ 81.235
+ ],
+ "areaStyle": {
+ "normal": {}
+ },
+ "stack": "continent"
+ }
+ ],
+ "yAxis": {},
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/scatter.json b/krisk/tests/data/scatter.json
index ede290a..aa4d4ea 100644
--- a/krisk/tests/data/scatter.json
+++ b/krisk/tests/data/scatter.json
@@ -1,1 +1,116 @@
-{"xAxis": {"type": "value", "max": 69, "name": "lifeExp"}, "series": [{"type": "scatter", "data": [[43.077, 2449.008, 9279525.0, "Africa", "Algeria", 1952]], "name": "Africa"}, {"type": "scatter", "data": [[62.485, 5911.315, 17876956.0, "Americas", "Argentina", 1952]], "name": "Americas"}, {"type": "scatter", "data": [[28.801, 779.445, 8425333.0, "Asia", "Afghanistan", 1952]], "name": "Asia"}, {"type": "scatter", "data": [[55.23, 1601.056, 1282697.0, "Europe", "Albania", 1952]], "name": "Europe"}, {"type": "scatter", "data": [[69.12, 10039.596, 8691212.0, "Oceania", "Australia", 1952]], "name": "Oceania"}], "title": {"text": ""}, "tooltip": {"axisPointer": {"type": ""}}, "yAxis": {"type": "value", "max": 10039, "name": "gdpPercap"}, "visualMap": [{"min": 1282697.0, "dimension": 2, "precision": 0.1, "max": 17876956.0, "show": false, "inRange": {"symbolSize": [10, 70]}}], "legend": {"data": ["Africa", "Americas", "Asia", "Europe", "Oceania"]}}
\ No newline at end of file
+{
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "visualMap": [
+ {
+ "min": 1282697.0,
+ "show": false,
+ "precision": 0.1,
+ "max": 17876956.0,
+ "dimension": 2,
+ "inRange": {
+ "symbolSize": [
+ 10,
+ 70
+ ]
+ }
+ }
+ ],
+ "xAxis": {
+ "name": "lifeExp",
+ "max": 69,
+ "type": "value"
+ },
+ "series": [
+ {
+ "name": "Africa",
+ "type": "scatter",
+ "data": [
+ [
+ 43.077,
+ 2449.008,
+ 9279525.0,
+ "Africa",
+ "Algeria",
+ 1952
+ ]
+ ]
+ },
+ {
+ "name": "Americas",
+ "type": "scatter",
+ "data": [
+ [
+ 62.485,
+ 5911.315,
+ 17876956.0,
+ "Americas",
+ "Argentina",
+ 1952
+ ]
+ ]
+ },
+ {
+ "name": "Asia",
+ "type": "scatter",
+ "data": [
+ [
+ 28.801,
+ 779.445,
+ 8425333.0,
+ "Asia",
+ "Afghanistan",
+ 1952
+ ]
+ ]
+ },
+ {
+ "name": "Europe",
+ "type": "scatter",
+ "data": [
+ [
+ 55.23,
+ 1601.056,
+ 1282697.0,
+ "Europe",
+ "Albania",
+ 1952
+ ]
+ ]
+ },
+ {
+ "name": "Oceania",
+ "type": "scatter",
+ "data": [
+ [
+ 69.12,
+ 10039.596,
+ 8691212.0,
+ "Oceania",
+ "Australia",
+ 1952
+ ]
+ ]
+ }
+ ],
+ "yAxis": {
+ "name": "gdpPercap",
+ "max": 10039,
+ "type": "value"
+ },
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/data/scatter_single.json b/krisk/tests/data/scatter_single.json
new file mode 100644
index 0000000..641fa20
--- /dev/null
+++ b/krisk/tests/data/scatter_single.json
@@ -0,0 +1,86 @@
+{
+ "legend": {
+ "data": []
+ },
+ "visualMap": [
+ {
+ "min": 1282697.0,
+ "show": false,
+ "precision": 0.1,
+ "max": 17876956.0,
+ "dimension": 2,
+ "inRange": {
+ "symbolSize": [
+ 10,
+ 70
+ ]
+ }
+ }
+ ],
+ "xAxis": {
+ "name": "lifeExp",
+ "max": 69,
+ "type": "value"
+ },
+ "series": [
+ {
+ "name": "lifeExp",
+ "type": "scatter",
+ "data": [
+ [
+ 43.077,
+ 2449.008,
+ 9279525.0,
+ "Africa",
+ "Algeria",
+ 1952
+ ],
+ [
+ 62.485,
+ 5911.315,
+ 17876956.0,
+ "Americas",
+ "Argentina",
+ 1952
+ ],
+ [
+ 28.801,
+ 779.445,
+ 8425333.0,
+ "Asia",
+ "Afghanistan",
+ 1952
+ ],
+ [
+ 55.23,
+ 1601.056,
+ 1282697.0,
+ "Europe",
+ "Albania",
+ 1952
+ ],
+ [
+ 69.12,
+ 10039.596,
+ 8691212.0,
+ "Oceania",
+ "Australia",
+ 1952
+ ]
+ ]
+ }
+ ],
+ "yAxis": {
+ "name": "gdpPercap",
+ "max": 10039,
+ "type": "value"
+ },
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ }
+}
\ No newline at end of file
diff --git a/krisk/tests/test_chart.py b/krisk/tests/test_chart.py
index b985f7c..73709f8 100644
--- a/krisk/tests/test_chart.py
+++ b/krisk/tests/test_chart.py
@@ -1,15 +1,18 @@
import krisk.plot as kk
+def test_replot_and_resync(bar_simple,df_simple):
+
+ c = bar_simple
+ stripped = lambda x: x.data.replace('\n', '').replace(' ','')
+ assert stripped(c.replot(c)) == stripped(c.resync_data(df_simple))
+
+
def test_flip(bar_simple):
assert bar_simple.get_option()['xAxis'] == bar_simple.flip_axes().get_option()['yAxis']
+
-def test_color(bar_simple):
-
- colored = bar_simple.set_color(background='green',palette=['purple']).get_option()
- assert colored['backgroundColor'] == 'green'
- assert colored['color'] == ['purple']
def test_read_df(gapminder):
@@ -33,4 +36,12 @@ def test_on_event(df_simple):
assert on_event._events == {'click': 'handler_foo'}
code_handler = on_event._repr_javascript_().split('\n')[-13]
input_code = ' var code_input = "import json; handler_foo(json.loads(\'" + json_strings + "\'))";'
- assert code_handler == input_code
\ No newline at end of file
+ assert code_handler == input_code
+
+
+def test_color(bar_simple):
+
+ # Set color modify the bar_simple itself! Potentially bug
+ colored = bar_simple.set_color(background='green',palette=['purple']).get_option()
+ assert colored['backgroundColor'] == 'green'
+ assert colored['color'] == ['purple']
\ No newline at end of file
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 77e1ebd..97e2a87 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -4,36 +4,43 @@ import pytest
import krisk.plot as kk
DATA_DIR = 'krisk/tests/data'
[email protected]
-def df():
- import pandas as pd
- return (pd.read_csv(DATA_DIR+'/gapminderDataFiveYear.txt',sep='\t')
- .groupby(['year','continent'],as_index=False).first())
-
-def test_bar(df):
+def test_bar(gapminder):
+ #Bar
true_option = json.load(open(DATA_DIR+'/bar.json','r'))
- p = kk.bar(df,'lifeExp',y='pop',category='continent',how='mean',stacked=True)
-
- assert p._option == true_option
+ p = kk.bar(gapminder,'year',y='pop',category='continent',how='mean',stacked=True, annotate=True)
+ assert p.get_option() == true_option
+
+ # Bar Annotate All
+ true_option = json.load(open(DATA_DIR+'/bar_ann_all.json','r'))
+ p = kk.bar(gapminder,'year',y='pop',category='continent',how='mean',stacked=True, annotate='all')
+ assert p.get_option() == true_option
-def test_line(df):
+def test_line(gapminder):
true_option = json.load(open(DATA_DIR+'/line.json','r'))
- p = kk.line(df,'year',y='lifeExp',category='continent',how='mean',stacked=True,area=True)
+ p = kk.line(gapminder,'year',y='lifeExp',category='continent',how='mean',
+ stacked=True,area=True,annotate='all')
- assert p._option == true_option
+ assert p.get_option() == true_option
-def test_hist(df):
+def test_hist(gapminder):
true_option = json.load(open(DATA_DIR+'/hist.json','r'))
- p = kk.hist(df,'lifeExp',category='continent',bins=100,normed=True,stacked=True)
+ p = kk.hist(gapminder,'lifeExp',category='continent',bins=20,normed=True,stacked=True)
- assert p._option == true_option
+ assert p.get_option() == true_option
-def test_scatter(df):
+def test_scatter(gapminder):
+ # Grouped Scatter
true_option = json.load(open(DATA_DIR+'/scatter.json','r'))
- p = kk.scatter(df[df.year == 1952],'lifeExp','gdpPercap',size='pop',category='continent')
+ p = kk.scatter(gapminder[gapminder.year == 1952],'lifeExp','gdpPercap',size='pop',category='continent')
+ assert p.get_option() == true_option
+
+ # Scatter
+ true_option = json.load(open(DATA_DIR+'/scatter_single.json','r'))
+ p = kk.scatter(gapminder[gapminder.year == 1952],'lifeExp','gdpPercap',size='pop')
+ assert p.get_option() == true_option
- assert p._option == true_option
\ No newline at end of file
+
\ No newline at end of file
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@22d261e50cfac645255a4b6569633633a0b6d52e#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_hist"
]
| []
| [
"krisk/tests/test_chart.py::test_replot_and_resync",
"krisk/tests/test_chart.py::test_flip",
"krisk/tests/test_chart.py::test_read_df",
"krisk/tests/test_chart.py::test_on_event",
"krisk/tests/test_chart.py::test_color",
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| BSD 3-Clause "New" or "Revised" License | 714 | [
"krisk/plot.py",
"krisk/make_chart.py",
"README.md"
]
| [
"krisk/plot.py",
"krisk/make_chart.py",
"README.md"
]
|
|
paver__paver-174 | 45609b2ad1901144e68746b903345d7de1d03404 | 2016-08-23 14:20:07 | 62fb3d8e32c3727777b3ff7d75ba1b9b75703027 | Almad: Thanks!
Before merging in, I'd like to do two things:
* Add test case for pessimistic scenario (codify behavior on conflicting command arguments)
* Add docs to describe how this should behave
rcoup: Ahem, forgot all about this one :)
Added some more tests and docs as requested.
Almad: @rcoup ...and I have missed this one because of the failed cross, ahem forever :dagger:
Would you mind rebasing now please? It should work, and should be straightforward to release now ;)
rcoup: @Almad done! :shipit: | diff --git a/docs/source/pavement.rst b/docs/source/pavement.rst
index cf09246..9dae827 100644
--- a/docs/source/pavement.rst
+++ b/docs/source/pavement.rst
@@ -203,6 +203,22 @@ For sharing, following must be fullfilled:
Otherwise, ``PavementError`` is raised.
+You can combine both ``@consume_args`` and ``@cmdopts`` together::
+
+ @task
+ @cmdopts([
+ ('username=', 'u', 'Username to use when logging in to the servers')
+ ])
+ @consume_args
+ def exec(options):
+ pass
+
+
+* ``paver exec -u root`` will result in ``options.username = 'root', options.args = []``
+* ``paver exec -u root production`` will result in ``options.username = 'root', options.args = ['production']``
+* ``paver exec production -u root`` will result in ``options.args = ['production', '-u', 'root']`` with no ``options.username`` attribute.
+* ``paver exec -u root production -u other`` will result in ``options.username = 'root', options.args = ['production', '-u', 'other']``
+
Hiding tasks
---------------
diff --git a/paver/tasks.py b/paver/tasks.py
index 7300969..09f7d4e 100644
--- a/paver/tasks.py
+++ b/paver/tasks.py
@@ -741,7 +741,8 @@ def _parse_command_line(args):
if not isinstance(task, Task):
raise BuildFailure("%s is not a Task" % taskname)
- if task.consume_args != float('inf'):
+ if task.user_options or task.consume_args != float('inf'):
+ # backwards compatibility around mixing of @cmdopts & @consume_args
args = task.parse_args(args)
if task.consume_args > 0:
args = _consume_nargs(task, args)
| Define behavior with both @cmdopts and @consume_args set
Latter now overwrites former, which is not what one generally wants.
Parse for cmdopts and remove it from args.
| paver/paver | diff --git a/paver/tests/test_tasks.py b/paver/tests/test_tasks.py
index 7b971f2..1df7c4e 100644
--- a/paver/tests/test_tasks.py
+++ b/paver/tests/test_tasks.py
@@ -404,6 +404,74 @@ def test_consume_args():
tasks._process_commands("t3 -v 1".split())
assert t3.called
+def test_consume_args_and_options():
+ @tasks.task
+ @tasks.cmdopts([
+ ("foo=", "f", "Help for foo")
+ ])
+ @tasks.consume_args
+ def t1(options):
+ assert options.foo == "1"
+ assert options.t1.foo == "1"
+ assert options.args == ['abc', 'def']
+
+ environment = _set_environment(t1=t1)
+ tasks._process_commands([
+ 't1', '--foo', '1', 'abc', 'def',
+ ])
+ assert t1.called
+
+def test_consume_args_and_options_2():
+ @tasks.task
+ @tasks.cmdopts([
+ ("foo=", "f", "Help for foo")
+ ])
+ @tasks.consume_args
+ def t1(options):
+ assert not hasattr(options, 'foo')
+ assert not hasattr(options.t1, 'foo')
+ assert options.args == ['abc', 'def', '--foo', '1']
+
+ environment = _set_environment(t1=t1)
+ tasks._process_commands([
+ 't1', 'abc', 'def', '--foo', '1',
+ ])
+ assert t1.called
+
+def test_consume_args_and_options_3():
+ @tasks.task
+ @tasks.cmdopts([
+ ("foo=", "f", "Help for foo")
+ ])
+ @tasks.consume_args
+ def t1(options):
+ assert options.foo == "1"
+ assert options.t1.foo == "1"
+ assert options.args == []
+
+ environment = _set_environment(t1=t1)
+ tasks._process_commands([
+ 't1', '--foo', '1',
+ ])
+ assert t1.called
+
+def test_consume_args_and_options_conflict():
+ @tasks.task
+ @tasks.cmdopts([
+ ("foo=", "f", "Help for foo")
+ ])
+ @tasks.consume_args
+ def t1(options):
+ assert options.foo == "1"
+ assert options.t1.foo == "1"
+ assert options.args == ['abc', 'def', '--foo', '2']
+
+ environment = _set_environment(t1=t1)
+ tasks._process_commands([
+ 't1', '--foo', '1', 'abc', 'def', '--foo', '2',
+ ])
+ assert t1.called
+
def test_consume_nargs():
# consume all args on first task
@tasks.task
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
packaging==24.2
-e git+https://github.com/paver/paver.git@45609b2ad1901144e68746b903345d7de1d03404#egg=Paver
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
| name: paver
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/paver
| [
"paver/tests/test_tasks.py::test_consume_args_and_options",
"paver/tests/test_tasks.py::test_consume_args_and_options_2",
"paver/tests/test_tasks.py::test_consume_args_and_options_3",
"paver/tests/test_tasks.py::test_consume_args_and_options_conflict"
]
| []
| [
"paver/tests/test_tasks.py::test_basic_dependencies",
"paver/tests/test_tasks.py::test_longname_resolution_in_dependencies",
"paver/tests/test_tasks.py::test_chained_dependencies",
"paver/tests/test_tasks.py::test_backwards_compatible_needs",
"paver/tests/test_tasks.py::test_tasks_dont_repeat",
"paver/tests/test_tasks.py::test_basic_command_line",
"paver/tests/test_tasks.py::test_list_tasks",
"paver/tests/test_tasks.py::test_environment_insertion",
"paver/tests/test_tasks.py::test_add_options_to_environment",
"paver/tests/test_tasks.py::test_shortname_access",
"paver/tests/test_tasks.py::test_longname_access",
"paver/tests/test_tasks.py::test_task_command_line_options",
"paver/tests/test_tasks.py::test_setting_of_options_with_equals",
"paver/tests/test_tasks.py::test_options_inherited_via_needs",
"paver/tests/test_tasks.py::test_options_inherited_via_needs_even_from_grandparents",
"paver/tests/test_tasks.py::test_options_shouldnt_overlap",
"paver/tests/test_tasks.py::test_options_shouldnt_overlap_when_bad_task_specified",
"paver/tests/test_tasks.py::test_options_may_overlap_if_explicitly_allowed",
"paver/tests/test_tasks.py::test_exactly_same_parameters_must_be_specified_in_order_to_allow_sharing",
"paver/tests/test_tasks.py::test_dest_parameter_should_map_opt_to_property",
"paver/tests/test_tasks.py::test_dotted_options",
"paver/tests/test_tasks.py::test_dry_run",
"paver/tests/test_tasks.py::test_consume_args",
"paver/tests/test_tasks.py::test_consume_nargs",
"paver/tests/test_tasks.py::test_consume_nargs_and_options",
"paver/tests/test_tasks.py::test_optional_args_in_tasks",
"paver/tests/test_tasks.py::test_debug_logging",
"paver/tests/test_tasks.py::test_base_logging",
"paver/tests/test_tasks.py::test_error_show_up_no_matter_what",
"paver/tests/test_tasks.py::test_all_messages_for_a_task_are_captured",
"paver/tests/test_tasks.py::test_messages_with_formatting_and_no_args_still_work",
"paver/tests/test_tasks.py::test_alternate_pavement_option",
"paver/tests/test_tasks.py::test_captured_output_shows_up_on_exception",
"paver/tests/test_tasks.py::test_calling_subpavement",
"paver/tests/test_tasks.py::test_task_finders",
"paver/tests/test_tasks.py::test_calling_a_function_rather_than_task",
"paver/tests/test_tasks.py::test_depending_on_a_function_rather_than_task",
"paver/tests/test_tasks.py::test_description_retrieval_trial",
"paver/tests/test_tasks.py::test_description_empty_without_docstring",
"paver/tests/test_tasks.py::test_description_retrieval_first_sentence",
"paver/tests/test_tasks.py::test_description_retrieval_first_sentence_even_with_version_numbers",
"paver/tests/test_tasks.py::test_auto_task_is_not_run_with_noauto",
"paver/tests/test_tasks.py::test_auto_task_is_run_when_present",
"paver/tests/test_tasks.py::test_task_can_be_called_repeatedly",
"paver/tests/test_tasks.py::test_options_passed_to_task",
"paver/tests/test_tasks.py::test_calling_task_with_option_arguments",
"paver/tests/test_tasks.py::test_calling_task_with_arguments_do_not_overwrite_it_for_other_tasks",
"paver/tests/test_tasks.py::test_options_might_be_provided_if_task_might_be_called",
"paver/tests/test_tasks.py::test_calling_task_with_arguments",
"paver/tests/test_tasks.py::test_calling_task_with_empty_arguments",
"paver/tests/test_tasks.py::test_calling_nonconsuming_task_with_arguments",
"paver/tests/test_tasks.py::test_options_may_overlap_between_multiple_tasks_even_when_specified_in_reverse_order",
"paver/tests/test_tasks.py::test_options_might_be_shared_both_way",
"paver/tests/test_tasks.py::test_paver_doesnt_crash_on_task_function_with_annotations"
]
| []
| BSD License | 715 | [
"docs/source/pavement.rst",
"paver/tasks.py"
]
| [
"docs/source/pavement.rst",
"paver/tasks.py"
]
|
googleapis__gax-python-127 | d59635e37d2420908a2828da8f54c0bbbd4cf4e6 | 2016-08-23 18:31:52 | 7b3172b9e8ebd1e513955bf60b42aa5f63d37d4a | codecov-io: ## [Current coverage](https://codecov.io/gh/googleapis/gax-python/pull/127?src=pr) is 97.20% (diff: 100%)
> Merging [#127](https://codecov.io/gh/googleapis/gax-python/pull/127?src=pr) into [master](https://codecov.io/gh/googleapis/gax-python/branch/master?src=pr) will increase coverage by **<.01%**
```diff
@@ master #127 diff @@
==========================================
Files 8 8
Lines 607 608 +1
Methods 0 0
Messages 0 0
Branches 0 0
==========================================
+ Hits 590 591 +1
Misses 17 17
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [d59635e...06e8393](https://codecov.io/gh/googleapis/gax-python/compare/d59635e37d2420908a2828da8f54c0bbbd4cf4e6...06e83934e1138579078192bcd706593e6426f070?src=pr)
bjwatson: @geigerj FYI, I published a copy of this at https://testpypi.python.org/pypi/google-gax
bjwatson: LGTM. Can you update https://pypi.python.org/pypi/google-gax? Should be almost identical to `0.13.0` on https://testpypi.python.org/pypi/google-gax.
dhermes: Yay! | diff --git a/google/gax/__init__.py b/google/gax/__init__.py
index 09c38a5..ef0116a 100644
--- a/google/gax/__init__.py
+++ b/google/gax/__init__.py
@@ -33,7 +33,7 @@ from __future__ import absolute_import
import collections
-__version__ = '0.12.5'
+__version__ = '0.13.0'
INITIAL_PAGE = object()
diff --git a/google/gax/grpc.py b/google/gax/grpc.py
index 4cefc35..8d43e56 100644
--- a/google/gax/grpc.py
+++ b/google/gax/grpc.py
@@ -30,8 +30,8 @@
"""Adapts the grpc surface."""
from __future__ import absolute_import
-from grpc.beta import implementations
-from grpc.beta.interfaces import StatusCode
+import grpc
+from grpc import StatusCode
from grpc.framework.interfaces.face import face
from . import auth
@@ -82,8 +82,8 @@ def _make_grpc_auth_func(auth_func):
def _make_channel_creds(auth_func, ssl_creds):
"""Converts the auth func into the composite creds expected by grpc."""
grpc_auth_func = _make_grpc_auth_func(auth_func)
- call_creds = implementations.metadata_call_credentials(grpc_auth_func)
- return implementations.composite_channel_credentials(ssl_creds, call_creds)
+ call_creds = grpc.metadata_call_credentials(grpc_auth_func)
+ return grpc.composite_channel_credentials(ssl_creds, call_creds)
def create_stub(generated_create_stub, service_path, port, ssl_creds=None,
@@ -108,15 +108,14 @@ def create_stub(generated_create_stub, service_path, port, ssl_creds=None,
"""
if channel is None:
if ssl_creds is None:
- ssl_creds = implementations.ssl_channel_credentials(
- None, None, None)
+ ssl_creds = grpc.ssl_channel_credentials()
if metadata_transformer is None:
if scopes is None:
scopes = []
metadata_transformer = auth.make_auth_func(scopes)
channel_creds = _make_channel_creds(metadata_transformer, ssl_creds)
- channel = implementations.secure_channel(
- service_path, port, channel_creds)
+ target = '{}:{}'.format(service_path, port)
+ channel = grpc.secure_channel(target, channel_creds)
return generated_create_stub(channel)
diff --git a/setup.py b/setup.py
index b3b1a20..6ac8e92 100644
--- a/setup.py
+++ b/setup.py
@@ -50,9 +50,9 @@ with open('google/gax/__init__.py', 'r') as f:
install_requires = [
'future>=0.15.2',
- 'grpcio>=1.0rc1',
+ 'grpcio>=1.0.0',
'ply==3.8',
- 'protobuf>=3.0.0b3',
+ 'protobuf>=3.0.0',
'oauth2client>=1.5.2',
]
| Remove `beta` from gRPC imports
### What:
Change imports of `grpc.beta...` to `grpc...`.
### Why:
gRPC is almost GA, and gcloud has already made this change: https://github.com/GoogleCloudPlatform/gcloud-python/pull/2149 | googleapis/gax-python | diff --git a/test-requirements.txt b/test-requirements.txt
index a7edf65..db1112c 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -3,11 +3,4 @@ pytest>=2.8.3
pytest-cov>=1.8.1
pytest-timeout>=1.0.0
unittest2>=1.1.0
-
-# TODO: remove this line when grpcio goes to 1.0.0. This is only necessary
-# because pip (running in Travis CI) will not install the release candidate
-# when it gets pulled in as a dependency of grpcio-tools, below, which causes
-# build failure in Python 3 environments
-grpcio>=1.0.0rc1
-
-grpcio-tools>=1.0.0rc2
+grpcio-tools>=1.0.0
diff --git a/test/test_grpc.py b/test/test_grpc.py
index 56329d9..0a81550 100644
--- a/test/test_grpc.py
+++ b/test/test_grpc.py
@@ -46,23 +46,24 @@ class TestCreateStub(unittest2.TestCase):
FAKE_SERVICE_PATH = 'service_path'
FAKE_PORT = 10101
- @mock.patch('grpc.beta.implementations.composite_channel_credentials')
- @mock.patch('grpc.beta.implementations.ssl_channel_credentials')
- @mock.patch('grpc.beta.implementations.secure_channel')
+ @mock.patch('grpc.composite_channel_credentials')
+ @mock.patch('grpc.ssl_channel_credentials')
+ @mock.patch('grpc.secure_channel')
@mock.patch('google.gax.auth.make_auth_func')
def test_creates_a_stub_ok_with_no_scopes(
self, auth, chan, chan_creds, comp):
got_channel = grpc.create_stub(
_fake_create_stub, self.FAKE_SERVICE_PATH, self.FAKE_PORT)
- chan_creds.assert_called_once_with(None, None, None)
- chan.assert_called_once_with(self.FAKE_SERVICE_PATH, self.FAKE_PORT,
- comp.return_value)
+ chan_creds.assert_called_once_with()
+ chan.assert_called_once_with(
+ '{}:{}'.format(self.FAKE_SERVICE_PATH, self.FAKE_PORT),
+ comp.return_value)
auth.assert_called_once_with([])
self.assertEqual(got_channel, chan.return_value)
- @mock.patch('grpc.beta.implementations.composite_channel_credentials')
- @mock.patch('grpc.beta.implementations.ssl_channel_credentials')
- @mock.patch('grpc.beta.implementations.secure_channel')
+ @mock.patch('grpc.composite_channel_credentials')
+ @mock.patch('grpc.ssl_channel_credentials')
+ @mock.patch('grpc.secure_channel')
@mock.patch('google.gax.auth.make_auth_func')
def test_creates_a_stub_ok_with_scopes(
self, auth, chan, chan_creds, comp):
@@ -70,15 +71,16 @@ class TestCreateStub(unittest2.TestCase):
grpc.create_stub(
_fake_create_stub, self.FAKE_SERVICE_PATH, self.FAKE_PORT,
scopes=fake_scopes)
- chan_creds.assert_called_once_with(None, None, None)
- chan.assert_called_once_with(self.FAKE_SERVICE_PATH, self.FAKE_PORT,
- comp.return_value)
+ chan_creds.assert_called_once_with()
+ chan.assert_called_once_with(
+ '{}:{}'.format(self.FAKE_SERVICE_PATH, self.FAKE_PORT),
+ comp.return_value)
auth.assert_called_once_with(fake_scopes)
- @mock.patch('grpc.beta.implementations.metadata_call_credentials')
- @mock.patch('grpc.beta.implementations.composite_channel_credentials')
- @mock.patch('grpc.beta.implementations.ssl_channel_credentials')
- @mock.patch('grpc.beta.implementations.secure_channel')
+ @mock.patch('grpc.metadata_call_credentials')
+ @mock.patch('grpc.composite_channel_credentials')
+ @mock.patch('grpc.ssl_channel_credentials')
+ @mock.patch('grpc.secure_channel')
@mock.patch('google.gax.auth.make_auth_func')
def test_creates_a_stub_with_given_channel(
self, auth, chan, chan_creds, comp, md):
@@ -93,10 +95,10 @@ class TestCreateStub(unittest2.TestCase):
self.assertFalse(comp.called)
self.assertFalse(md.called)
- @mock.patch('grpc.beta.implementations.metadata_call_credentials')
- @mock.patch('grpc.beta.implementations.composite_channel_credentials')
- @mock.patch('grpc.beta.implementations.ssl_channel_credentials')
- @mock.patch('grpc.beta.implementations.secure_channel')
+ @mock.patch('grpc.metadata_call_credentials')
+ @mock.patch('grpc.composite_channel_credentials')
+ @mock.patch('grpc.ssl_channel_credentials')
+ @mock.patch('grpc.secure_channel')
@mock.patch('google.gax.auth.make_auth_func')
def test_creates_a_stub_ok_with_given_creds(self, auth, chan, chan_creds,
comp, md):
@@ -104,8 +106,9 @@ class TestCreateStub(unittest2.TestCase):
got_channel = grpc.create_stub(
_fake_create_stub, self.FAKE_SERVICE_PATH, self.FAKE_PORT,
ssl_creds=fake_creds)
- chan.assert_called_once_with(self.FAKE_SERVICE_PATH, self.FAKE_PORT,
- comp.return_value)
+ chan.assert_called_once_with(
+ '{}:{}'.format(self.FAKE_SERVICE_PATH, self.FAKE_PORT),
+ comp.return_value)
auth.assert_called_once_with([])
self.assertTrue(chan.called)
self.assertFalse(chan_creds.called)
@@ -113,9 +116,9 @@ class TestCreateStub(unittest2.TestCase):
self.assertTrue(md.called)
self.assertEqual(got_channel, chan.return_value)
- @mock.patch('grpc.beta.implementations.composite_channel_credentials')
- @mock.patch('grpc.beta.implementations.ssl_channel_credentials')
- @mock.patch('grpc.beta.implementations.secure_channel')
+ @mock.patch('grpc.composite_channel_credentials')
+ @mock.patch('grpc.ssl_channel_credentials')
+ @mock.patch('grpc.secure_channel')
@mock.patch('google.gax.auth.make_auth_func')
def test_creates_a_stub_ok_with_given_auth_func(self, auth, dummy_chan,
dummy_chan_creds, dummy_md):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 3
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"test-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
exceptiongroup==1.2.2
future==1.0.0
-e git+https://github.com/googleapis/gax-python.git@d59635e37d2420908a2828da8f54c0bbbd4cf4e6#egg=google_gax
grpcio==1.62.3
grpcio-tools==1.62.3
httplib2==0.22.0
importlib-metadata==6.7.0
iniconfig==2.0.0
linecache2==1.0.0
mock==5.2.0
oauth2client==4.1.3
packaging==24.0
pluggy==1.2.0
ply==3.8
protobuf==4.24.4
pyasn1==0.5.1
pyasn1-modules==0.3.0
pyparsing==3.1.4
pytest==7.4.4
pytest-cov==4.1.0
pytest-timeout==2.3.1
rsa==4.9
six==1.17.0
tomli==2.0.1
traceback2==1.4.0
typing_extensions==4.7.1
unittest2==1.1.0
zipp==3.15.0
| name: gax-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- coverage==7.2.7
- exceptiongroup==1.2.2
- future==1.0.0
- grpcio==1.62.3
- grpcio-tools==1.62.3
- httplib2==0.22.0
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- linecache2==1.0.0
- mock==5.2.0
- oauth2client==4.1.3
- packaging==24.0
- pluggy==1.2.0
- ply==3.8
- protobuf==4.24.4
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pyparsing==3.1.4
- pytest==7.4.4
- pytest-cov==4.1.0
- pytest-timeout==2.3.1
- rsa==4.9
- six==1.17.0
- tomli==2.0.1
- traceback2==1.4.0
- typing-extensions==4.7.1
- unittest2==1.1.0
- zipp==3.15.0
prefix: /opt/conda/envs/gax-python
| [
"test/test_grpc.py::TestCreateStub::test_creates_a_stub_ok_with_given_creds",
"test/test_grpc.py::TestCreateStub::test_creates_a_stub_ok_with_no_scopes",
"test/test_grpc.py::TestCreateStub::test_creates_a_stub_ok_with_scopes"
]
| []
| [
"test/test_grpc.py::TestCreateStub::test_creates_a_stub_ok_with_given_auth_func",
"test/test_grpc.py::TestCreateStub::test_creates_a_stub_with_given_channel"
]
| []
| BSD 3-Clause "New" or "Revised" License | 716 | [
"setup.py",
"google/gax/__init__.py",
"google/gax/grpc.py"
]
| [
"setup.py",
"google/gax/__init__.py",
"google/gax/grpc.py"
]
|
globocom__m3u8-77 | 1897052d9362c21a4af52340e945edaec7de8fe8 | 2016-08-23 20:04:34 | 1897052d9362c21a4af52340e945edaec7de8fe8 | coveralls:
[](https://coveralls.io/builds/7575105)
Coverage increased (+0.6%) to 95.122% when pulling **2876d443742bc91dfeda646072a6b71235bacfa8 on ziima:73-commaless-extinf** into **651c14ab5368692fd23f321a78bfe558d2b22f33 on globocom:master**.
leandromoreira: Thanks @ziima can you fixe https://travis-ci.org/globocom/m3u8/jobs/154559924#L442 for python 3 ?
ziima: I think that's unrelated existing bug. This is just first time it's covered in tests.
ziima: The bug should be addressed in #78
leandromoreira: Thanks @ziima can you rebase it? | diff --git a/m3u8/parser.py b/m3u8/parser.py
index f1c7590..a0b7f5d 100644
--- a/m3u8/parser.py
+++ b/m3u8/parser.py
@@ -91,7 +91,7 @@ def parse(content, strict=False):
data['key'] = data.get('key', state['current_key'])
elif line.startswith(protocol.extinf):
- _parse_extinf(line, data, state)
+ _parse_extinf(line, data, state, lineno, strict)
state['expect_segment'] = True
elif line.startswith(protocol.ext_x_stream_inf):
@@ -145,8 +145,16 @@ def _parse_key(line):
key[normalize_attribute(name)] = remove_quotes(value)
return key
-def _parse_extinf(line, data, state):
- duration, title = line.replace(protocol.extinf + ':', '').split(',')
+def _parse_extinf(line, data, state, lineno, strict):
+ chunks = line.replace(protocol.extinf + ':', '').split(',')
+ if len(chunks) == 2:
+ duration, title = chunks
+ elif len(chunks) == 1:
+ if strict:
+ raise ParseError(lineno, line)
+ else:
+ duration = chunks[0]
+ title = ''
if 'segment' not in state:
state['segment'] = {}
state['segment']['duration'] = float(duration)
| EXTINF tag with optional comma?
m3u8 library requires EXTINF tag to have title, but the title is optional, see https://tools.ietf.org/html/draft-pantos-http-live-streaming-09#section-3.3.2 | globocom/m3u8 | diff --git a/tests/playlists.py b/tests/playlists.py
index b663eba..909af78 100755
--- a/tests/playlists.py
+++ b/tests/playlists.py
@@ -287,6 +287,15 @@ JUNK
#EXT-X-ENDLIST
'''
+# Playlist with EXTINF record not ending with comma
+SIMPLE_PLAYLIST_COMMALESS_EXTINF = '''
+#EXTM3U
+#EXT-X-TARGETDURATION:5220
+#EXTINF:5220
+http://media.example.com/entire.ts
+#EXT-X-ENDLIST
+'''
+
DISCONTINUITY_PLAYLIST_WITH_PROGRAM_DATE_TIME = '''
#EXTM3U
#EXT-X-MEDIA-SEQUENCE:50116
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 9c20f8d..fbbf834 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -191,3 +191,15 @@ def test_parse_simple_playlist_messy_strict():
with pytest.raises(ParseError) as catch:
m3u8.parse(playlists.SIMPLE_PLAYLIST_MESSY, strict=True)
assert str(catch.value) == 'Syntax error in manifest on line 5: JUNK'
+
+def test_commaless_extinf():
+ data = m3u8.parse(playlists.SIMPLE_PLAYLIST_COMMALESS_EXTINF)
+ assert 5220 == data['targetduration']
+ assert 0 == data['media_sequence']
+ assert ['http://media.example.com/entire.ts'] == [c['uri'] for c in data['segments']]
+ assert [5220] == [c['duration'] for c in data['segments']]
+
+def test_commaless_extinf_strict():
+ with pytest.raises(ParseError) as e:
+ m3u8.parse(playlists.SIMPLE_PLAYLIST_COMMALESS_EXTINF, strict=True)
+ assert str(e.value) == 'Syntax error in manifest on line 3: #EXTINF:5220'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
iso8601==2.1.0
-e git+https://github.com/globocom/m3u8.git@1897052d9362c21a4af52340e945edaec7de8fe8#egg=m3u8
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: m3u8
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- iso8601==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/m3u8
| [
"tests/test_parser.py::test_commaless_extinf",
"tests/test_parser.py::test_commaless_extinf_strict"
]
| []
| [
"tests/test_parser.py::test_should_parse_simple_playlist_from_string",
"tests/test_parser.py::test_should_parse_non_integer_duration_from_playlist_string",
"tests/test_parser.py::test_should_parse_simple_playlist_from_string_with_different_linebreaks",
"tests/test_parser.py::test_should_parse_sliding_window_playlist_from_string",
"tests/test_parser.py::test_should_parse_playlist_with_encripted_segments_from_string",
"tests/test_parser.py::test_should_load_playlist_with_iv_from_string",
"tests/test_parser.py::test_should_add_key_attribute_to_segment_from_playlist",
"tests/test_parser.py::test_should_parse_title_from_playlist",
"tests/test_parser.py::test_should_parse_variant_playlist",
"tests/test_parser.py::test_should_parse_variant_playlist_with_average_bandwidth",
"tests/test_parser.py::test_should_parse_variant_playlist_with_iframe_playlists",
"tests/test_parser.py::test_should_parse_variant_playlist_with_alt_iframe_playlists_layout",
"tests/test_parser.py::test_should_parse_iframe_playlist",
"tests/test_parser.py::test_should_parse_playlist_using_byteranges",
"tests/test_parser.py::test_should_parse_endlist_playlist",
"tests/test_parser.py::test_should_parse_ALLOW_CACHE",
"tests/test_parser.py::test_should_parse_VERSION",
"tests/test_parser.py::test_should_parse_program_date_time_from_playlist",
"tests/test_parser.py::test_should_parse_scte35_from_playlist",
"tests/test_parser.py::test_parse_simple_playlist_messy",
"tests/test_parser.py::test_parse_simple_playlist_messy_strict"
]
| []
| MIT License | 717 | [
"m3u8/parser.py"
]
| [
"m3u8/parser.py"
]
|
ros-infrastructure__catkin_pkg-151 | c6389ac1d8fdec2ffa16662c033e9f2c653e7a0d | 2016-08-23 22:27:53 | ba1bcce63f91520b6b9e76f980df38b9a76cd561 | diff --git a/src/catkin_pkg/package.py b/src/catkin_pkg/package.py
index f7e0717..0cfce13 100644
--- a/src/catkin_pkg/package.py
+++ b/src/catkin_pkg/package.py
@@ -262,6 +262,9 @@ class Dependency(object):
return False
return all([getattr(self, attr) == getattr(other, attr) for attr in self.__slots__])
+ def __hash__(self):
+ return hash(tuple([getattr(self, slot) for slot in self.__slots__]))
+
def __str__(self):
return self.name
| Dependency should have a hash function
Although they are not immutable, this is important for being able to deduplicate/manage them using `set()`. I'm monkey-patching in this one, but it'd be great to have it merge upstream:
```
import catkin_pkg.package
catkin_pkg.package.Dependency.__hash__ = lambda self: hash(tuple([getattr(self, slot) for slot in self.__slots__]))
``` | ros-infrastructure/catkin_pkg | diff --git a/test/test_package.py b/test/test_package.py
index 527f5b0..850d515 100644
--- a/test/test_package.py
+++ b/test/test_package.py
@@ -59,6 +59,26 @@ class PackageTest(unittest.TestCase):
self.assertEquals(5, dep.version_gt)
self.assertRaises(TypeError, Dependency, 'foo', unknownattribute=42)
+ d = {}
+ d[dep] = None
+ dep2 = Dependency('foo',
+ version_lt=1,
+ version_lte=2,
+ version_eq=3,
+ version_gte=4,
+ version_gt=5)
+ d[dep2] = None
+ self.assertEquals(len(d), 1)
+ dep3 = Dependency('foo',
+ version_lt=1,
+ version_lte=2,
+ version_eq=3,
+ version_gte=4,
+ version_gt=6)
+ d[dep3] = None
+ self.assertEquals(len(d), 2)
+
+
def test_init_kwargs_string(self):
pack = Package('foo',
name='bar',
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"nose"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "ROS_PYTHON_VERSION=3 pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/ros-infrastructure/catkin_pkg.git@c6389ac1d8fdec2ffa16662c033e9f2c653e7a0d#egg=catkin_pkg
docutils==0.21.2
exceptiongroup==1.2.2
iniconfig==2.1.0
mock==5.2.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
tomli==2.2.1
| name: catkin_pkg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- mock==5.2.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/catkin_pkg
| [
"test/test_package.py::PackageTest::test_init_dependency"
]
| []
| [
"test/test_package.py::PackageTest::test_init",
"test/test_package.py::PackageTest::test_init_kwargs_object",
"test/test_package.py::PackageTest::test_init_kwargs_string",
"test/test_package.py::PackageTest::test_validate_package",
"test/test_package.py::PackageTest::test_validate_person"
]
| []
| BSD License | 718 | [
"src/catkin_pkg/package.py"
]
| [
"src/catkin_pkg/package.py"
]
|
|
docker__docker-py-1167 | 2ef02df2f06fafe7d71c96bac1e18d68217703ab | 2016-08-23 23:53:03 | a44d65be370c28abd666a299456b83659dd1a1df | dnephin: LGTM, just a minor nit | diff --git a/docker/client.py b/docker/client.py
index d1c6ee5f..dc28ac46 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -114,7 +114,8 @@ class Client(
@classmethod
def from_env(cls, **kwargs):
- return cls(**kwargs_from_env(**kwargs))
+ version = kwargs.pop('version', None)
+ return cls(version=version, **kwargs_from_env(**kwargs))
def _retrieve_server_version(self):
try:
@@ -268,7 +269,7 @@ class Client(
else:
# Response isn't chunked, meaning we probably
# encountered an error immediately
- yield self._result(response, json=decode)
+ yield self._result(response)
def _multiplexed_buffer_helper(self, response):
"""A generator of multiplexed data blocks read from a buffered
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index c108a835..a5fbe0ba 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -623,7 +623,7 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
device_write_iops=None, oom_kill_disable=False,
shm_size=None, sysctls=None, version=None, tmpfs=None,
oom_score_adj=None, dns_opt=None, cpu_shares=None,
- cpuset_cpus=None, userns_mode=None, pids_limit=None):
+ cpuset_cpus=None, userns_mode=None):
host_config = {}
@@ -904,13 +904,6 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
raise host_config_value_error("userns_mode", userns_mode)
host_config['UsernsMode'] = userns_mode
- if pids_limit:
- if not isinstance(pids_limit, int):
- raise host_config_type_error('pids_limit', pids_limit, 'int')
- if version_lt(version, '1.23'):
- raise host_config_version_error('pids_limit', '1.23')
- host_config["PidsLimit"] = pids_limit
-
return host_config
@@ -1000,7 +993,7 @@ def format_environment(environment):
def format_env(key, value):
if value is None:
return key
- return u'{key}={value}'.format(key=key, value=value)
+ return '{key}={value}'.format(key=key, value=value)
return [format_env(*var) for var in six.iteritems(environment)]
diff --git a/docs/hostconfig.md b/docs/hostconfig.md
index 008d5cf2..6645bd1f 100644
--- a/docs/hostconfig.md
+++ b/docs/hostconfig.md
@@ -111,12 +111,11 @@ for example:
CPU period.
* cpu_shares (int): CPU shares (relative weight)
* cpuset_cpus (str): CPUs in which to allow execution (0-3, 0,1)
-* blkio_weight: Block IO weight (relative weight), accepts a weight value
- between 10 and 1000.
+* blkio_weight: Block IO weight (relative weight), accepts a weight value between 10 and 1000.
* blkio_weight_device: Block IO weight (relative device weight) in the form of:
`[{"Path": "device_path", "Weight": weight}]`
-* device_read_bps: Limit read rate (bytes per second) from a device in the
- form of: `[{"Path": "device_path", "Rate": rate}]`
+* device_read_bps: Limit read rate (bytes per second) from a device in the form of:
+ `[{"Path": "device_path", "Rate": rate}]`
* device_write_bps: Limit write rate (bytes per second) from a device.
* device_read_iops: Limit read rate (IO per second) from a device.
* device_write_iops: Limit write rate (IO per second) from a device.
@@ -129,7 +128,6 @@ for example:
* sysctls (dict): Kernel parameters to set in the container.
* userns_mode (str): Sets the user namespace mode for the container when user
namespace remapping option is enabled. Supported values are: `host`
-* pids_limit (int): Tune a container’s pids limit. Set -1 for unlimited.
**Returns** (dict) HostConfig dictionary
| Feature Request: docker.from_env(version='auto')
Feature request to add auto api version support for ```docker.from_env()``` similar to ```docker.Client(version='auto')```?
I noticed that one of the suggestions from #402 for the ```version='auto'``` option was now available for ```docker.Client()``` but doesn't work for ```docker.from_env()```. | docker/docker-py | diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index 5850afa2..712f57e0 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -22,11 +22,9 @@ import sys
import tempfile
import threading
import time
-import io
import docker
import requests
-from requests.packages import urllib3
import six
from .. import base
@@ -44,7 +42,7 @@ DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
- request=None, raw=None):
+ request=None):
res = requests.Response()
res.status_code = status_code
if not isinstance(content, six.binary_type):
@@ -54,7 +52,6 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
res.reason = reason
res.elapsed = datetime.timedelta(elapsed)
res.request = request
- res.raw = raw
return res
@@ -331,43 +328,6 @@ class DockerApiTest(DockerClientTest):
TypeError, self.client.create_host_config, security_opt='wrong'
)
- def test_stream_helper_decoding(self):
- status_code, content = fake_api.fake_responses[url_prefix + 'events']()
- content_str = json.dumps(content)
- if six.PY3:
- content_str = content_str.encode('utf-8')
- body = io.BytesIO(content_str)
-
- # mock a stream interface
- raw_resp = urllib3.HTTPResponse(body=body)
- setattr(raw_resp._fp, 'chunked', True)
- setattr(raw_resp._fp, 'chunk_left', len(body.getvalue())-1)
-
- # pass `decode=False` to the helper
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp))
- self.assertEqual(result, content_str)
-
- # pass `decode=True` to the helper
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp, decode=True))
- self.assertEqual(result, content)
-
- # non-chunked response, pass `decode=False` to the helper
- setattr(raw_resp._fp, 'chunked', False)
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp))
- self.assertEqual(result, content_str.decode('utf-8'))
-
- # non-chunked response, pass `decode=True` to the helper
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp, decode=True))
- self.assertEqual(result, content)
-
class StreamTest(base.Cleanup, base.BaseTestCase):
def setUp(self):
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index b21f1d6a..6ceb8cbb 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -25,6 +25,14 @@ class ClientTest(base.BaseTestCase):
client = Client.from_env()
self.assertEqual(client.base_url, "https://192.168.59.103:2376")
+ def test_from_env_with_version(self):
+ os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
+ DOCKER_CERT_PATH=TEST_CERT_DIR,
+ DOCKER_TLS_VERIFY='1')
+ client = Client.from_env(version='2.32')
+ self.assertEqual(client.base_url, "https://192.168.59.103:2376")
+ self.assertEqual(client._version, '2.32')
+
class DisableSocketTest(base.BaseTestCase):
class DummySocket(object):
diff --git a/tests/unit/container_test.py b/tests/unit/container_test.py
index 3cea42fb..c480462f 100644
--- a/tests/unit/container_test.py
+++ b/tests/unit/container_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import datetime
import json
import signal
@@ -1157,24 +1155,6 @@ class CreateContainerTest(DockerClientTest):
args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
)
- def test_create_container_with_unicode_envvars(self):
- envvars_dict = {
- 'foo': u'☃',
- }
-
- expected = [
- u'foo=☃'
- ]
-
- self.client.create_container(
- 'busybox', 'true',
- environment=envvars_dict,
- )
-
- args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data'])['Env'], expected)
-
class ContainerTest(DockerClientTest):
def test_list_containers(self):
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 2a2759d0..3476f041 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -185,15 +185,6 @@ class HostConfigTest(base.BaseTestCase):
InvalidVersion, lambda: create_host_config(
version='1.20', kernel_memory=67108864))
- def test_create_host_config_with_pids_limit(self):
- config = create_host_config(version='1.23', pids_limit=1024)
- self.assertEqual(config.get('PidsLimit'), 1024)
-
- with pytest.raises(InvalidVersion):
- create_host_config(version='1.22', pids_limit=1024)
- with pytest.raises(TypeError):
- create_host_config(version='1.22', pids_limit='1024')
-
class UlimitTest(base.BaseTestCase):
def test_create_host_config_dict_ulimit(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@2ef02df2f06fafe7d71c96bac1e18d68217703ab#egg=docker_py
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/client_test.py::ClientTest::test_from_env_with_version"
]
| []
| [
"tests/unit/api_test.py::DockerApiTest::test_auto_retrieve_server_version",
"tests/unit/api_test.py::DockerApiTest::test_create_host_config_secopt",
"tests/unit/api_test.py::DockerApiTest::test_ctor",
"tests/unit/api_test.py::DockerApiTest::test_events",
"tests/unit/api_test.py::DockerApiTest::test_events_with_filters",
"tests/unit/api_test.py::DockerApiTest::test_events_with_since_until",
"tests/unit/api_test.py::DockerApiTest::test_info",
"tests/unit/api_test.py::DockerApiTest::test_remove_link",
"tests/unit/api_test.py::DockerApiTest::test_retrieve_server_version",
"tests/unit/api_test.py::DockerApiTest::test_search",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_http",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_http_unix_triple_slash",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_tcp",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_unix",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_unix_triple_slash",
"tests/unit/api_test.py::DockerApiTest::test_url_invalid_resource",
"tests/unit/api_test.py::DockerApiTest::test_url_no_resource",
"tests/unit/api_test.py::DockerApiTest::test_url_unversioned_api",
"tests/unit/api_test.py::DockerApiTest::test_url_valid_resource",
"tests/unit/api_test.py::DockerApiTest::test_version",
"tests/unit/api_test.py::DockerApiTest::test_version_no_api_version",
"tests/unit/api_test.py::StreamTest::test_early_stream_response",
"tests/unit/api_test.py::UserAgentTest::test_custom_user_agent",
"tests/unit/api_test.py::UserAgentTest::test_default_user_agent",
"tests/unit/client_test.py::ClientTest::test_from_env",
"tests/unit/client_test.py::DisableSocketTest::test_disable_socket_timeout",
"tests/unit/client_test.py::DisableSocketTest::test_disable_socket_timeout2",
"tests/unit/client_test.py::DisableSocketTest::test_disable_socket_timout_non_blocking",
"tests/unit/container_test.py::StartContainerTest::test_start_container",
"tests/unit/container_test.py::StartContainerTest::test_start_container_none",
"tests/unit/container_test.py::StartContainerTest::test_start_container_privileged",
"tests/unit/container_test.py::StartContainerTest::test_start_container_regression_573",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_binds_ro",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_binds_rw",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_dict_instead_of_id",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_links",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_links_as_list_of_tuples",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_lxc_conf",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_lxc_conf_compat",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_multiple_links",
"tests/unit/container_test.py::StartContainerTest::test_start_container_with_port_binds",
"tests/unit/container_test.py::CreateContainerTest::test_create_container",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_empty_volumes_from",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_privileged",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_added_capabilities",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_aliases",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_list",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_mode",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_mode_and_ro_error",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_ro",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_rw",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cgroup_parent",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cpu_shares",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cpuset",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_devices",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_dropped_capabilities",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_entrypoint",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_host_config_cpu_shares",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_host_config_cpuset",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_labels_dict",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_labels_list",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_links",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_links_as_list_of_tuples",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_lxc_conf",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_lxc_conf_compat",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mac_address",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_int",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_g_unit",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_k_unit",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_m_unit",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_wrong_value",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_multiple_links",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_named_volume",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_port_binds",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_ports",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_restart_policy",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_stdin_open",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_stop_signal",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_sysctl",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_tmpfs_dict",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_tmpfs_list",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_volume_string",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_volumes_from",
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_working_dir",
"tests/unit/container_test.py::CreateContainerTest::test_create_named_container",
"tests/unit/container_test.py::ContainerTest::test_container_stats",
"tests/unit/container_test.py::ContainerTest::test_container_top",
"tests/unit/container_test.py::ContainerTest::test_container_top_with_psargs",
"tests/unit/container_test.py::ContainerTest::test_container_update",
"tests/unit/container_test.py::ContainerTest::test_diff",
"tests/unit/container_test.py::ContainerTest::test_diff_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_export",
"tests/unit/container_test.py::ContainerTest::test_export_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_inspect_container",
"tests/unit/container_test.py::ContainerTest::test_inspect_container_undefined_id",
"tests/unit/container_test.py::ContainerTest::test_kill_container",
"tests/unit/container_test.py::ContainerTest::test_kill_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_kill_container_with_signal",
"tests/unit/container_test.py::ContainerTest::test_list_containers",
"tests/unit/container_test.py::ContainerTest::test_log_following",
"tests/unit/container_test.py::ContainerTest::test_log_following_backwards",
"tests/unit/container_test.py::ContainerTest::test_log_since",
"tests/unit/container_test.py::ContainerTest::test_log_since_with_datetime",
"tests/unit/container_test.py::ContainerTest::test_log_streaming",
"tests/unit/container_test.py::ContainerTest::test_log_streaming_and_following",
"tests/unit/container_test.py::ContainerTest::test_log_tail",
"tests/unit/container_test.py::ContainerTest::test_log_tty",
"tests/unit/container_test.py::ContainerTest::test_logs",
"tests/unit/container_test.py::ContainerTest::test_logs_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_pause_container",
"tests/unit/container_test.py::ContainerTest::test_port",
"tests/unit/container_test.py::ContainerTest::test_remove_container",
"tests/unit/container_test.py::ContainerTest::test_remove_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_rename_container",
"tests/unit/container_test.py::ContainerTest::test_resize_container",
"tests/unit/container_test.py::ContainerTest::test_restart_container",
"tests/unit/container_test.py::ContainerTest::test_restart_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_stop_container",
"tests/unit/container_test.py::ContainerTest::test_stop_container_with_dict_instead_of_id",
"tests/unit/container_test.py::ContainerTest::test_unpause_container",
"tests/unit/container_test.py::ContainerTest::test_wait",
"tests/unit/container_test.py::ContainerTest::test_wait_with_dict_instead_of_id",
"tests/unit/utils_test.py::DecoratorsTest::test_update_headers",
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_blkio_constraints",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_dns_opt",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_kernel_memory",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_mem_reservation",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_score_adj",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_userns_mode",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_alternate_env",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_with_equals_character",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls_tcp_proto",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_leading_dot_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_with_path_traversal",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 719 | [
"docs/hostconfig.md",
"docker/utils/utils.py",
"docker/client.py"
]
| [
"docs/hostconfig.md",
"docker/utils/utils.py",
"docker/client.py"
]
|
docker__docker-py-1168 | fb41965272b5c0e7c911ee268270b92e2da06c1d | 2016-08-24 00:06:19 | a44d65be370c28abd666a299456b83659dd1a1df | diff --git a/docker/client.py b/docker/client.py
index d1c6ee5f..75867536 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -268,7 +268,7 @@ class Client(
else:
# Response isn't chunked, meaning we probably
# encountered an error immediately
- yield self._result(response, json=decode)
+ yield self._result(response)
def _multiplexed_buffer_helper(self, response):
"""A generator of multiplexed data blocks read from a buffered
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index a5fbe0ba..bc6c7a64 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -623,7 +623,7 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
device_write_iops=None, oom_kill_disable=False,
shm_size=None, sysctls=None, version=None, tmpfs=None,
oom_score_adj=None, dns_opt=None, cpu_shares=None,
- cpuset_cpus=None, userns_mode=None):
+ cpuset_cpus=None, userns_mode=None, pids_limit=None):
host_config = {}
@@ -904,6 +904,13 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
raise host_config_value_error("userns_mode", userns_mode)
host_config['UsernsMode'] = userns_mode
+ if pids_limit:
+ if not isinstance(pids_limit, int):
+ raise host_config_type_error('pids_limit', pids_limit, 'int')
+ if version_lt(version, '1.23'):
+ raise host_config_version_error('pids_limit', '1.23')
+ host_config["PidsLimit"] = pids_limit
+
return host_config
diff --git a/docs/hostconfig.md b/docs/hostconfig.md
index 6645bd1f..008d5cf2 100644
--- a/docs/hostconfig.md
+++ b/docs/hostconfig.md
@@ -111,11 +111,12 @@ for example:
CPU period.
* cpu_shares (int): CPU shares (relative weight)
* cpuset_cpus (str): CPUs in which to allow execution (0-3, 0,1)
-* blkio_weight: Block IO weight (relative weight), accepts a weight value between 10 and 1000.
+* blkio_weight: Block IO weight (relative weight), accepts a weight value
+ between 10 and 1000.
* blkio_weight_device: Block IO weight (relative device weight) in the form of:
`[{"Path": "device_path", "Weight": weight}]`
-* device_read_bps: Limit read rate (bytes per second) from a device in the form of:
- `[{"Path": "device_path", "Rate": rate}]`
+* device_read_bps: Limit read rate (bytes per second) from a device in the
+ form of: `[{"Path": "device_path", "Rate": rate}]`
* device_write_bps: Limit write rate (bytes per second) from a device.
* device_read_iops: Limit read rate (IO per second) from a device.
* device_write_iops: Limit write rate (IO per second) from a device.
@@ -128,6 +129,7 @@ for example:
* sysctls (dict): Kernel parameters to set in the container.
* userns_mode (str): Sets the user namespace mode for the container when user
namespace remapping option is enabled. Supported values are: `host`
+* pids_limit (int): Tune a container’s pids limit. Set -1 for unlimited.
**Returns** (dict) HostConfig dictionary
| support PidsLimit in host config | docker/docker-py | diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index 5850afa2..712f57e0 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -22,11 +22,9 @@ import sys
import tempfile
import threading
import time
-import io
import docker
import requests
-from requests.packages import urllib3
import six
from .. import base
@@ -44,7 +42,7 @@ DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
- request=None, raw=None):
+ request=None):
res = requests.Response()
res.status_code = status_code
if not isinstance(content, six.binary_type):
@@ -54,7 +52,6 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
res.reason = reason
res.elapsed = datetime.timedelta(elapsed)
res.request = request
- res.raw = raw
return res
@@ -331,43 +328,6 @@ class DockerApiTest(DockerClientTest):
TypeError, self.client.create_host_config, security_opt='wrong'
)
- def test_stream_helper_decoding(self):
- status_code, content = fake_api.fake_responses[url_prefix + 'events']()
- content_str = json.dumps(content)
- if six.PY3:
- content_str = content_str.encode('utf-8')
- body = io.BytesIO(content_str)
-
- # mock a stream interface
- raw_resp = urllib3.HTTPResponse(body=body)
- setattr(raw_resp._fp, 'chunked', True)
- setattr(raw_resp._fp, 'chunk_left', len(body.getvalue())-1)
-
- # pass `decode=False` to the helper
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp))
- self.assertEqual(result, content_str)
-
- # pass `decode=True` to the helper
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp, decode=True))
- self.assertEqual(result, content)
-
- # non-chunked response, pass `decode=False` to the helper
- setattr(raw_resp._fp, 'chunked', False)
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp))
- self.assertEqual(result, content_str.decode('utf-8'))
-
- # non-chunked response, pass `decode=True` to the helper
- raw_resp._fp.seek(0)
- resp = response(status_code=status_code, content=content, raw=raw_resp)
- result = next(self.client._stream_helper(resp, decode=True))
- self.assertEqual(result, content)
-
class StreamTest(base.Cleanup, base.BaseTestCase):
def setUp(self):
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 3476f041..2a2759d0 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -185,6 +185,15 @@ class HostConfigTest(base.BaseTestCase):
InvalidVersion, lambda: create_host_config(
version='1.20', kernel_memory=67108864))
+ def test_create_host_config_with_pids_limit(self):
+ config = create_host_config(version='1.23', pids_limit=1024)
+ self.assertEqual(config.get('PidsLimit'), 1024)
+
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.22', pids_limit=1024)
+ with pytest.raises(TypeError):
+ create_host_config(version='1.22', pids_limit='1024')
+
class UlimitTest(base.BaseTestCase):
def test_create_host_config_dict_ulimit(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 3
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@fb41965272b5c0e7c911ee268270b92e2da06c1d#egg=docker_py
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_pids_limit"
]
| []
| [
"tests/unit/api_test.py::DockerApiTest::test_auto_retrieve_server_version",
"tests/unit/api_test.py::DockerApiTest::test_create_host_config_secopt",
"tests/unit/api_test.py::DockerApiTest::test_ctor",
"tests/unit/api_test.py::DockerApiTest::test_events",
"tests/unit/api_test.py::DockerApiTest::test_events_with_filters",
"tests/unit/api_test.py::DockerApiTest::test_events_with_since_until",
"tests/unit/api_test.py::DockerApiTest::test_info",
"tests/unit/api_test.py::DockerApiTest::test_remove_link",
"tests/unit/api_test.py::DockerApiTest::test_retrieve_server_version",
"tests/unit/api_test.py::DockerApiTest::test_search",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_http",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_http_unix_triple_slash",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_tcp",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_unix",
"tests/unit/api_test.py::DockerApiTest::test_url_compatibility_unix_triple_slash",
"tests/unit/api_test.py::DockerApiTest::test_url_invalid_resource",
"tests/unit/api_test.py::DockerApiTest::test_url_no_resource",
"tests/unit/api_test.py::DockerApiTest::test_url_unversioned_api",
"tests/unit/api_test.py::DockerApiTest::test_url_valid_resource",
"tests/unit/api_test.py::DockerApiTest::test_version",
"tests/unit/api_test.py::DockerApiTest::test_version_no_api_version",
"tests/unit/api_test.py::StreamTest::test_early_stream_response",
"tests/unit/api_test.py::UserAgentTest::test_custom_user_agent",
"tests/unit/api_test.py::UserAgentTest::test_default_user_agent",
"tests/unit/utils_test.py::DecoratorsTest::test_update_headers",
"tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_blkio_constraints",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_dns_opt",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_kernel_memory",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_mem_reservation",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_score_adj",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb",
"tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_userns_mode",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals",
"tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit",
"tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig",
"tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_alternate_env",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false",
"tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input",
"tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper",
"tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_with_equals_character",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls",
"tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls_tcp_proto",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha",
"tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag",
"tests/unit/utils_test.py::ParseDeviceTest::test_dict",
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list",
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition",
"tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint",
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid",
"tests/unit/utils_test.py::UtilsTest::test_convert_filters",
"tests/unit/utils_test.py::UtilsTest::test_create_ipam_config",
"tests/unit/utils_test.py::UtilsTest::test_decode_json_header",
"tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port",
"tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range",
"tests/unit/utils_test.py::PortsTest::test_host_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges",
"tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid",
"tests/unit/utils_test.py::PortsTest::test_port_only_with_colon",
"tests/unit/utils_test.py::PortsTest::test_split_port_invalid",
"tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port",
"tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child",
"tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes",
"tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes",
"tests/unit/utils_test.py::ExcludePathsTest::test_question_mark",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_leading_dot_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_with_path_traversal",
"tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception",
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception",
"tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks",
"tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory",
"tests/unit/utils_test.py::TarTest::test_tar_with_excludes",
"tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks"
]
| []
| Apache License 2.0 | 720 | [
"docs/hostconfig.md",
"docker/utils/utils.py",
"docker/client.py"
]
| [
"docs/hostconfig.md",
"docker/utils/utils.py",
"docker/client.py"
]
|
|
zalando-stups__senza-323 | e40cebaeb29bf5af792b8aa79627f7e5ce9a72fa | 2016-08-25 11:30:13 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/cli.py b/senza/cli.py
index e20c744..15927fd 100755
--- a/senza/cli.py
+++ b/senza/cli.py
@@ -985,11 +985,13 @@ def instances(stack_ref, all, terminated, docker_image, piu, odd_host, region,
if piu is not None:
odd_host = odd_host or Piu.find_odd_host(region)
+ auto_connect = len(rows) == 1
for row in rows:
if row['private_ip'] is not None:
Piu.request_access(instance=row['private_ip'],
reason=piu,
- odd_host=odd_host)
+ odd_host=odd_host,
+ connect=auto_connect)
@cli.command()
diff --git a/senza/stups/piu.py b/senza/stups/piu.py
index 3d1784a..643ac93 100644
--- a/senza/stups/piu.py
+++ b/senza/stups/piu.py
@@ -13,13 +13,18 @@ class Piu:
http://stups.readthedocs.io/en/latest/user-guide/ssh-access.html#ssh-access
"""
@staticmethod
- def request_access(instance: str, reason: str, odd_host: Optional[str]):
+ def request_access(instance: str, reason: str, odd_host: Optional[str],
+ connect: bool):
"""
Request SSH access to a single host
"""
reason = '{} via senza'.format(reason)
- cmd = ['piu', 'request-access', '--connect',
+ cmd = ['piu', 'request-access',
instance, reason]
+
+ if connect:
+ cmd.append('--connect')
+
if odd_host is not None:
cmd.extend(['-O', odd_host])
| Please set the --connect option as optional
After 479e4d8 you connect automatically to ***all*** instances if you use `--piu`
But this is not useful, if you get more then one instance back. | zalando-stups/senza | diff --git a/tests/test_stups/test_piu.py b/tests/test_stups/test_piu.py
index 125a016..447b2bf 100644
--- a/tests/test_stups/test_piu.py
+++ b/tests/test_stups/test_piu.py
@@ -9,14 +9,21 @@ def test_request_access(monkeypatch):
m_call = MagicMock()
monkeypatch.setattr('senza.stups.piu.call', m_call)
- Piu.request_access('127.0.0.1', 'no reason', None)
- m_call.assert_called_once_with(['piu', 'request-access', '--connect',
+ Piu.request_access('127.0.0.1', 'no reason', None, True)
+ m_call.assert_called_once_with(['piu', 'request-access',
+ '127.0.0.1', 'no reason via senza',
+ '--connect'])
+
+ m_call.reset_mock()
+ Piu.request_access('127.0.0.1', 'no reason', None, False)
+ m_call.assert_called_once_with(['piu', 'request-access',
'127.0.0.1', 'no reason via senza'])
m_call.reset_mock()
- Piu.request_access('127.0.0.1', 'no reason', 'example.com')
- m_call.assert_called_once_with(['piu', 'request-access', '--connect',
+ Piu.request_access('127.0.0.1', 'no reason', 'example.com', True)
+ m_call.assert_called_once_with(['piu', 'request-access',
'127.0.0.1', 'no reason via senza',
+ '--connect',
'-O', 'example.com'])
@@ -56,6 +63,7 @@ def test_request_access_not_installed(monkeypatch):
monkeypatch.setattr('senza.stups.piu.call', m_call)
with pytest.raises(PiuNotFound):
- Piu.request_access('127.0.0.1', 'no reason', None)
- m_call.assert_called_once_with(['piu', 'request-access', '--connect',
- '127.0.0.1', 'no reason via senza'])
+ Piu.request_access('127.0.0.1', 'no reason', None, True)
+ m_call.assert_called_once_with(['piu', 'request-access',
+ '127.0.0.1', 'no reason via senza',
+ '--connect'])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@e40cebaeb29bf5af792b8aa79627f7e5ce9a72fa#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_stups/test_piu.py::test_request_access",
"tests/test_stups/test_piu.py::test_request_access_not_installed"
]
| []
| [
"tests/test_stups/test_piu.py::test_find_odd_host"
]
| []
| Apache License 2.0 | 721 | [
"senza/cli.py",
"senza/stups/piu.py"
]
| [
"senza/cli.py",
"senza/stups/piu.py"
]
|
|
jboss-dockerfiles__dogen-33 | 26146b1b97b780e2f280fadb4cc85336307a2ea6 | 2016-08-26 09:41:51 | aeca374f5ee8721228cb42d920a25f659a3a748e | diff --git a/dogen/plugins/cct.py b/dogen/plugins/cct.py
index b4ada30..3b3ab63 100644
--- a/dogen/plugins/cct.py
+++ b/dogen/plugins/cct.py
@@ -19,7 +19,7 @@ class CCT(Plugin):
Read in a schema definition for our part of the config and hook it
into the parent schema at the cct: top-level key.
"""
- schema_path = os.path.join(self.dogen.pwd, "plugins", "cct", "cct_schema.yaml")
+ schema_path = os.path.join(self.dogen.pwd, "schema", "cct_schema.yaml")
schema = {}
with open(schema_path, 'r') as fh:
schema = yaml.safe_load(fh)
@@ -50,6 +50,9 @@ class CCT(Plugin):
if 'runtime' in cfg['cct']:
self.runtime_changes(cfg)
+ if not 'user' in cfg['cct']:
+ cfg['cct']['user'] = 'root'
+
def _prepare_modules(self, cfg):
for module in cfg['cct']['modules']:
name = None
diff --git a/dogen/plugins/cct/__init__.py b/dogen/plugins/cct/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/dogen/plugins/cct/cct_schema.yaml b/dogen/schema/cct_schema.yaml
similarity index 96%
rename from dogen/plugins/cct/cct_schema.yaml
rename to dogen/schema/cct_schema.yaml
index 1cf2751..0b32caf 100644
--- a/dogen/plugins/cct/cct_schema.yaml
+++ b/dogen/schema/cct_schema.yaml
@@ -15,3 +15,4 @@ map:
runtime_changes: { type: str }
# same rationale as for configure
runtime: { type: any }
+ user: { type: any }
diff --git a/dogen/templates/template.jinja b/dogen/templates/template.jinja
index eac14d4..b0a6b41 100644
--- a/dogen/templates/template.jinja
+++ b/dogen/templates/template.jinja
@@ -102,6 +102,7 @@ RUN [ "bash", "-x", "/tmp/scripts/{{ script.package }}/{{ script.exec }}" ]
{% if cct %}
ADD cct /tmp/cct/
ENV CCT_MODULES_PATH=/tmp/cct/
+USER {{ cct.user }}
RUN cct {%if cct.verbose %}-v{% endif %} {% for run in cct.run %} /tmp/cct/{{ run }} {% endfor %}
# Add cct in command mode as entrypoint
| build-time cct user depends on whether or not scripts run
since we don't explicitly define a USER for the cct block, it uses whatever was last set. That depends on what scripts are run and under what uid.
We should probably default to USER root in the cct block and perhaps let it be overridden in the YAML. | jboss-dockerfiles/dogen | diff --git a/tests/test_cct_plugin.py b/tests/test_cct_plugin.py
new file mode 100644
index 0000000..1cc4c24
--- /dev/null
+++ b/tests/test_cct_plugin.py
@@ -0,0 +1,26 @@
+import tempfile
+import unittest
+import shutil
+
+from dogen.plugins import cct
+
+# minimal fake dogen object
+class MockDogen():
+ def __init__(self):
+ self.log = 0
+ self.descriptor = 0
+ self.output = ""
+
+class TestCCTPlugin(unittest.TestCase):
+ def setUp(self):
+ self.workdir = tempfile.mkdtemp(prefix='test_cct_plugin')
+ self.cct = cct.CCT(dogen=MockDogen())
+
+ def teardown(self):
+ shutil.rmtree(self.workdir)
+
+ def test_default_user_root(self):
+ cfg = { 'cct': { 'configure': '' }}
+ self.cct.prepare(cfg)
+ self.assertTrue('user' in cfg['cct'])
+ self.assertEqual(cfg['cct']['user'], "root")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@26146b1b97b780e2f280fadb4cc85336307a2ea6#egg=dogen
exceptiongroup==1.2.2
iniconfig==2.1.0
Jinja2==2.8
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.8.1
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.10.0
tomli==2.2.1
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- jinja2==2.8
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.8.1
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.10.0
- tomli==2.2.1
prefix: /opt/conda/envs/dogen
| [
"tests/test_cct_plugin.py::TestCCTPlugin::test_default_user_root"
]
| []
| []
| []
| MIT License | 722 | [
"dogen/plugins/cct.py",
"dogen/plugins/cct/__init__.py",
"dogen/templates/template.jinja",
"dogen/plugins/cct/cct_schema.yaml"
]
| [
"dogen/plugins/cct.py",
"dogen/plugins/cct/__init__.py",
"dogen/schema/cct_schema.yaml",
"dogen/templates/template.jinja"
]
|
|
Axelrod-Python__Axelrod-699 | 10762a191c6f16a0ca385752bd48c867c7043fe7 | 2016-08-26 12:33:19 | 10762a191c6f16a0ca385752bd48c867c7043fe7 | diff --git a/axelrod/strategies/__init__.py b/axelrod/strategies/__init__.py
index c86195be..83169217 100644
--- a/axelrod/strategies/__init__.py
+++ b/axelrod/strategies/__init__.py
@@ -1,5 +1,6 @@
from ..player import is_basic, obey_axelrod
from ._strategies import *
+from ._filters import passes_filterset
# `from ._strategies import *` import the collection `strategies`
# Now import the Meta strategies. This cannot be done in _strategies
@@ -29,3 +30,44 @@ long_run_time_strategies = [s for s in all_strategies if
cheating_strategies = [s for s in all_strategies if not obey_axelrod(s())]
ordinary_strategies = strategies # This is a legacy and will be removed
+
+
+def filtered_strategies(filterset, strategies=all_strategies):
+ """
+ Applies the filters defined in the given filterset dict and returns those
+ strategy classes which pass all of those filters from the given list of
+ strategies.
+
+ e.g.
+
+ For the filterset dict:
+ {
+ 'stochastic': True,
+ 'min_memory_depth': 2
+ }
+
+ the function will return a list of all deterministic strategies with a
+ memory_depth of 2 or more.
+
+ Parameters
+ ----------
+ filterset : dict
+ mapping filter name to criterion.
+ e.g.
+ {
+ 'stochastic': True,
+ 'min_memory_depth': 2
+ }
+ strategies: list
+ of subclasses of axelrod.Player
+
+ Returns
+ -------
+ list
+
+ of subclasses of axelrod.Player
+
+ """
+ return [
+ s for s in strategies
+ if passes_filterset(s, filterset)]
diff --git a/axelrod/strategies/_filters.py b/axelrod/strategies/_filters.py
new file mode 100644
index 00000000..c18a8652
--- /dev/null
+++ b/axelrod/strategies/_filters.py
@@ -0,0 +1,219 @@
+from collections import namedtuple
+import operator
+
+
+def passes_operator_filter(strategy, classifier_key, value, operator):
+ """
+ Tests whether a given strategy passes a filter for a
+ given key in its classifier dict using a given (in)equality operator.
+
+ e.g.
+
+ For the following strategy:
+
+ class ExampleStrategy(Player):
+ classifier = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'memory_depth': 10,
+ 'makes_use_of': ['game', 'length']
+ }
+
+ passes_operator_filter(ExampleStrategy, 'memory_depth', 10, operator.eq)
+
+ would test whether the 'memory_depth' entry equals 10 and return True
+
+ Parameters
+ ----------
+ strategy : a descendant class of axelrod.Player
+ classifier_key: string
+ Defining which entry from the strategy's classifier dict is to be
+ tested (e.g. 'memory_depth').
+ value: int
+ The value against which the strategy's classifier dict entry is to
+ be tested.
+ operator: operator.le, operator.ge or operator.eq
+ Indicating whether a 'less than or equal to' or 'greater than or
+ equal to' test should be applied.
+
+ Returns
+ -------
+ boolean
+
+ True if the value from the strategy's classifier dictionary matches
+ the value and operator passed to the function.
+ """
+ classifier_value = strategy.classifier[classifier_key]
+ if (isinstance(classifier_value, str) and
+ classifier_value.lower() == 'infinity'):
+ classifier_value = float('inf')
+
+ return operator(classifier_value, value)
+
+
+def passes_in_list_filter(strategy, classifier_key, value):
+ """
+ Tests whether a given list of values exist in the list returned from the
+ given strategy's classifier dict for the given classifier_key.
+
+ e.g.
+
+ For the following strategy:
+
+ class ExampleStrategy(Player):
+ classifier = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'memory_depth': 10,
+ 'makes_use_of': ['game', 'length']
+ }
+
+ passes_in_list_filter(ExampleStrategy, 'makes_use_of', 'game', operator.eq)
+
+ would test whether 'game' exists in the strategy's' 'makes_use_of' entry
+ and return True.
+
+ Parameters
+ ----------
+ strategy : a descendant class of axelrod.Player
+ classifier_key: string
+ Defining which entry from the strategy's classifier dict is to be
+ tested (e.g. 'makes_use_of').
+ value: list
+ The values against which the strategy's classifier dict entry is to
+ be tested.
+
+ Returns
+ -------
+ boolean
+ """
+ result = True
+ for entry in value:
+ if entry not in strategy.classifier[classifier_key]:
+ result = False
+ return result
+
+
+def passes_filterset(strategy, filterset):
+ """
+ Determines whether a given strategy meets the criteria defined in a
+ dictionary of filters.
+
+ e.g.
+
+ For the following strategy:
+
+ class ExampleStrategy(Player):
+ classifier = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'memory_depth': 10,
+ 'makes_use_of': ['game', 'length']
+ }
+
+ and this filterset dict:
+
+ example_filterset = {
+ 'stochastic': True,
+ 'memory_depth': 10
+ }
+
+ passes_filterset(ExampleStrategy, example_filterset)
+
+ would test whether both the strategy's 'stochastic' entry is True AND
+ that its 'memory_depth' equals 10 and return True.
+
+ Parameters
+ ----------
+ strategy : a descendant class of axelrod.Player
+ filterset : dict
+ mapping filter name to criterion.
+ e.g.
+ {
+ 'stochastic': True,
+ 'min_memory_depth': 2
+ }
+
+ Returns
+ -------
+ boolean
+
+ True if the given strategy meets all the supplied criteria in the
+ filterset, otherwise false.
+
+ """
+ FilterFunction = namedtuple('FilterFunction', 'function kwargs')
+
+ # A dictionary mapping filter name (from the supplied filterset) to
+ # the relevant function and arguments for that filter.
+ filter_functions = {
+ 'stochastic': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'stochastic',
+ 'operator': operator.eq
+ }),
+ 'long_run_time': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'long_run_time',
+ 'operator': operator.eq
+ }),
+ 'manipulates_state': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'manipulates_state',
+ 'operator': operator.eq
+ }),
+ 'manipulates_source': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'manipulates_source',
+ 'operator': operator.eq
+ }),
+ 'inspects_source': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'inspects_source',
+ 'operator': operator.eq
+ }),
+ 'memory_depth': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'memory_depth',
+ 'operator': operator.eq
+ }),
+ 'min_memory_depth': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'memory_depth',
+ 'operator': operator.ge
+ }),
+ 'max_memory_depth': FilterFunction(
+ function=passes_operator_filter,
+ kwargs={
+ 'classifier_key': 'memory_depth',
+ 'operator': operator.le
+ }),
+ 'makes_use_of': FilterFunction(
+ function=passes_in_list_filter,
+ kwargs={'classifier_key': 'makes_use_of'})
+ }
+
+ # A list of boolean values to record whether the strategy passed or failed
+ # each of the filters in the supplied filterset.
+ passes_filters = []
+
+ # Loop through each of the entries in the filter_functions dict and, if
+ # that filter is defined in the supplied filterset, call the relevant
+ # function and record its result in the passes_filters list.
+ for _filter, filter_function in filter_functions.items():
+
+ if filterset.get(_filter, None) is not None:
+ kwargs = filter_function.kwargs
+ kwargs['strategy'] = strategy
+ kwargs['value'] = filterset[_filter]
+ passes_filters.append(filter_function.function(**kwargs))
+
+ # Return True if the strategy passed all the supplied filters
+ return all(passes_filters)
diff --git a/docs/tutorials/advanced/classification_of_strategies.rst b/docs/tutorials/advanced/classification_of_strategies.rst
index 8f95b64a..61037b44 100644
--- a/docs/tutorials/advanced/classification_of_strategies.rst
+++ b/docs/tutorials/advanced/classification_of_strategies.rst
@@ -10,7 +10,15 @@ various dimensions.
Here is the :code:`classifier` for the :code:`Cooperator` strategy::
>>> import axelrod as axl
- >>> expected_dictionary = {'manipulates_state': False, 'makes_use_of': set([]), 'long_run_time': False, 'stochastic': False, 'manipulates_source': False, 'inspects_source': False, 'memory_depth': 0} # Order of this dictionary might be different on your machine
+ >>> expected_dictionary = {
+ ... 'manipulates_state': False,
+ ... 'makes_use_of': set([]),
+ ... 'long_run_time': False,
+ ... 'stochastic': False,
+ ... 'manipulates_source': False,
+ ... 'inspects_source': False,
+ ... 'memory_depth': 0
+ ... } # Order of this dictionary might be different on your machine
>>> axl.Cooperator.classifier == expected_dictionary
True
@@ -20,37 +28,82 @@ Note that instances of the class also have this classifier::
>>> s.classifier == expected_dictionary
True
-This allows us to, for example, quickly identify all the stochastic
+and that we can retrieve individual entries from that :code:`classifier` dictionary::
+
+ >>> s = axl.TitForTat
+ >>> s.classifier['memory_depth']
+ 1
+ >>> s = axl.Random
+ >>> s.classifier['stochastic']
+ True
+
+We can use this classification to generate sets of strategies according to
+filters which we define in a 'filterset' dictionary and then pass to the
+'filtered_strategies' function. For example, to identify all the stochastic
strategies::
- >>> len([s for s in axl.strategies if s().classifier['stochastic']])
- 43
+ >>> filterset = {
+ ... 'stochastic': True
+ ... }
+ >>> strategies = axl.filtered_strategies(filterset)
+ >>> len(strategies)
+ 35
+
-Or indeed find out how many strategy only use 1 turn worth of memory to
+Or, to find out how many strategies only use 1 turn worth of memory to
make a decision::
- >>> len([s for s in axl.strategies if s().classifier['memory_depth']==1])
+ >>> filterset = {
+ ... 'memory_depth': 1
+ ... }
+ >>> strategies = axl.filtered_strategies(filterset)
+ >>> len(strategies)
24
+Multiple filter can be specified within the filterset dictionary. To specify a
+range of memory_depth values, we can use the 'min_memory_depth' and
+'max_memory_depth' filters::
+
+ >>> filterset = {
+ ... 'min_memory_depth': 1,
+ ... 'max_memory_depth': 4
+ ... }
+ >>> strategies = axl.filtered_strategies(filterset)
+ >>> len(strategies)
+ 41
+
We can also identify strategies that make use of particular properties of the
tournament. For example, here is the number of strategies that make use of the
length of each match of the tournament::
- >>> len([s() for s in axl.strategies if 'length' in s().classifier['makes_use_of']])
- 10
+ >>> filterset = {
+ ... 'makes_use_of': ['length']
+ ... }
+ >>> strategies = axl.filtered_strategies(filterset)
+ >>> len(strategies)
+ 4
-Here are how many of the strategies that make use of the particular game being
-played (whether or not it's the default Prisoner's dilemma)::
+Note that in the filterset dictionary, the value for the 'makes_use_of' key
+must be a list. Here is how we might identify the number of strategies that use
+both the length of the tournament and the game being played::
- >>> len([s() for s in axl.strategies if 'game' in s().classifier['makes_use_of']])
- 22
+ >>> filterset = {
+ ... 'makes_use_of': ['length', 'game']
+ ... }
+ >>> strategies = axl.filtered_strategies(filterset)
+ >>> len(strategies)
+ 0
Some strategies have been classified as having a particularly long run time::
- >>> len([s() for s in axl.strategies if s().classifier['long_run_time']])
+ >>> filterset = {
+ ... 'long_run_time': True
+ ... }
+ >>> strategies = axl.filtered_strategies(filterset)
+ >>> len(strategies)
10
-Similarly, strategies that :code:`manipulate_source`, :code:`manipulate_state`
+Strategies that :code:`manipulate_source`, :code:`manipulate_state`
and/or :code:`inspect_source` return :code:`False` for the :code:`obey_axelrod`
function::
| Strategy Filtering by Dictionary
Within the api, there are a set of functions to filter strategies from their classifier entries using a dictionary. (This is because parameters within a url are passed as dictionary).
For example, the url https://axelrod-api.herokuapp.com/strategies/?stochastic=false&inspects_source=true passes the following dictionary:
```
{
'stochastic': 'false',
'inspects_source': 'true'
}
```
and it's then used to generate the list of strategies which satisfy those filtering criteria.
We could implement that functionality within the library itself, so that we could do something like:
```
filter = {
'stochastic': 'false',
'inspects_source': 'true'
}
print(axelrod.filtered_strategies(filter))
```
This wouldn't add anything that couldn't already be done with a list comprehension. The same filtering can be done already with:
```
print([[s in axl.strategies if not s.classifier[’stochastic’] and s.classifier[‘inspects_source']]))
```
However, it would add an extra capability to the library rather than it existing only within the api. | Axelrod-Python/Axelrod | diff --git a/axelrod/tests/integration/test_filtering.py b/axelrod/tests/integration/test_filtering.py
new file mode 100644
index 00000000..8bfc2873
--- /dev/null
+++ b/axelrod/tests/integration/test_filtering.py
@@ -0,0 +1,86 @@
+import unittest
+from hypothesis import given, example
+from hypothesis.strategies import integers
+from axelrod import all_strategies, filtered_strategies
+
+
+class TestFiltersAgainstComprehensions(unittest.TestCase):
+ """
+ Test that the results of filtering strategies via a filterset dict
+ match the results from using a list comprehension.
+ """
+
+ def test_boolean_filtering(self):
+
+ classifiers = [
+ 'stochastic',
+ 'long_run_time',
+ 'manipulates_state',
+ 'manipulates_source',
+ 'inspects_source']
+
+ for classifier in classifiers:
+ comprehension = set([
+ s for s in all_strategies if
+ s.classifier[classifier]])
+ filterset = {
+ classifier: True
+ }
+ filtered = set(filtered_strategies(filterset))
+ self.assertEqual(comprehension, filtered)
+
+ @given(
+ min_memory_depth=integers(min_value=1, max_value=10),
+ max_memory_depth=integers(min_value=1, max_value=10),
+ memory_depth=integers(min_value=1, max_value=10))
+ @example(
+ min_memory_depth=float('inf'),
+ max_memory_depth=float('inf'),
+ memory_depth=float('inf'))
+ def test_memory_depth_filtering(self, min_memory_depth, max_memory_depth,
+ memory_depth):
+
+ min_comprehension = set([
+ s for s in all_strategies if
+ s.classifier['memory_depth'] >= min_memory_depth])
+ min_filterset = {
+ 'min_memory_depth': min_memory_depth
+ }
+ min_filtered = set(filtered_strategies(min_filterset))
+ self.assertEqual(min_comprehension, min_filtered)
+
+ max_comprehension = set([
+ s for s in all_strategies if
+ s.classifier['memory_depth'] <= max_memory_depth])
+ max_filterset = {
+ 'max_memory_depth': max_memory_depth
+ }
+ max_filtered = set(filtered_strategies(max_filterset))
+ self.assertEqual(max_comprehension, max_filtered)
+
+ comprehension = set([
+ s for s in all_strategies if
+ s.classifier['memory_depth'] == memory_depth])
+ filterset = {
+ 'memory_depth': memory_depth
+ }
+ filtered = set(filtered_strategies(filterset))
+ self.assertEqual(comprehension, filtered)
+
+ def test_makes_use_of_filtering(self):
+ classifiers = [
+ ['game'],
+ ['length'],
+ ['game', 'length']
+ ]
+
+ for classifier in classifiers:
+ comprehension = set([
+ s for s in all_strategies if
+ set(classifier).issubset(set(s.classifier['makes_use_of']))
+ ])
+ filterset = {
+ 'makes_use_of': classifier
+ }
+ filtered = set(filtered_strategies(filterset))
+ self.assertEqual(comprehension, filtered)
diff --git a/axelrod/tests/unit/test_classification.py b/axelrod/tests/unit/test_classification.py
index d6fba108..110fbaa5 100644
--- a/axelrod/tests/unit/test_classification.py
+++ b/axelrod/tests/unit/test_classification.py
@@ -17,7 +17,8 @@ class TestClassification(unittest.TestCase):
for s in axelrod.all_strategies:
s = s()
- self.assertTrue(None not in [s.classifier[key] for key in known_keys])
+ self.assertTrue(
+ None not in [s.classifier[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
@@ -184,7 +185,8 @@ class TestStrategies(unittest.TestCase):
self.assertTrue(axelrod.MetaMajority in axelrod.strategies)
self.assertTrue(axelrod.MetaHunter in axelrod.strategies)
- self.assertFalse(axelrod.MetaHunter in axelrod.long_run_time_strategies)
+ self.assertFalse(
+ axelrod.MetaHunter in axelrod.long_run_time_strategies)
def test_demo_strategies(self):
demo_strategies = [axelrod.Cooperator,
diff --git a/axelrod/tests/unit/test_filters.py b/axelrod/tests/unit/test_filters.py
new file mode 100644
index 00000000..65da7c77
--- /dev/null
+++ b/axelrod/tests/unit/test_filters.py
@@ -0,0 +1,161 @@
+import unittest
+from axelrod.strategies._filters import *
+from axelrod import filtered_strategies
+from hypothesis import given, example
+from hypothesis.strategies import integers
+import operator
+
+
+class TestFilters(unittest.TestCase):
+
+ class TestStrategy(object):
+ classifier = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'memory_depth': 10,
+ 'makes_use_of': ['game', 'length']
+ }
+
+ def test_equality_filter(self):
+ self.assertTrue(
+ passes_operator_filter(
+ self.TestStrategy, 'stochastic', True, operator.eq))
+ self.assertFalse(
+ passes_operator_filter(
+ self.TestStrategy, 'stochastic', False, operator.eq))
+ self.assertTrue(
+ passes_operator_filter(
+ self.TestStrategy, 'inspects_source', False, operator.eq))
+ self.assertFalse(
+ passes_operator_filter(
+ self.TestStrategy, 'inspects_source', True, operator.eq))
+
+ @given(
+ smaller=integers(min_value=0, max_value=9),
+ larger=integers(min_value=11, max_value=100),
+ )
+ @example(smaller=0, larger=float('inf'))
+ def test_inequality_filter(self, smaller, larger):
+ self.assertTrue(passes_operator_filter(
+ self.TestStrategy, 'memory_depth', smaller, operator.ge))
+ self.assertTrue(passes_operator_filter(
+ self.TestStrategy, 'memory_depth', larger, operator.le))
+ self.assertFalse(passes_operator_filter(
+ self.TestStrategy, 'memory_depth', smaller, operator.le))
+ self.assertFalse(passes_operator_filter(
+ self.TestStrategy, 'memory_depth', larger, operator.ge))
+
+ def test_list_filter(self):
+ self.assertTrue(passes_in_list_filter(
+ self.TestStrategy, 'makes_use_of', ['game']))
+ self.assertTrue(passes_in_list_filter(
+ self.TestStrategy, 'makes_use_of', ['length']))
+ self.assertTrue(passes_in_list_filter(
+ self.TestStrategy, 'makes_use_of', ['game', 'length']))
+ self.assertFalse(passes_in_list_filter(
+ self.TestStrategy, 'makes_use_of', 'test'))
+
+ @given(
+ smaller=integers(min_value=0, max_value=9),
+ larger=integers(min_value=11, max_value=100),
+ )
+ @example(smaller=0, larger=float('inf'))
+ def test_passes_filterset(self, smaller, larger):
+
+ full_passing_filterset_1 = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'min_memory_depth': smaller,
+ 'max_memory_depth': larger,
+ 'makes_use_of': ['game', 'length']
+ }
+
+ full_passing_filterset_2 = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'memory_depth': 10,
+ 'makes_use_of': ['game', 'length']
+ }
+
+ sparse_passing_filterset = {
+ 'stochastic': True,
+ 'inspects_source': False,
+ 'makes_use_of': ['length']
+ }
+
+ full_failing_filterset = {
+ 'stochastic': False,
+ 'inspects_source': False,
+ 'min_memory_depth': smaller,
+ 'max_memory_depth': larger,
+ 'makes_use_of': ['length']
+ }
+
+ sparse_failing_filterset = {
+ 'stochastic': False,
+ 'inspects_source': False,
+ 'min_memory_depth': smaller,
+ }
+
+ self.assertTrue(passes_filterset(
+ self.TestStrategy, full_passing_filterset_1))
+ self.assertTrue(passes_filterset(
+ self.TestStrategy, full_passing_filterset_2))
+ self.assertTrue(passes_filterset(
+ self.TestStrategy, sparse_passing_filterset))
+ self.assertFalse(passes_filterset(
+ self.TestStrategy, full_failing_filterset))
+ self.assertFalse(passes_filterset(
+ self.TestStrategy, sparse_failing_filterset))
+
+ def test_filtered_strategies(self):
+
+ class StochasticTestStrategy(object):
+ classifier = {
+ 'stochastic': True,
+ 'memory_depth': float('inf'),
+ 'makes_use_of': []
+ }
+
+ class MemoryDepth2TestStrategy(object):
+ classifier = {
+ 'stochastic': False,
+ 'memory_depth': 2,
+ 'makes_use_of': []
+ }
+
+ class UsesLengthTestStrategy(object):
+ classifier = {
+ 'stochastic': True,
+ 'memory_depth': float('inf'),
+ 'makes_use_of': ['length']
+ }
+
+ strategies = [
+ StochasticTestStrategy,
+ MemoryDepth2TestStrategy,
+ UsesLengthTestStrategy
+ ]
+
+ stochastic_filterset = {
+ 'stochastic': True
+ }
+
+ deterministic_filterset = {
+ 'stochastic': False
+ }
+
+ uses_length_filterset = {
+ 'stochastic': True,
+ 'makes_use_of': ['length']
+ }
+
+ self.assertEqual(
+ filtered_strategies(stochastic_filterset, strategies),
+ [StochasticTestStrategy, UsesLengthTestStrategy])
+ self.assertEqual(
+ filtered_strategies(deterministic_filterset, strategies),
+ [MemoryDepth2TestStrategy])
+ self.assertEqual(
+ filtered_strategies(uses_length_filterset, strategies),
+ [UsesLengthTestStrategy])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 2
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
-e git+https://github.com/Axelrod-Python/Axelrod.git@10762a191c6f16a0ca385752bd48c867c7043fe7#egg=Axelrod
cycler==0.12.1
exceptiongroup==1.2.2
hypothesis==6.130.5
iniconfig==2.1.0
kiwisolver==1.4.7
matplotlib==3.3.4
numpy==2.0.2
packaging==24.2
pillow==11.1.0
pluggy==1.5.0
pyparsing==2.1.1
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
sortedcontainers==2.4.0
tomli==2.2.1
tqdm==3.4.0
| name: Axelrod
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- cycler==0.12.1
- exceptiongroup==1.2.2
- hypothesis==6.130.5
- iniconfig==2.1.0
- kiwisolver==1.4.7
- matplotlib==3.3.4
- numpy==2.0.2
- packaging==24.2
- pillow==11.1.0
- pluggy==1.5.0
- pyparsing==2.1.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==2.2.1
- tqdm==3.4.0
prefix: /opt/conda/envs/Axelrod
| [
"axelrod/tests/integration/test_filtering.py::TestFiltersAgainstComprehensions::test_boolean_filtering",
"axelrod/tests/integration/test_filtering.py::TestFiltersAgainstComprehensions::test_makes_use_of_filtering",
"axelrod/tests/integration/test_filtering.py::TestFiltersAgainstComprehensions::test_memory_depth_filtering",
"axelrod/tests/unit/test_classification.py::TestClassification::test_is_basic",
"axelrod/tests/unit/test_classification.py::TestClassification::test_known_classifiers",
"axelrod/tests/unit/test_classification.py::TestClassification::test_manipulation_of_classifier",
"axelrod/tests/unit/test_classification.py::TestClassification::test_multiple_instances",
"axelrod/tests/unit/test_classification.py::TestClassification::test_obey_axelrod",
"axelrod/tests/unit/test_classification.py::TestStrategies::test_demo_strategies",
"axelrod/tests/unit/test_classification.py::TestStrategies::test_inclusion_of_strategy_lists",
"axelrod/tests/unit/test_classification.py::TestStrategies::test_lists_not_empty",
"axelrod/tests/unit/test_classification.py::TestStrategies::test_long_run_strategies",
"axelrod/tests/unit/test_classification.py::TestStrategies::test_meta_inclusion",
"axelrod/tests/unit/test_classification.py::TestStrategies::test_strategy_list",
"axelrod/tests/unit/test_filters.py::TestFilters::test_equality_filter",
"axelrod/tests/unit/test_filters.py::TestFilters::test_filtered_strategies",
"axelrod/tests/unit/test_filters.py::TestFilters::test_inequality_filter",
"axelrod/tests/unit/test_filters.py::TestFilters::test_list_filter",
"axelrod/tests/unit/test_filters.py::TestFilters::test_passes_filterset"
]
| []
| []
| []
| MIT License | 723 | [
"axelrod/strategies/_filters.py",
"docs/tutorials/advanced/classification_of_strategies.rst",
"axelrod/strategies/__init__.py"
]
| [
"axelrod/strategies/_filters.py",
"docs/tutorials/advanced/classification_of_strategies.rst",
"axelrod/strategies/__init__.py"
]
|
|
XD-embedded__xd-docker-52 | 00db236f2385d476cd3f32e3fd4bf68fd1dd8a35 | 2016-08-27 14:53:21 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/xd/docker/client.py b/xd/docker/client.py
index 3d2fe2e..ec42404 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -484,3 +484,27 @@ class DockerClient(object):
return False
raise e
return True
+
+ def container_wait(self,
+ container: Union[Container, ContainerName, str]) -> int:
+ """Block until container stops.
+
+ Block until container stops, then returns the exit code.
+
+ Arguments:
+ container: The container to remove (id or name).
+
+ Returns:
+ Container exit code.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ r = self._post('/containers/{}/wait'.format(id_or_name))
+ return r.json()['StatusCode']
| client.container_wait()
Client API command to wait for a container to stop. | XD-embedded/xd-docker | diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 5b2eccd..0f1f130 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1245,3 +1245,41 @@ class container_start_tests(ContextClientTestCase):
with pytest.raises(ClientError) as clienterror:
self.client.container_start('foobar')
assert clienterror.value.code == 404
+
+
+class container_wait_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_0(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 0}), 200)
+ assert self.client.container_wait("foobar") == 0
+
+ @mock.patch('requests.post')
+ def test_42(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 42}), 200)
+ assert self.client.container_wait("foobar") == 42
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 42}), 200)
+ post_mock.return_value = requests_mock.Response(
+ "No such container", 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_wait('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 0}), 200)
+ assert self.client.container_wait(ContainerName("foobar")) == 0
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(json.dumps(
+ {'StatusCode': 0}), 200)
+ assert self.client.container_wait(Container(
+ self.client,name="foobar")) == 0
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@00db236f2385d476cd3f32e3fd4bf68fd1dd8a35#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container"
]
| []
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str"
]
| []
| MIT License | 724 | [
"xd/docker/client.py"
]
| [
"xd/docker/client.py"
]
|
|
XD-embedded__xd-docker-53 | 4a5dc455d476da11c4b6ed4632c63e7932e99f97 | 2016-08-27 19:03:06 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/xd/docker/client.py b/xd/docker/client.py
index ec42404..382c1ec 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -508,3 +508,35 @@ class DockerClient(object):
r = self._post('/containers/{}/wait'.format(id_or_name))
return r.json()['StatusCode']
+
+ def container_stop(self, container: Union[Container, ContainerName, str],
+ timeout: Optional[int]=None):
+ """Stop container.
+
+ Stop the container, and optionally killing the container after a
+ timeout.
+
+ Arguments:
+ container: The container to remove (id or name).
+ timeout: Number of seconds to wait before killing the container.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ params = {}
+ if timeout is not None:
+ params['t'] = timeout
+
+ try:
+ self._post('/containers/{}/stop'.format(id_or_name), params=params)
+ except HTTPError as e:
+ if e.code == 304:
+ return False
+ raise e
+ return True
| client.container_stop()
Client API command to stop a container. | XD-embedded/xd-docker | diff --git a/tests/integration/container_stop_test.py b/tests/integration/container_stop_test.py
new file mode 100644
index 0000000..e11e8f3
--- /dev/null
+++ b/tests/integration/container_stop_test.py
@@ -0,0 +1,27 @@
+import pytest
+import os
+import time
+
+from xd.docker.client import *
+
+
+def test_stop(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 10")
+ assert docker.container_stop('xd-docker-test') == True
+
+
+def test_already_stopped(docker, stdout):
+ os.system("docker run --name xd-docker-test busybox:latest true")
+ assert docker.container_stop('xd-docker-test') == False
+
+
+def test_not_started(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest true")
+ assert docker.container_stop('xd-docker-test') == False
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_stop('xd-docker-test')
+ assert clienterror.value.code == 404
diff --git a/tests/integration/container_wait_test.py b/tests/integration/container_wait_test.py
new file mode 100644
index 0000000..dc5d44d
--- /dev/null
+++ b/tests/integration/container_wait_test.py
@@ -0,0 +1,27 @@
+import pytest
+import os
+import time
+
+from xd.docker.client import *
+
+
+def test_waiting(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 2")
+ assert docker.container_wait('xd-docker-test') == 0
+
+
+def test_already_stopped_0(docker, stdout):
+ os.system("docker run --name xd-docker-test busybox:latest true")
+ assert docker.container_wait('xd-docker-test') == 0
+
+
+def test_already_stopped_1(docker, stdout):
+ os.system("docker run --name xd-docker-test busybox:latest false")
+ assert docker.container_wait('xd-docker-test') == 1
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_wait('xd-docker-test')
+ assert clienterror.value.code == 404
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 0f1f130..6bf24f2 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1263,10 +1263,7 @@ class container_wait_tests(ContextClientTestCase):
@mock.patch('requests.post')
def test_no_such_container(self, post_mock):
- post_mock.return_value = requests_mock.Response(json.dumps(
- {'StatusCode': 42}), 200)
- post_mock.return_value = requests_mock.Response(
- "No such container", 404)
+ post_mock.return_value = requests_mock.Response(None, 404)
with pytest.raises(ClientError) as clienterror:
self.client.container_wait('foobar')
assert clienterror.value.code == 404
@@ -1283,3 +1280,43 @@ class container_wait_tests(ContextClientTestCase):
{'StatusCode': 0}), 200)
assert self.client.container_wait(Container(
self.client,name="foobar")) == 0
+
+
+class container_stop_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_normal(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ assert self.client.container_stop("foobar") == True
+
+ @mock.patch('requests.post')
+ def test_already_stopped(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 304)
+ assert self.client.container_stop("foobar") == False
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_stop('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ assert self.client.container_stop(ContainerName("foobar")) == True
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ assert self.client.container_stop(Container(
+ self.client,name="foobar")) == True
+
+ @mock.patch('requests.post')
+ def test_timeout(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ assert self.client.container_stop("foobar", timeout=42) == True
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 't' in params
+ assert params['t'] == 42
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
requests-unixsocket==0.4.1
tomli==2.2.1
typing==3.7.4.3
urllib3==2.3.0
-e git+https://github.com/XD-embedded/xd-docker.git@4a5dc455d476da11c4b6ed4632c63e7932e99f97#egg=XD_Docker
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- requests-unixsocket==0.4.1
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==2.3.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_stop_tests::test_already_stopped",
"tests/unit/client_test.py::container_stop_tests::test_container",
"tests/unit/client_test.py::container_stop_tests::test_containername",
"tests/unit/client_test.py::container_stop_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_normal",
"tests/unit/client_test.py::container_stop_tests::test_timeout"
]
| [
"tests/integration/container_stop_test.py::test_stop",
"tests/integration/container_stop_test.py::test_already_stopped",
"tests/integration/container_stop_test.py::test_not_started",
"tests/integration/container_stop_test.py::test_no_such_container",
"tests/integration/container_wait_test.py::test_waiting",
"tests/integration/container_wait_test.py::test_already_stopped_0",
"tests/integration/container_wait_test.py::test_already_stopped_1",
"tests/integration/container_wait_test.py::test_no_such_container"
]
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str",
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container"
]
| []
| MIT License | 725 | [
"xd/docker/client.py"
]
| [
"xd/docker/client.py"
]
|
|
enthought__okonomiyaki-229 | ecbef5d20e17783436532cce856d201df280eb09 | 2016-08-27 19:13:21 | ecbef5d20e17783436532cce856d201df280eb09 | diff --git a/okonomiyaki/platforms/abi.py b/okonomiyaki/platforms/abi.py
index a69af1f..a3947aa 100644
--- a/okonomiyaki/platforms/abi.py
+++ b/okonomiyaki/platforms/abi.py
@@ -44,7 +44,7 @@ def _default_cpython_abi(platform, implementation_version):
abi = u"msvc2008"
elif implementation_version.minor <= 4:
abi = u"msvc2010"
- elif implementation_version.minor == 5:
+ elif implementation_version.minor <= 6:
abi = u"msvc2015"
if abi is None:
diff --git a/setup.py b/setup.py
index fe8f301..47e2270 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ IS_RELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
INSTALL_REQUIRES = [
- "attrs >= 16.0.0",
+ "attrs < 16.1.0", # Needed to support Python 2.6
"jsonschema >= 2.5.1",
"six >= 1.9.0",
"zipfile2 >= 0.0.12",
| Update _default_cpython_abi() to support Python 3.6
In okonomiyaki/platforms/abi.py#L47, we do not specify a default platform abi for the upcoming Python 3.6. This is needed to set the cp36 tag as indexable in https://github.com/enthought/brood/issues/1189. Otherwise, the following error is raised:
```
okonomiyaki.errors.OkonomiyakiError: Unsupported platform/version combo for cpython: Platform(os='windows', name='windows', family='windows', arch='x86', machine='x86')/<okonomiyaki.versions.runtime_version.RuntimeVersion object at 0x7f8bc32327f0>
```
Is it safe to assume that CPython 3.6 is also using MSVC 2015? We can also add the cp36 tag as indexable in a later migration once CPython 3.6 is released. | enthought/okonomiyaki | diff --git a/okonomiyaki/file_formats/tests/test__egg_info.py b/okonomiyaki/file_formats/tests/test__egg_info.py
index c7962dc..af539c9 100644
--- a/okonomiyaki/file_formats/tests/test__egg_info.py
+++ b/okonomiyaki/file_formats/tests/test__egg_info.py
@@ -708,6 +708,35 @@ class TestGuessPlatformAbi(unittest.TestCase):
# Then
self.assertEqual(abi, "msvc2015")
+ def test_python_36(self):
+ # Given
+ platform = EPDPlatform.from_epd_string("rh5-64")
+ python_tag = "cp36"
+
+ # When
+ abi = _guess_platform_abi(platform, python_tag)
+
+ # Then
+ self.assertEqual(abi, "gnu")
+
+ # Given
+ platform = EPDPlatform.from_epd_string("osx-64")
+
+ # When
+ abi = _guess_platform_abi(platform, python_tag)
+
+ # Then
+ self.assertEqual(abi, "darwin")
+
+ # Given
+ platform = EPDPlatform.from_epd_string("win-64")
+
+ # When
+ abi = _guess_platform_abi(platform, python_tag)
+
+ # Then
+ self.assertEqual(abi, "msvc2015")
+
def test_no_platform(self):
# Given
platform = None
diff --git a/okonomiyaki/platforms/tests/test_abi.py b/okonomiyaki/platforms/tests/test_abi.py
index 5138054..3528d24 100644
--- a/okonomiyaki/platforms/tests/test_abi.py
+++ b/okonomiyaki/platforms/tests/test_abi.py
@@ -18,6 +18,7 @@ class TestDefaultABI(unittest.TestCase):
(("win_x86", "cpython", "2.7.10+1"), u"msvc2008"),
(("win_x86", "cpython", "3.4.3+1"), u"msvc2010"),
(("win_x86", "cpython", "3.5.0+1"), u"msvc2015"),
+ (("win_x86", "cpython", "3.6.0+1"), u"msvc2015"),
(("osx_x86_64", "pypy", "2.6.1+1"), u"darwin"),
(("rh5_x86_64", "pypy", "2.6.1+1"), u"gnu"),
(("win_x86", "pypy", "2.6.1+1"), u"msvc2008"),
@@ -34,7 +35,7 @@ class TestDefaultABI(unittest.TestCase):
def test_non_supported(self):
# Given
args = (
- ("win_x86", "cpython", "3.6.0+1"),
+ ("win_x86", "cpython", "3.7.0+1"),
("win_x86", "pypy", "4.1.0+1"),
("rh5_x86_64", "r", "3.0.0+1"),
)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"flake8",
"mock",
"testfixtures",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
mccabe==0.7.0
mock==5.2.0
-e git+https://github.com/enthought/okonomiyaki.git@ecbef5d20e17783436532cce856d201df280eb09#egg=okonomiyaki
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.1
pytest==8.3.5
referencing==0.36.2
rpds-py==0.24.0
six==1.17.0
testfixtures==8.3.0
tomli==2.2.1
typing_extensions==4.13.0
zipfile2==0.0.12
| name: okonomiyaki
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest==8.3.5
- referencing==0.36.2
- rpds-py==0.24.0
- six==1.17.0
- testfixtures==8.3.0
- tomli==2.2.1
- typing-extensions==4.13.0
- zipfile2==0.0.12
prefix: /opt/conda/envs/okonomiyaki
| [
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_36",
"okonomiyaki/platforms/tests/test_abi.py::TestDefaultABI::test_basics"
]
| []
| [
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_spec_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_str",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg1",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg2",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_error_python_to_python_tag",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_3",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_4",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_from_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_missing_spec_depend",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_to_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_unsupported_metadata_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_windows_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_extension_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_no_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_no_python_egg_windows",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg_pypi",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_to_string",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_all_none",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_32",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_64",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_32",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_64",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_python_implementation",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_27",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_34",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_35",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name_invalid",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_invalid_spec_strings",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_1",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_2",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_unsupported",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_dependencies",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_none",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_blacklisted_pkg_info",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_blacklisted_python_tag",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_dump_blacklisted",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_dump_blacklisted_platform",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_dump_simple",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_fixed_requirement",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_from_cross_platform_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_from_json_dict",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_from_platform_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_mkl_roundtrip",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_no_pkg_info",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_platform_abi",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_platform_abi_no_python",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_simple",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_simple_non_python_egg",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_strictness",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_support_higher_compatible_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_support_lower_compatible_version",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_to_json_dict",
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggMetadata::test_to_spec_string",
"okonomiyaki/platforms/tests/test_abi.py::TestDefaultABI::test_non_supported"
]
| []
| BSD License | 726 | [
"setup.py",
"okonomiyaki/platforms/abi.py"
]
| [
"setup.py",
"okonomiyaki/platforms/abi.py"
]
|
|
XD-embedded__xd-docker-54 | 1f7353342a8eda120d4fddb87a14e63f5da799b3 | 2016-08-27 21:04:41 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | coveralls:
[](https://coveralls.io/builds/7638771)
Coverage remained the same at 100.0% when pulling **b5f87cb77d21bf715abb3a74102224baeffccd53 on esben:container-restart** into **1f7353342a8eda120d4fddb87a14e63f5da799b3 on XD-embedded:master**.
coveralls:
[](https://coveralls.io/builds/7638771)
Coverage remained the same at 100.0% when pulling **b5f87cb77d21bf715abb3a74102224baeffccd53 on esben:container-restart** into **1f7353342a8eda120d4fddb87a14e63f5da799b3 on XD-embedded:master**.
| diff --git a/xd/docker/client.py b/xd/docker/client.py
index 382c1ec..a0c7398 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -519,6 +519,10 @@ class DockerClient(object):
Arguments:
container: The container to remove (id or name).
timeout: Number of seconds to wait before killing the container.
+
+ Returns:
+ True if container was stopped.
+ False if container was already stopped.
"""
# Handle convenience argument types
@@ -540,3 +544,31 @@ class DockerClient(object):
return False
raise e
return True
+
+ def container_restart(self,
+ container: Union[Container, ContainerName, str],
+ timeout: Optional[int]=None):
+ """Restart container.
+
+ Restart the container, and optionally killing the container after a
+ timeout waiting for the container to stop.
+
+ Arguments:
+ container: The container to remove (id or name).
+ timeout: Number of seconds to wait before killing the container.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ params = {}
+ if timeout is not None:
+ params['t'] = timeout
+
+ self._post('/containers/{}/restart'.format(id_or_name),
+ params=params)
| client.container_restart()
Client API command to restart a container. | XD-embedded/xd-docker | diff --git a/tests/integration/container_restart_test.py b/tests/integration/container_restart_test.py
new file mode 100644
index 0000000..53d998b
--- /dev/null
+++ b/tests/integration/container_restart_test.py
@@ -0,0 +1,26 @@
+import pytest
+import os
+
+from xd.docker.client import *
+
+
+def test_restart(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 5")
+ docker.container_restart('xd-docker-test')
+
+
+def test_already_stopped(docker, stdout):
+ os.system("docker run --name xd-docker-test busybox:latest true")
+ docker.container_restart('xd-docker-test')
+
+
+def test_not_started(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest true")
+ docker.container_restart('xd-docker-test')
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_stop('xd-docker-test')
+ assert clienterror.value.code == 404
diff --git a/tests/integration/container_stop_test.py b/tests/integration/container_stop_test.py
index e11e8f3..3aaa43b 100644
--- a/tests/integration/container_stop_test.py
+++ b/tests/integration/container_stop_test.py
@@ -1,6 +1,5 @@
import pytest
import os
-import time
from xd.docker.client import *
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index 6bf24f2..e890e2d 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1320,3 +1320,37 @@ class container_stop_tests(ContextClientTestCase):
params = post_mock.call_args[1]['params']
assert 't' in params
assert params['t'] == 42
+
+
+class container_restart_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_normal(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_restart("foobar")
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_restart('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_restart(ContainerName("foobar"))
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_restart(Container(self.client,name="foobar"))
+
+ @mock.patch('requests.post')
+ def test_timeout(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_restart("foobar", timeout=42)
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 't' in params
+ assert params['t'] == 42
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
mccabe==0.7.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pycodestyle==2.13.0
pyflakes==3.3.2
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
requests-unixsocket==0.4.1
tomli==2.2.1
typing==3.7.4.3
urllib3==2.3.0
-e git+https://github.com/XD-embedded/xd-docker.git@1f7353342a8eda120d4fddb87a14e63f5da799b3#egg=XD_Docker
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pycodestyle==2.13.0
- pyflakes==3.3.2
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- requests-unixsocket==0.4.1
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==2.3.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_restart_tests::test_container",
"tests/unit/client_test.py::container_restart_tests::test_containername",
"tests/unit/client_test.py::container_restart_tests::test_no_such_container",
"tests/unit/client_test.py::container_restart_tests::test_normal",
"tests/unit/client_test.py::container_restart_tests::test_timeout"
]
| [
"tests/integration/container_restart_test.py::test_restart",
"tests/integration/container_restart_test.py::test_already_stopped",
"tests/integration/container_restart_test.py::test_not_started",
"tests/integration/container_restart_test.py::test_no_such_container",
"tests/integration/container_stop_test.py::test_stop",
"tests/integration/container_stop_test.py::test_already_stopped",
"tests/integration/container_stop_test.py::test_not_started",
"tests/integration/container_stop_test.py::test_no_such_container"
]
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str",
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_already_stopped",
"tests/unit/client_test.py::container_stop_tests::test_container",
"tests/unit/client_test.py::container_stop_tests::test_containername",
"tests/unit/client_test.py::container_stop_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_normal",
"tests/unit/client_test.py::container_stop_tests::test_timeout"
]
| []
| MIT License | 727 | [
"xd/docker/client.py"
]
| [
"xd/docker/client.py"
]
|
XD-embedded__xd-docker-57 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | 2016-08-28 09:44:51 | 03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f | diff --git a/xd/docker/client.py b/xd/docker/client.py
index a0c7398..fdff2db 100644
--- a/xd/docker/client.py
+++ b/xd/docker/client.py
@@ -17,7 +17,7 @@ from typing import Optional, Union, Sequence, Dict, Tuple, List
from xd.docker.container import Container
from xd.docker.image import Image
from xd.docker.parameters import ContainerConfig, HostConfig, ContainerName, \
- Repository, RegistryAuthConfig, VolumeMount, json_update
+ Repository, RegistryAuthConfig, VolumeMount, Signal, json_update
import logging
log = logging.getLogger(__name__)
@@ -572,3 +572,29 @@ class DockerClient(object):
self._post('/containers/{}/restart'.format(id_or_name),
params=params)
+
+ def container_kill(self,
+ container: Union[Container, ContainerName, str],
+ signal: Optional[Signal]=None):
+ """Kill container.
+
+ Send signal to container, and (maybe) wait for the container to exit.
+
+ Arguments:
+ container: The container to remove (id or name).
+ signal: Signal to send to container.
+ """
+
+ # Handle convenience argument types
+ if isinstance(container, str):
+ id_or_name = container
+ elif isinstance(container, ContainerName):
+ id_or_name = container.name
+ else:
+ id_or_name = container.id or container.name
+
+ params = {}
+ if signal is not None:
+ params['signal'] = signal
+
+ self._post('/containers/{}/kill'.format(id_or_name), params=params)
| client.container_kill()
Client API command to kill a container. | XD-embedded/xd-docker | diff --git a/tests/integration/container_kill_test.py b/tests/integration/container_kill_test.py
new file mode 100644
index 0000000..bb9dfbc
--- /dev/null
+++ b/tests/integration/container_kill_test.py
@@ -0,0 +1,38 @@
+import pytest
+import os
+
+from xd.docker.client import *
+
+
+def test_kill(docker, stdout):
+ os.system("docker run -d --name xd-docker-test busybox:latest sleep 10")
+ docker.container_kill('xd-docker-test')
+
+
+def test_already_stopped(docker, stdout):
+ os.system("docker run --name xd-docker-test busybox:latest true")
+ # Prior to Docker 1.8, kill silently ignores stopped containers, and
+ # beginning with 1.8, they return HTTP 500 (ServerError)
+ if docker.api_version > (1, 19):
+ with pytest.raises(ServerError) as servererror:
+ docker.container_kill('xd-docker-test')
+ else:
+ docker.container_kill('xd-docker-test')
+
+
+def test_not_started(docker, stdout):
+ os.system("docker create --name xd-docker-test busybox:latest true")
+ # Prior to Docker 1.8, kill silently ignores stopped containers, and
+ # beginning with 1.8, they return HTTP 500 (ServerError)
+ if docker.api_version > (1, 19):
+ with pytest.raises(ServerError) as servererror:
+ docker.container_kill('xd-docker-test')
+ else:
+ docker.container_kill('xd-docker-test')
+
+
+def test_no_such_container(docker, stdout):
+ with pytest.raises(ClientError) as clienterror:
+ with stdout.redirect():
+ docker.container_kill('xd-docker-test')
+ assert clienterror.value.code == 404
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index e890e2d..c2f4f95 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1354,3 +1354,46 @@ class container_restart_tests(ContextClientTestCase):
params = post_mock.call_args[1]['params']
assert 't' in params
assert params['t'] == 42
+
+
+class container_kill_tests(ContextClientTestCase):
+
+ @mock.patch('requests.post')
+ def test_normal(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill("foobar")
+
+ @mock.patch('requests.post')
+ def test_no_such_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 404)
+ with pytest.raises(ClientError) as clienterror:
+ self.client.container_kill('foobar')
+ assert clienterror.value.code == 404
+
+ @mock.patch('requests.post')
+ def test_containername(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill(ContainerName("foobar"))
+
+ @mock.patch('requests.post')
+ def test_container(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill(Container(self.client,name="foobar"))
+
+ @mock.patch('requests.post')
+ def test_sigint(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill("foobar", signal='SIGINT')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'signal' in params
+ assert params['signal'] == 'SIGINT'
+
+ @mock.patch('requests.post')
+ def test_sighup(self, post_mock):
+ post_mock.return_value = requests_mock.Response(None, 204)
+ self.client.container_kill("foobar", signal='SIGHUP')
+ assert 'params' in post_mock.call_args[1]
+ params = post_mock.call_args[1]['params']
+ assert 'signal' in params
+ assert params['signal'] == 'SIGHUP'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests-unixsocket==0.3.0
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/XD-embedded/xd-docker.git@03179f60c8e6d5fa7d2a2a20ec429da132ff7e8f#egg=XD_Docker
zipp==3.6.0
| name: xd-docker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-unixsocket==0.3.0
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/xd-docker
| [
"tests/unit/client_test.py::container_kill_tests::test_container",
"tests/unit/client_test.py::container_kill_tests::test_containername",
"tests/unit/client_test.py::container_kill_tests::test_no_such_container",
"tests/unit/client_test.py::container_kill_tests::test_normal",
"tests/unit/client_test.py::container_kill_tests::test_sighup",
"tests/unit/client_test.py::container_kill_tests::test_sigint"
]
| [
"tests/integration/container_kill_test.py::test_kill",
"tests/integration/container_kill_test.py::test_already_stopped",
"tests/integration/container_kill_test.py::test_not_started",
"tests/integration/container_kill_test.py::test_no_such_container"
]
| [
"tests/unit/client_test.py::init_tests::test_init_foobar",
"tests/unit/client_test.py::init_tests::test_init_http",
"tests/unit/client_test.py::init_tests::test_init_http_unix",
"tests/unit/client_test.py::init_tests::test_init_noargs",
"tests/unit/client_test.py::init_tests::test_init_tcp",
"tests/unit/client_test.py::init_tests::test_init_unix",
"tests/unit/client_test.py::version_tests::test_version",
"tests/unit/client_test.py::version_tests::test_version_httperror_404",
"tests/unit/client_test.py::version_tests::test_version_httperror_500",
"tests/unit/client_test.py::version_tests::test_version_httperror_unknown",
"tests/unit/client_test.py::ping_tests::test_ping",
"tests/unit/client_test.py::ping_tests::test_ping_server_error",
"tests/unit/client_test.py::containers_tests::test_containers_1",
"tests/unit/client_test.py::containers_tests::test_containers_3",
"tests/unit/client_test.py::containers_tests::test_containers_4",
"tests/unit/client_test.py::containers_tests::test_containers_only_running_false",
"tests/unit/client_test.py::images_tests::test_images",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect",
"tests/unit/client_test.py::image_inspect_tests::test_image_inspect_raw",
"tests/unit/client_test.py::image_build_tests::test_image_build",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_as_file",
"tests/unit/client_test.py::image_build_tests::test_image_build_context_does_not_exist",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_rm",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_1",
"tests/unit/client_test.py::image_build_tests::test_image_build_invalid_tag_2",
"tests/unit/client_test.py::image_build_tests::test_image_build_nonstandard_dockerfile",
"tests/unit/client_test.py::image_build_tests::test_image_build_run_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_server_error",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_args",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_forcerm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_name",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_nocache",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_norm",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_only_error_output",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_pull",
"tests/unit/client_test.py::image_build_tests::test_image_build_with_registry_config",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_1_ok",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_2_not_found",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_3_authconfig",
"tests/unit/client_test.py::image_pull_tests::test_image_pull_4_invalid_authconfig",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_1",
"tests/unit/client_test.py::image_remove_tests::test_image_remove_2_not_found",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_1_repo",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_2_repo_and_tag",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_3_force",
"tests/unit/client_test.py::image_tag_tests::test_image_tag_4_fail",
"tests/unit/client_test.py::container_create_tests::test_container_create_1_anon",
"tests/unit/client_test.py::container_create_tests::test_container_create_2_named",
"tests/unit/client_test.py::container_create_tests::test_container_create_3_named_str",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_nopull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_pull_not_needed",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_command",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_env",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_exposed_ports",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_memory_and_swap",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_network_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_oom_kill_false",
"tests/unit/client_test.py::container_create_tests::test_container_create_with_swap_but_not_memory",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id",
"tests/unit/client_test.py::container_remove_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_remove_tests::test_container_with_name",
"tests/unit/client_test.py::container_remove_tests::test_containername",
"tests/unit/client_test.py::container_remove_tests::test_force_false",
"tests/unit/client_test.py::container_remove_tests::test_force_true",
"tests/unit/client_test.py::container_remove_tests::test_no_such_container",
"tests/unit/client_test.py::container_remove_tests::test_ok",
"tests/unit/client_test.py::container_remove_tests::test_volumes_false",
"tests/unit/client_test.py::container_remove_tests::test_volumes_true",
"tests/unit/client_test.py::container_start_tests::test_already_running",
"tests/unit/client_test.py::container_start_tests::test_container_with_id",
"tests/unit/client_test.py::container_start_tests::test_container_with_id_and_name",
"tests/unit/client_test.py::container_start_tests::test_container_with_name",
"tests/unit/client_test.py::container_start_tests::test_containername",
"tests/unit/client_test.py::container_start_tests::test_no_such_container",
"tests/unit/client_test.py::container_start_tests::test_str",
"tests/unit/client_test.py::container_wait_tests::test_0",
"tests/unit/client_test.py::container_wait_tests::test_42",
"tests/unit/client_test.py::container_wait_tests::test_container",
"tests/unit/client_test.py::container_wait_tests::test_containername",
"tests/unit/client_test.py::container_wait_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_already_stopped",
"tests/unit/client_test.py::container_stop_tests::test_container",
"tests/unit/client_test.py::container_stop_tests::test_containername",
"tests/unit/client_test.py::container_stop_tests::test_no_such_container",
"tests/unit/client_test.py::container_stop_tests::test_normal",
"tests/unit/client_test.py::container_stop_tests::test_timeout",
"tests/unit/client_test.py::container_restart_tests::test_container",
"tests/unit/client_test.py::container_restart_tests::test_containername",
"tests/unit/client_test.py::container_restart_tests::test_no_such_container",
"tests/unit/client_test.py::container_restart_tests::test_normal",
"tests/unit/client_test.py::container_restart_tests::test_timeout"
]
| []
| MIT License | 728 | [
"xd/docker/client.py"
]
| [
"xd/docker/client.py"
]
|
|
zalando-stups__senza-332 | 4993fb81ebcc9c8a5c6773af14eaa3cb0e069010 | 2016-08-29 09:53:21 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/configuration.py b/senza/components/configuration.py
index e83b419..6712f03 100644
--- a/senza/components/configuration.py
+++ b/senza/components/configuration.py
@@ -3,7 +3,7 @@ from senza.utils import ensure_keys, named_value
def format_params(args):
- items = [(key, val) for key, val in args.__dict__.items() if key not in ('region', 'version')]
+ items = [(key, val) for key, val in sorted(args.__dict__.items()) if key not in ('region', 'version')]
return ', '.join(['{}: {}'.format(key, val) for key, val in items])
@@ -27,7 +27,9 @@ def component_configuration(definition, configuration, args, info, force, accoun
if 'Description' not in definition:
# set some sane default stack description
- definition['Description'] = get_default_description(info, args)
+ # we need to truncate at 1024 chars (should be Bytes actually)
+ # see http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/template-description-structure.html
+ definition['Description'] = get_default_description(info, args)[:1024]
# ServerSubnets
for region, subnets in configuration.get('ServerSubnets', {}).items():
| Shrink "description" field to allowed 1024 chars
This seems to pop up more and more, I see no real reason why senza should not just do
description[:1024]
to fix this for everyone. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index fdc377d..652ccfc 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -6,6 +6,7 @@ import pytest
import senza.traffic
from senza.cli import AccountArguments
from senza.components import get_component
+from senza.components.configuration import component_configuration
from senza.components.auto_scaling_group import (component_auto_scaling_group,
normalize_asg_success,
normalize_network_threshold,
@@ -923,3 +924,14 @@ def test_weighted_dns_load_balancer_v2(monkeypatch, boto_resource):
assert result['Resources']['MyLBListener']['Properties']['Certificates'] == [{'CertificateArn': 'arn:aws:42'}]
# test that our custom drain setting works
assert result['Resources']['MyLBTargetGroup']['Properties']['TargetGroupAttributes'] == [{'Key': 'deregistration_delay.timeout_seconds', 'Value': '123'}]
+
+
+def test_max_description_length():
+ definition = {}
+ configuration = {}
+ args = MagicMock()
+ args.__dict__ = {'Param1': 'my param value', 'SecondParam': ('1234567890' * 100)}
+ info = {'StackName': 'My-Stack'}
+ component_configuration(definition, configuration, args, info, False, AccountArguments('dummyregion'))
+ assert definition['Description'].startswith('My Stack (Param1: my param value, SecondParam: 1234567890')
+ assert 0 < len(definition['Description']) <= 1024
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
execnet==2.1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@4993fb81ebcc9c8a5c6773af14eaa3cb0e069010#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
typing_extensions==4.13.0
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- typing-extensions==4.13.0
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_max_description_length"
]
| [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2"
]
| []
| Apache License 2.0 | 729 | [
"senza/components/configuration.py"
]
| [
"senza/components/configuration.py"
]
|
|
pimutils__khal-495 | 7ff941fbb8f294de8eb3e6abaed48014a804f9d0 | 2016-08-30 00:20:35 | e8050625cccc20cd8c6a673f44af61d70183f638 | diff --git a/khal/settings/settings.py b/khal/settings/settings.py
index b9fde9d..254e424 100644
--- a/khal/settings/settings.py
+++ b/khal/settings/settings.py
@@ -31,7 +31,7 @@ from .exceptions import InvalidSettingsError, CannotParseConfigFileError, NoConf
from khal import __productname__
from ..log import logger
from .utils import is_timezone, weeknumber_option, config_checks, \
- expand_path, expand_db_path, is_color
+ expand_path, expand_db_path, is_color, get_vdir_type, get_color_from_vdir
SPECPATH = os.path.join(os.path.dirname(__file__), 'khal.spec')
@@ -66,12 +66,17 @@ def find_configuration_file():
return None
-def get_config(config_path=None):
+def get_config(
+ config_path=None,
+ _get_color_from_vdir=get_color_from_vdir,
+ _get_vdir_type=get_vdir_type):
"""reads the config file, validates it and return a config dict
:param config_path: path to a custom config file, if none is given the
default locations will be searched
:type config_path: str
+ :param _get_color_from_vdir: override get_color_from_vdir for testing purposes
+ :param _get_vdir_type: override get_vdir_type for testing purposes
:returns: configuration
:rtype: dict
"""
@@ -124,7 +129,7 @@ def get_config(config_path=None):
if abort or not results:
raise InvalidSettingsError()
- config_checks(user_config)
+ config_checks(user_config, _get_color_from_vdir, _get_vdir_type)
extras = get_extra_values(user_config)
for section, value in extras:
diff --git a/khal/settings/utils.py b/khal/settings/utils.py
index d7a714c..0470ea4 100644
--- a/khal/settings/utils.py
+++ b/khal/settings/utils.py
@@ -152,7 +152,10 @@ def get_vdir_type(_):
return 'calendar'
-def config_checks(config):
+def config_checks(
+ config,
+ _get_color_from_vdir=get_color_from_vdir,
+ _get_vdir_type=get_vdir_type):
"""do some tests on the config we cannot do with configobj's validator"""
if len(config['calendars'].keys()) < 1:
logger.fatal('Found no calendar section in the config file')
@@ -173,8 +176,8 @@ def config_checks(config):
config['calendars'].pop(calendar)
for vdir in sorted(vdirs):
calendar = {'path': vdir,
- 'color': get_color_from_vdir(vdir),
- 'type': get_vdir_type(vdir),
+ 'color': _get_color_from_vdir(vdir),
+ 'type': _get_vdir_type(vdir),
'readonly': False
}
name = get_unique_name(vdir, config['calendars'].keys())
@@ -186,4 +189,4 @@ def config_checks(config):
config['calendars'][calendar]['readonly'] = True
if config['calendars'][calendar]['color'] == 'auto':
config['calendars'][calendar]['color'] = \
- get_color_from_vdir(config['calendars'][calendar]['path'])
+ _get_color_from_vdir(config['calendars'][calendar]['path'])
| Testsuite fails when test collections actually exist
From https://aur.archlinux.org/packages/khal/#comment-560248
platform linux -- Python 3.5.2, pytest-2.9.2, py-1.4.31, pluggy-0.3.1
rootdir: /tmp/yaourt-tmp-nicolas/aur-khal/src/khal-0.8.3, inifile:
plugins: localserver-0.3.5, hypothesis-3.4.2, subtesthack-0.1.1
collected 178 items
tests/aux_test.py .....................
tests/backend_test.py ..........................
tests/cal_display_test.py ...xxx
tests/cli_test.py .....x.............
tests/controller_test.py ....
tests/event_test.py ..............................
tests/khalendar_aux_test.py ................................
tests/khalendar_test.py ......................
tests/settings_test.py F.F.....
tests/terminal_test.py ...
tests/vtimezone_test.py ...
tests/ui/test_calendarwidget.py ...
tests/ui/test_widgets.py .
============================================ FAILURES =============================================
_________________________________ TestSettings.test_simple_config _________________________________
self = <tests.settings_test.TestSettings object at 0x7f6799a26240>
def test_simple_config(self):
config = get_config(PATH + 'simple.conf')
comp_config = {
'calendars': {
'home': {'path': os.path.expanduser('~/.calendars/home/'),
'readonly': False, 'color': None, 'type': 'calendar'},
'work': {'path': os.path.expanduser('~/.calendars/work/'),
'readonly': False, 'color': None, 'type': 'calendar'},
},
'sqlite': {'path': os.path.expanduser('~/.local/share/khal/khal.db')},
'locale': {
'local_timezone': pytz.timezone('Europe/Berlin'),
'default_timezone': pytz.timezone('Europe/Berlin'),
'timeformat': '%H:%M',
'dateformat': '%d.%m.',
'longdateformat': '%d.%m.%Y',
'datetimeformat': '%d.%m. %H:%M',
'longdatetimeformat': '%d.%m.%Y %H:%M',
'firstweekday': 0,
'encoding': 'utf-8',
'unicode_symbols': True,
'weeknumbers': False,
},
'default': {
'default_command': 'calendar',
'default_calendar': None,
'show_all_days': False,
'print_new': 'False',
'days': 2,
'highlight_event_days': False
}
}
for key in comp_config:
> assert config[key] == comp_config[key]
E assert {'home': {'pa...: 'calendar'}} == {'home': {'col...: 'calendar'}}
E Differing items:
E {'work': {'path': '/home/nicolas/.calendars/work/', 'color': '#E6C800FF', 'readonly': False, 'type': 'calendar'}} != {'work': {'color': None, 'path': '/home/nicolas/.calendars/work/', 'readonly': False, 'type': 'calendar'}}
E {'home': {'path': '/home/nicolas/.calendars/home/', 'color': '#882F00FF', 'readonly': False, 'type': 'calendar'}} != {'home': {'color': None, 'path': '/home/nicolas/.calendars/home/', 'readonly': False, 'type': 'calendar'}}
E Use -v to get the full diff
tests/settings_test.py:50: AssertionError
_____________________________________ TestSettings.test_small _____________________________________
self = <tests.settings_test.TestSettings object at 0x7f6799a6cb00>
def test_small(self):
config = get_config(PATH + 'small.conf')
comp_config = {
'calendars': {
'home': {'path': os.path.expanduser('~/.calendars/home/'),
'color': 'dark green', 'readonly': False,
'type': 'calendar'},
'work': {'path': os.path.expanduser('~/.calendars/work/'),
'readonly': True, 'color': None,
'type': 'calendar'}},
'sqlite': {'path': os.path.expanduser('~/.local/share/khal/khal.db')},
'locale': {
'local_timezone': get_localzone(),
'default_timezone': get_localzone(),
'timeformat': '%H:%M',
'dateformat': '%d.%m.',
'longdateformat': '%d.%m.%Y',
'datetimeformat': '%d.%m. %H:%M',
'longdatetimeformat': '%d.%m.%Y %H:%M',
'firstweekday': 0,
'encoding': 'utf-8',
'unicode_symbols': True,
'weeknumbers': False,
},
'default': {
'default_calendar': None,
'default_command': 'calendar',
'print_new': 'False',
'show_all_days': False,
'days': 2,
'highlight_event_days': False
}
}
for key in comp_config:
> assert config[key] == comp_config[key]
E assert {'home': {'pa...: 'calendar'}} == {'home': {'col...: 'calendar'}}
E Omitting 1 identical items, use -v to show
E Differing items:
E {'work': {'path': '/home/nicolas/.calendars/work/', 'readonly': True, 'color': '#E6C800FF', 'type': 'calendar'}} != {'work': {'color': None, 'path': '/home/nicolas/.calendars/work/', 'readonly': True, 'type': 'calendar'}}
E Use -v to get the full diff
tests/settings_test.py:90: AssertionError
========================= 2 failed, 172 passed, 4 xfailed in 5.65 seconds =========================
==> ERROR: A failure occurred in check().
Aborting...
==> ERROR: Makepkg was unable to build khal.
==> Restart building khal ? [y/N]
| pimutils/khal | diff --git a/tests/settings_test.py b/tests/settings_test.py
index 9b18bfc..51156b6 100644
--- a/tests/settings_test.py
+++ b/tests/settings_test.py
@@ -15,7 +15,11 @@ PATH = __file__.rsplit('/', 1)[0] + '/configs/'
class TestSettings(object):
def test_simple_config(self):
- config = get_config(PATH + 'simple.conf')
+ config = get_config(
+ PATH + 'simple.conf',
+ _get_color_from_vdir=lambda x: None,
+ _get_vdir_type=lambda x: 'calendar',
+ )
comp_config = {
'calendars': {
'home': {'path': os.path.expanduser('~/.calendars/home/'),
@@ -53,7 +57,11 @@ class TestSettings(object):
get_config(PATH + 'nocalendars.conf')
def test_small(self):
- config = get_config(PATH + 'small.conf')
+ config = get_config(
+ PATH + 'small.conf',
+ _get_color_from_vdir=lambda x: None,
+ _get_vdir_type=lambda x: 'calendar',
+ )
comp_config = {
'calendars': {
'home': {'path': os.path.expanduser('~/.calendars/home/'),
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"codecov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libxml2-dev libxslt1-dev"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | atomicwrites==1.4.1
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
codecov==2.1.13
configobj==5.0.9
coverage==7.8.0
exceptiongroup==1.2.2
icalendar==6.1.3
idna==3.10
iniconfig==2.1.0
-e git+https://github.com/pimutils/khal.git@7ff941fbb8f294de8eb3e6abaed48014a804f9d0#egg=khal
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
pyxdg==0.28
requests==2.32.3
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
tzdata==2025.2
tzlocal==5.3.1
urllib3==2.3.0
urwid==2.6.16
wcwidth==0.2.13
| name: khal
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- atomicwrites==1.4.1
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- codecov==2.1.13
- configobj==5.0.9
- coverage==7.8.0
- exceptiongroup==1.2.2
- icalendar==6.1.3
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyxdg==0.28
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- tzdata==2025.2
- tzlocal==5.3.1
- urllib3==2.3.0
- urwid==2.6.16
- wcwidth==0.2.13
prefix: /opt/conda/envs/khal
| [
"tests/settings_test.py::TestSettings::test_simple_config",
"tests/settings_test.py::TestSettings::test_small"
]
| []
| [
"tests/settings_test.py::TestSettings::test_nocalendars",
"tests/settings_test.py::TestSettings::test_old_config",
"tests/settings_test.py::TestSettings::test_extra_sections",
"tests/settings_test.py::test_discover",
"tests/settings_test.py::test_get_unique_name",
"tests/settings_test.py::test_config_checks"
]
| []
| MIT License | 730 | [
"khal/settings/settings.py",
"khal/settings/utils.py"
]
| [
"khal/settings/settings.py",
"khal/settings/utils.py"
]
|
|
Azure__WALinuxAgent-403 | d0392908c8a8c66f41183696c0dbe5dfc3d34acc | 2016-08-31 22:29:00 | d0392908c8a8c66f41183696c0dbe5dfc3d34acc | diff --git a/azurelinuxagent/common/protocol/wire.py b/azurelinuxagent/common/protocol/wire.py
index 287da33c..29a1663f 100644
--- a/azurelinuxagent/common/protocol/wire.py
+++ b/azurelinuxagent/common/protocol/wire.py
@@ -269,7 +269,7 @@ def ext_status_to_v1(ext_name, ext_status):
"timestampUTC": timestamp
}
if len(v1_sub_status) != 0:
- v1_ext_status['status']['substatus'] = v1_sub_status
+ v1_ext_status['substatus'] = v1_sub_status
return v1_ext_status
diff --git a/azurelinuxagent/common/utils/textutil.py b/azurelinuxagent/common/utils/textutil.py
index f03c7e67..6d460cb1 100644
--- a/azurelinuxagent/common/utils/textutil.py
+++ b/azurelinuxagent/common/utils/textutil.py
@@ -277,3 +277,11 @@ def b64encode(s):
if PY_VERSION_MAJOR > 2:
return base64.b64encode(bytes(s, 'utf-8')).decode('utf-8')
return base64.b64encode(s)
+
+
+def safe_shlex_split(s):
+ import shlex
+ from azurelinuxagent.common.version import PY_VERSION
+ if PY_VERSION[:2] == (2, 6):
+ return shlex.split(s.encode('utf-8'))
+ return shlex.split(s)
diff --git a/azurelinuxagent/daemon/resourcedisk/default.py b/azurelinuxagent/daemon/resourcedisk/default.py
index 9ffe2809..a5f4cf66 100644
--- a/azurelinuxagent/daemon/resourcedisk/default.py
+++ b/azurelinuxagent/daemon/resourcedisk/default.py
@@ -106,10 +106,7 @@ class ResourceDiskHandler(object):
raise ResourceDiskError("Could not determine partition info for "
"{0}: {1}".format(device, ret[1]))
- force_option = 'F'
- if fs == 'xfs':
- force_option = 'f'
- mkfs_string = "mkfs.{0} {1} -{2}".format(fs, partition, force_option)
+ mkfs_string = "mkfs.{0} {1} -F".format(fs, partition)
if "gpt" in ret[1]:
logger.info("GPT detected, finding partitions")
diff --git a/azurelinuxagent/ga/update.py b/azurelinuxagent/ga/update.py
index 37774d29..5eff7e53 100644
--- a/azurelinuxagent/ga/update.py
+++ b/azurelinuxagent/ga/update.py
@@ -16,12 +16,12 @@
#
# Requires Python 2.4+ and Openssl 1.0+
#
+
import glob
import json
import os
import platform
import re
-import shlex
import shutil
import signal
import subprocess
@@ -130,7 +130,7 @@ class UpdateHandler(object):
try:
# Launch the correct Python version for python-based agents
- cmds = shlex.split(agent_cmd)
+ cmds = textutil.safe_shlex_split(agent_cmd)
if cmds[0].lower() == "python":
cmds[0] = get_python_cmd()
agent_cmd = " ".join(cmds)
diff --git a/setup.py b/setup.py
index 7303b62c..887a557e 100755
--- a/setup.py
+++ b/setup.py
@@ -147,7 +147,7 @@ class install(_install):
('lnx-distro=', None, 'target Linux distribution'),
('lnx-distro-version=', None, 'target Linux distribution version'),
('lnx-distro-fullname=', None, 'target Linux distribution full name'),
- ('register-service', None, 'register as startup service and start'),
+ ('register-service', None, 'register as startup service'),
('skip-data-files', None, 'skip data files installation'),
]
@@ -172,16 +172,14 @@ class install(_install):
def run(self):
_install.run(self)
if self.register_service:
- osutil = get_osutil()
- osutil.register_agent_service()
- osutil.start_agent_service()
+ get_osutil().register_agent_service()
setuptools.setup(
name=AGENT_NAME,
version=AGENT_VERSION,
long_description=AGENT_DESCRIPTION,
- author='Microsoft Corporation',
+ author='Yue Zhang, Stephen Zarkos, Eric Gable',
author_email='[email protected]',
platforms='Linux',
url='https://github.com/Azure/WALinuxAgent',
| [2.1.5] AutoUpdate WALA error on RHEL-6.8
Description of problem:
waagent fails to run the AutoUpdated WALA process.
Version-Release number of selected component (if applicable):
WALinuxAgent-2.1.5
RHEL Version: RHEL-6.8
Python Version: 2.6.6
How reproducible:
100%
Steps to Reproduce:
1. Prepare a RHEL7.3 VM on Azure East US location(ASM mode) with WALA-2.1.5 installed. Enable wala auto-update in the /etc/waagent.conf:
\# AutoUpdate.Enabled=y
\# AutoUpdate.GAFamily=Prod
Then restart waagent service
\# service waagent restart
2. Wait until the new WALA package is downloaded. Check waagent.log.
Actual results:
waagent can't run the new WALA version.
There's error logs in waagent.log:
```
2016/08/25 20:20:17.474136 INFO Instantiating Agent WALinuxAgent-2.1.6 from package
2016/08/25 20:20:17.476843 INFO Agent WALinuxAgent-2.1.6 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/08/25 20:20:17.476980 INFO Ensuring Agent WALinuxAgent-2.1.6 is downloaded
2016/08/25 20:20:17.738310 INFO Agent WALinuxAgent-2.1.6 downloaded from https://rdfepirv2bl2prdstr01.blob.core.windows.net/7d89d439b79f4452950452399add2c90/Microsoft.OSTCLinuxAgent__Prod__2.1.6
2016/08/25 20:20:17.799473 INFO Agent WALinuxAgent-2.1.6 unpacked successfully to /var/lib/waagent/WALinuxAgent-2.1.6
2016/08/25 20:20:17.877585 INFO Agent WALinuxAgent-2.1.6 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.6/HandlerManifest.json
2016/08/25 20:20:17.962055 INFO Agent WALinuxAgent-2.1.6 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/08/25 20:20:17.977174 INFO Agent WALinuxAgent-2.1.6 downloaded successfully
2016/08/25 20:20:18.021588 INFO Event: name=WALinuxAgent, op=Install, message=Agent WALinuxAgent-2.1.6 downloaded successfully
2016/08/25 20:20:18.102944 INFO Agent WALinuxAgent-2.1.5 discovered WALinuxAgent-2.1.6 as an update and will exit
2016/08/25 20:21:12.959822 INFO Agent WALinuxAgent-2.1.5 launched with command 'python -u /usr/sbin/waagent -run-exthandlers' is successfully running
2016/08/25 20:21:13.048011 INFO Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.5 launched with command 'python -u /usr/sbin/waagent -run-exthandlers' is successfully running
2016/08/25 20:21:13.146289 INFO Instantiating Agent WALinuxAgent-2.1.6 from disk
2016/08/25 20:21:13.195095 INFO Agent WALinuxAgent-2.1.6 error state: Last Failure: 0.0, Total Failures: 0, Fatal: False
2016/08/25 20:21:13.230193 INFO Ensuring Agent WALinuxAgent-2.1.6 is downloaded
2016/08/25 20:21:13.235666 INFO Agent WALinuxAgent-2.1.6 was previously downloaded - skipping download
2016/08/25 20:21:13.242621 INFO Agent WALinuxAgent-2.1.6 loaded manifest from /var/lib/waagent/WALinuxAgent-2.1.6/HandlerManifest.json
2016/08/25 20:21:13.328814 INFO Determined Agent WALinuxAgent-2.1.6 to be the latest agent
2016/08/25 20:21:13.343555 WARNING Agent WALinuxAgent-2.1.6 launched with command 'python -u bin/WALinuxAgent-2.1.6-py2.7.egg -run-exthandlers' failed with exception: execv() argument 1 must be encoded string without NULL bytes, not str
2016/08/25 20:21:13.566290 ERROR Event: name=WALinuxAgent, op=Enable, message=Agent WALinuxAgent-2.1.6 launched with command 'python -u bin/WALinuxAgent-2.1.6-py2.7.egg -run-exthandlers' failed with exception: execv() argument 1 must be encoded string without NULL bytes, not str
2016/08/25 20:21:13.675956 WARNING Agent WALinuxAgent-2.1.6 is permanently blacklisted
2016/08/25 20:21:13.749416 INFO Installed Agent WALinuxAgent-2.1.5 is the most current agent
2016/08/25 20:21:13.756486 INFO Agent WALinuxAgent-2.1.5 launched with command 'python -u /usr/sbin/waagent -run-exthandlers'
\# cat /var/lib/waagent/WALinuxAgent-2.1.6/error.json
{"was_fatal": true, "failure_count": 1, "last_failure": 1472127673.6758201}
```
Additional info:
1. Only exists on RHEL-6. Doesn't exist on RHEL-7.
2. If install WALA-2.1.6 directly, there's no error log. Only exists during AutoUpdate. | Azure/WALinuxAgent | diff --git a/tests/ga/test_update.py b/tests/ga/test_update.py
index f4b33207..0e07119d 100644
--- a/tests/ga/test_update.py
+++ b/tests/ga/test_update.py
@@ -438,7 +438,8 @@ class TestGuestAgent(UpdateTestCase):
agent = GuestAgent(path=self.agent_path)
agent._unpack()
agent._load_manifest()
- self.assertEqual(agent.manifest.get_enable_command(), agent.get_agent_cmd())
+ self.assertEqual(agent.manifest.get_enable_command(),
+ agent.get_agent_cmd())
return
@patch("azurelinuxagent.ga.update.GuestAgent._ensure_downloaded")
@@ -992,13 +993,14 @@ class TestUpdate(UpdateTestCase):
agent = self.update_handler.get_latest_agent()
args, kwargs = self._test_run_latest()
- cmds = shlex.split(agent.get_agent_cmd())
+ cmds = textutil.safe_shlex_split(agent.get_agent_cmd())
if cmds[0].lower() == "python":
cmds[0] = get_python_cmd()
self.assertEqual(args[0], cmds)
self.assertEqual(True, 'cwd' in kwargs)
self.assertEqual(agent.get_agent_dir(), kwargs['cwd'])
+ self.assertEqual(False, '\x00' in cmds[0])
return
def test_run_latest_polls_and_waits_for_success(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 5
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pyasn1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@d0392908c8a8c66f41183696c0dbe5dfc3d34acc#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- pyasn1==0.5.1
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/ga/test_update.py::TestUpdate::test_run_latest"
]
| []
| [
"tests/ga/test_update.py::TestGuestAgentError::test_clear",
"tests/ga/test_update.py::TestGuestAgentError::test_creation",
"tests/ga/test_update.py::TestGuestAgentError::test_load_preserves_error_state",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure_permanent",
"tests/ga/test_update.py::TestGuestAgentError::test_save",
"tests/ga/test_update.py::TestGuestAgentError::test_str",
"tests/ga/test_update.py::TestGuestAgent::test_clear_error",
"tests/ga/test_update.py::TestGuestAgent::test_creation",
"tests/ga/test_update.py::TestGuestAgent::test_download",
"tests/ga/test_update.py::TestGuestAgent::test_download_fail",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_download_skips_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_download_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_load_manifest_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_unpack_fails",
"tests/ga/test_update.py::TestGuestAgent::test_is_available",
"tests/ga/test_update.py::TestGuestAgent::test_is_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_is_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_load_error",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_empty",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_malformed",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_missing",
"tests/ga/test_update.py::TestGuestAgent::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgent::test_unpack",
"tests/ga/test_update.py::TestGuestAgent::test_unpack_fail",
"tests/ga/test_update.py::TestUpdate::test_creation",
"tests/ga/test_update.py::TestUpdate::test_emit_restart_event_emits_event_if_not_clean_start",
"tests/ga/test_update.py::TestUpdate::test_emit_restart_event_writes_sentinal_file",
"tests/ga/test_update.py::TestUpdate::test_ensure_lastest_agent_purges_old_agents",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_includes_old_agents",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_returns_true_on_first_use",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_if_too_frequent",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_if_when_no_new_versions",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_etag_matches",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_no_versions",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_skips_when_updates_are_disabled",
"tests/ga/test_update.py::TestUpdate::test_ensure_latest_agent_sorts",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_ignores_exceptions",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_kills_after_interval",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_skips_if_no_orphans",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_ignores_installed_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_raises_exception_for_restarting_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_resets_with_new_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_for_long_restarts",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_too_few_restarts",
"tests/ga/test_update.py::TestUpdate::test_filter_blacklisted_agents",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_no_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skip_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skips_unavailable",
"tests/ga/test_update.py::TestUpdate::test_get_pid_files",
"tests/ga/test_update.py::TestUpdate::test_get_pid_files_returns_previous",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_false_for_current_agent",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_false_for_exceptions",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_true_sentinal_agent_is_not_current",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_true_when_no_sentinal",
"tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_false_if_parent_exists",
"tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_true_if_parent_does_not_exist",
"tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_true_if_parent_is_init",
"tests/ga/test_update.py::TestUpdate::test_load_agents",
"tests/ga/test_update.py::TestUpdate::test_load_agents_does_not_reload",
"tests/ga/test_update.py::TestUpdate::test_load_agents_sorts",
"tests/ga/test_update.py::TestUpdate::test_purge_agents",
"tests/ga/test_update.py::TestUpdate::test_run",
"tests/ga/test_update.py::TestUpdate::test_run_clears_sentinal_on_successful_exit",
"tests/ga/test_update.py::TestUpdate::test_run_emits_restart_event",
"tests/ga/test_update.py::TestUpdate::test_run_keeps_running",
"tests/ga/test_update.py::TestUpdate::test_run_latest_captures_signals",
"tests/ga/test_update.py::TestUpdate::test_run_latest_creates_only_one_signal_handler",
"tests/ga/test_update.py::TestUpdate::test_run_latest_defaults_to_current",
"tests/ga/test_update.py::TestUpdate::test_run_latest_exception_blacklists",
"tests/ga/test_update.py::TestUpdate::test_run_latest_forwards_output",
"tests/ga/test_update.py::TestUpdate::test_run_latest_nonzero_code_marks_failures",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_failure",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_success",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polls_and_waits_for_success",
"tests/ga/test_update.py::TestUpdate::test_run_leaves_sentinal_on_unsuccessful_exit",
"tests/ga/test_update.py::TestUpdate::test_run_stops_if_orphaned",
"tests/ga/test_update.py::TestUpdate::test_run_stops_if_update_available",
"tests/ga/test_update.py::TestUpdate::test_set_agents_sets_agents",
"tests/ga/test_update.py::TestUpdate::test_set_agents_sorts_agents",
"tests/ga/test_update.py::TestUpdate::test_set_sentinal",
"tests/ga/test_update.py::TestUpdate::test_set_sentinal_writes_current_agent",
"tests/ga/test_update.py::TestUpdate::test_shutdown",
"tests/ga/test_update.py::TestUpdate::test_shutdown_ignores_exceptions",
"tests/ga/test_update.py::TestUpdate::test_shutdown_ignores_missing_sentinal_file",
"tests/ga/test_update.py::TestUpdate::test_write_pid_file",
"tests/ga/test_update.py::TestUpdate::test_write_pid_file_ignores_exceptions"
]
| []
| Apache License 2.0 | 733 | [
"azurelinuxagent/common/utils/textutil.py",
"azurelinuxagent/ga/update.py",
"azurelinuxagent/daemon/resourcedisk/default.py",
"setup.py",
"azurelinuxagent/common/protocol/wire.py"
]
| [
"azurelinuxagent/common/utils/textutil.py",
"azurelinuxagent/ga/update.py",
"azurelinuxagent/daemon/resourcedisk/default.py",
"setup.py",
"azurelinuxagent/common/protocol/wire.py"
]
|
|
docker__docker-py-1178 | 24bfb99e05d57a7a098a81fb86ea7b93cff62661 | 2016-09-01 01:42:42 | a44d65be370c28abd666a299456b83659dd1a1df | diff --git a/docker/api/network.py b/docker/api/network.py
index 34cd8987..0ee0dab6 100644
--- a/docker/api/network.py
+++ b/docker/api/network.py
@@ -22,7 +22,8 @@ class NetworkApiMixin(object):
@minimum_version('1.21')
def create_network(self, name, driver=None, options=None, ipam=None,
- check_duplicate=None, internal=False):
+ check_duplicate=None, internal=False, labels=None,
+ enable_ipv6=False):
if options is not None and not isinstance(options, dict):
raise TypeError('options must be a dictionary')
@@ -34,6 +35,22 @@ class NetworkApiMixin(object):
'CheckDuplicate': check_duplicate
}
+ if labels is not None:
+ if version_lt(self._version, '1.23'):
+ raise InvalidVersion(
+ 'network labels were introduced in API 1.23'
+ )
+ if not isinstance(labels, dict):
+ raise TypeError('labels must be a dictionary')
+ data["Labels"] = labels
+
+ if enable_ipv6:
+ if version_lt(self._version, '1.23'):
+ raise InvalidVersion(
+ 'enable_ipv6 was introduced in API 1.23'
+ )
+ data['EnableIPv6'] = True
+
if internal:
if version_lt(self._version, '1.22'):
raise InvalidVersion('Internal networks are not '
@@ -76,8 +93,15 @@ class NetworkApiMixin(object):
@check_resource
@minimum_version('1.21')
- def disconnect_container_from_network(self, container, net_id):
- data = {"container": container}
+ def disconnect_container_from_network(self, container, net_id,
+ force=False):
+ data = {"Container": container}
+ if force:
+ if version_lt(self._version, '1.22'):
+ raise InvalidVersion(
+ 'Forced disconnect was introduced in API 1.22'
+ )
+ data['Force'] = force
url = self._url("/networks/{0}/disconnect", net_id)
res = self._post_json(url, data=data)
self._raise_for_status(res)
diff --git a/docs/api.md b/docs/api.md
index 895d7d45..1699344a 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -283,22 +283,25 @@ The utility can be used as follows:
```python
>>> import docker.utils
>>> my_envs = docker.utils.parse_env_file('/path/to/file')
->>> docker.utils.create_container_config('1.18', '_mongodb', 'foobar', environment=my_envs)
+>>> client.create_container('myimage', 'command', environment=my_envs)
```
-You can now use this with 'environment' for `create_container`.
-
-
## create_network
-Create a network, similar to the `docker network create` command.
+Create a network, similar to the `docker network create` command. See the
+[networks documentation](networks.md) for details.
**Params**:
* name (str): Name of the network
* driver (str): Name of the driver used to create the network
-
* options (dict): Driver options as a key-value dictionary
+* ipam (dict): Optional custom IP scheme for the network
+* check_duplicate (bool): Request daemon to check for networks with same name.
+ Default: `True`.
+* internal (bool): Restrict external access to the network. Default `False`.
+* labels (dict): Map of labels to set on the network. Default `None`.
+* enable_ipv6 (bool): Enable IPv6 on the network. Default `False`.
**Returns** (dict): The created network reference object
@@ -352,6 +355,8 @@ Inspect changes on a container's filesystem.
* container (str): container-id/name to be disconnected from a network
* net_id (str): network id
+* force (bool): Force the container to disconnect from a network.
+ Default: `False`
## events
| Support create network EnableIPv6 and Labels options
Check the remote API:
https://docs.docker.com/engine/reference/api/docker_remote_api_v1.23/#create-a-network
There are two missing JSON parameters:
```
EnableIPv6 - Enable IPv6 on the network
Labels - Labels to set on the network, specified as a map: {"key":"value" [,"key2":"value2"]}
``` | docker/docker-py | diff --git a/tests/integration/network_test.py b/tests/integration/network_test.py
index 27e1b14d..6726db4b 100644
--- a/tests/integration/network_test.py
+++ b/tests/integration/network_test.py
@@ -115,7 +115,8 @@ class TestNetworks(helpers.BaseTestCase):
network_data = self.client.inspect_network(net_id)
self.assertEqual(
list(network_data['Containers'].keys()),
- [container['Id']])
+ [container['Id']]
+ )
with pytest.raises(docker.errors.APIError):
self.client.connect_container_to_network(container, net_id)
@@ -127,6 +128,33 @@ class TestNetworks(helpers.BaseTestCase):
with pytest.raises(docker.errors.APIError):
self.client.disconnect_container_from_network(container, net_id)
+ @requires_api_version('1.22')
+ def test_connect_and_force_disconnect_container(self):
+ net_name, net_id = self.create_network()
+
+ container = self.client.create_container('busybox', 'top')
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ network_data = self.client.inspect_network(net_id)
+ self.assertFalse(network_data.get('Containers'))
+
+ self.client.connect_container_to_network(container, net_id)
+ network_data = self.client.inspect_network(net_id)
+ self.assertEqual(
+ list(network_data['Containers'].keys()),
+ [container['Id']]
+ )
+
+ self.client.disconnect_container_from_network(container, net_id, True)
+ network_data = self.client.inspect_network(net_id)
+ self.assertFalse(network_data.get('Containers'))
+
+ with pytest.raises(docker.errors.APIError):
+ self.client.disconnect_container_from_network(
+ container, net_id, force=True
+ )
+
@requires_api_version('1.22')
def test_connect_with_aliases(self):
net_name, net_id = self.create_network()
@@ -300,7 +328,8 @@ class TestNetworks(helpers.BaseTestCase):
net_name, net_id = self.create_network()
with self.assertRaises(docker.errors.APIError):
self.client.create_network(net_name, check_duplicate=True)
- self.client.create_network(net_name, check_duplicate=False)
+ net_id = self.client.create_network(net_name, check_duplicate=False)
+ self.tmp_networks.append(net_id['Id'])
@requires_api_version('1.22')
def test_connect_with_links(self):
@@ -387,3 +416,27 @@ class TestNetworks(helpers.BaseTestCase):
_, net_id = self.create_network(internal=True)
net = self.client.inspect_network(net_id)
assert net['Internal'] is True
+
+ @requires_api_version('1.23')
+ def test_create_network_with_labels(self):
+ _, net_id = self.create_network(labels={
+ 'com.docker.py.test': 'label'
+ })
+
+ net = self.client.inspect_network(net_id)
+ assert 'Labels' in net
+ assert len(net['Labels']) == 1
+ assert net['Labels'] == {
+ 'com.docker.py.test': 'label'
+ }
+
+ @requires_api_version('1.23')
+ def test_create_network_with_labels_wrong_type(self):
+ with pytest.raises(TypeError):
+ self.create_network(labels=['com.docker.py.test=label', ])
+
+ @requires_api_version('1.23')
+ def test_create_network_ipv6_enabled(self):
+ _, net_id = self.create_network(enable_ipv6=True)
+ net = self.client.inspect_network(net_id)
+ assert net['EnableIPv6'] is True
diff --git a/tests/unit/network_test.py b/tests/unit/network_test.py
index 5bba9db2..2521688d 100644
--- a/tests/unit/network_test.py
+++ b/tests/unit/network_test.py
@@ -184,4 +184,4 @@ class NetworkTest(DockerClientTest):
self.assertEqual(
json.loads(post.call_args[1]['data']),
- {'container': container_id})
+ {'Container': container_id})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 1.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/docker/docker-py.git@24bfb99e05d57a7a098a81fb86ea7b93cff62661#egg=docker_py
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.5.3
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
websocket-client==0.32.0
zipp==3.6.0
| name: docker-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.5.3
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- websocket-client==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/docker-py
| [
"tests/unit/network_test.py::NetworkTest::test_disconnect_container_from_network"
]
| []
| [
"tests/unit/network_test.py::NetworkTest::test_connect_container_to_network",
"tests/unit/network_test.py::NetworkTest::test_create_network",
"tests/unit/network_test.py::NetworkTest::test_inspect_network",
"tests/unit/network_test.py::NetworkTest::test_list_networks",
"tests/unit/network_test.py::NetworkTest::test_remove_network"
]
| []
| Apache License 2.0 | 734 | [
"docker/api/network.py",
"docs/api.md"
]
| [
"docker/api/network.py",
"docs/api.md"
]
|
|
napjon__krisk-37 | 7aeaac1b566ffe71d7282c41eb09567845eecf01 | 2016-09-01 04:53:45 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | codecov-io: ## [Current coverage](https://codecov.io/gh/napjon/krisk/pull/37?src=pr) is 91.91% (diff: 81.57%)
> Merging [#37](https://codecov.io/gh/napjon/krisk/pull/37?src=pr) into [0.2-develop](https://codecov.io/gh/napjon/krisk/branch/0.2-develop?src=pr) will decrease coverage by **0.24%**
```diff
@@ 0.2-develop #37 diff @@
=============================================
Files 10 10
Lines 332 297 -35
Methods 0 0
Messages 0 0
Branches 37 37
=============================================
- Hits 306 273 -33
+ Misses 12 11 -1
+ Partials 14 13 -1
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [7aeaac1...292c9f8](https://codecov.io/gh/napjon/krisk/compare/7aeaac1b566ffe71d7282c41eb09567845eecf01...292c9f8d2d28a9b46be33d521b2ca92b8a98c054?src=pr) | diff --git a/krisk/chart.py b/krisk/chart.py
index 9517dff..44a75c6 100644
--- a/krisk/chart.py
+++ b/krisk/chart.py
@@ -152,11 +152,11 @@ class Chart(object):
raise TypeError('Chart Type not supported')
else:
f_columns = []
- for c in columns:
- if isinstance(c, str):
- key, unit = c, ' '
- elif isinstance(c, tuple):
- key, unit = c
+ for col in columns:
+ if isinstance(col, str):
+ key, unit = col, ' '
+ elif isinstance(col, tuple):
+ key, unit = col
else:
raise TypeError('Columns type not supported')
@@ -446,9 +446,9 @@ class Chart(object):
(self._get_resync_option_strings(self.option))
def _get_duplicated(self):
- c = deepcopy(self)
- c._chartId = str(uuid.uuid4())
- return c
+ chart = deepcopy(self)
+ chart._chartId = str(uuid.uuid4())
+ return chart
# ----------------------------------------------------------------------
# Saving chart option
diff --git a/krisk/plot/__init__.py b/krisk/plot/__init__.py
index f4e3be2..0ed9354 100644
--- a/krisk/plot/__init__.py
+++ b/krisk/plot/__init__.py
@@ -4,7 +4,7 @@ from krisk.plot.make_chart import make_chart
def bar(df,
x,
y=None,
- category=None,
+ c=None,
how='count',
stacked=False,
annotate=None):
@@ -34,22 +34,15 @@ def bar(df,
"""
# TODO: add optional argument trendline
- kwargs = {}
- kwargs['x'] = x
- kwargs['y'] = y
- kwargs['category'] = category
- kwargs['how'] = how
- kwargs['type'] = 'bar'
- kwargs['stacked'] = stacked
- kwargs['annotate'] = 'top' if annotate is True else annotate
- return make_chart(df, **kwargs)
+ return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,
+ annotate='top' if annotate == True else annotate)
def line(df,
x,
y=None,
- category=None,
+ c=None,
how=None,
stacked=False,
area=False,
@@ -63,8 +56,8 @@ def line(df,
columns to be used as category axis
y: string, default to None
if None, use count of category value. otherwise aggregate based on y columns
- category: string, default to None
- another grouping columns inside x-axis
+ c: string, default to None
+ category column inside x-axis
how: string, default to None
to be passed to pd.group_by(x).aggregate(how). Can be mean,median, or any
reduced operations.
@@ -78,22 +71,13 @@ def line(df,
-------
Chart Object
"""
- kwargs = {}
- kwargs['x'] = x
- kwargs['y'] = y
- kwargs['category'] = category
- kwargs['how'] = how
- kwargs['type'] = 'line'
- kwargs['stacked'] = stacked
- kwargs['area'] = area
- kwargs['annotate'] = 'top' if annotate is True else annotate
-
- return make_chart(df, **kwargs)
+ return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,
+ annotate='top' if annotate == True else annotate)
def hist(df,
x,
- category=None,
+ c=None,
bins=10,
normed=False,
stacked=False,
@@ -105,7 +89,7 @@ def hist(df,
data to be used for the chart
x: string
columns to be used as category axis
- category: string, default to None
+ c: string, default to None
another grouping columns inside x-axis
bins: int, default to 10
Set number of bins in histogram
@@ -121,19 +105,11 @@ def hist(df,
-------
Chart Object
"""
- kwargs = {}
- kwargs['x'] = x
- kwargs['category'] = category
- kwargs['bins'] = bins
- kwargs['type'] = 'hist'
- kwargs['normed'] = normed
- kwargs['stacked'] = stacked
- kwargs['annotate'] = 'top' if annotate is True else annotate
-
- return make_chart(df, **kwargs)
+ return make_chart(df,type='hist',x=x,c=c,bins=bins,normed=normed,stacked=stacked,
+ annotate='top' if annotate == True else annotate)
+
-
-def scatter(df, x, y, size=None, category=None, size_px=(10, 70)):
+def scatter(df, x, y, s=None, c=None, size_px=(10, 70)):
"""
Parameters
----------
@@ -141,9 +117,9 @@ def scatter(df, x, y, size=None, category=None, size_px=(10, 70)):
data to be used for the chart
x,y: string, columns in pd.DataFrame
Used as coordinate in scatter chart
- size: string, columns in pd.DataFrame default to None
+ s: string, columns in pd.DataFrame default to None
Used as sizing value of the scatter points
- category: string, default to None
+ c: string, default to None
column used as grouping color category
size_px: tuple, default to (10,70)
boundary size, lower and upper limit in pixel for min-max scatter points
@@ -152,14 +128,5 @@ def scatter(df, x, y, size=None, category=None, size_px=(10, 70)):
-------
Chart Object
"""
-
- kwargs = {}
- kwargs['x'] = x
- kwargs['y'] = y
- kwargs['category'] = category
- kwargs['size'] = size
- #kwargs['saturate'] = saturate #TODO: Fix saturate
- kwargs['size_px'] = size_px
- kwargs['type'] = 'scatter'
-
- return make_chart(df, **kwargs)
+ #TODO add saturation
+ return make_chart(df,type='scatter',x=x,y=y,s=s,c=c,size_px=size_px)
diff --git a/krisk/plot/bar_line.py b/krisk/plot/bar_line.py
index e2e2f19..5884aa4 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/bar_line.py
@@ -5,21 +5,22 @@ import pandas as pd
from krisk.plot.make_chart import insert_series_data
-def set_bar_line_chart(chart, df, x, category, **kwargs):
+def set_bar_line_chart(chart, df, x, c, **kwargs):
+ """Construct Bar, Line, and Histogram"""
data = None
chart_type = kwargs['type']
if chart_type in ['bar', 'line']:
- data = get_bar_line_data(df, x, category, **kwargs)
+ data = get_bar_line_data(df, x, c, **kwargs)
chart.option['xAxis']['data'] = data.index.values.tolist()
elif chart_type == 'hist':
chart_type = 'bar'
- data, bins = get_hist_data(df, x, category, **kwargs)
+ data, bins = get_hist_data(df, x, c, **kwargs)
chart.option['xAxis']['data'] = bins
- if category:
+ if c:
# append data for every category
for cat in data.columns:
insert_series_data(data[cat], x, chart_type, chart, cat)
@@ -31,9 +32,9 @@ def set_bar_line_chart(chart, df, x, category, **kwargs):
########Provide stacked,annotate, area for bar line hist#################
d_annotate = {'normal': {'show': True, 'position': 'top'}}
- if category and kwargs['stacked']:
+ if c and kwargs['stacked']:
for s in series:
- s['stack'] = category
+ s['stack'] = c
if chart_type == 'line' and kwargs['area']:
s['areaStyle'] = {'normal': {}}
@@ -52,36 +53,36 @@ def set_bar_line_chart(chart, df, x, category, **kwargs):
# TODO: make annotate receive all kinds supported in echarts.
-def get_bar_line_data(df, x, category, y, **kwargs):
-
- if category:
- if y is None:
- data = pd.crosstab(df[x], df[category])
- else:
- data = df.pivot_table(
+def get_bar_line_data(df, x, c, y, **kwargs):
+ """Get Bar and Line manipulated data"""
+
+ if c and y:
+ data = df.pivot_table(
index=x,
values=y,
- columns=category,
+ columns=c,
aggfunc=kwargs['how'],
fill_value=0)
+ elif c and y is None:
+ data = pd.crosstab(df[x], df[c])
+ elif c is None and y:
+ data = df.groupby(x)[y].agg(kwargs['how'])
else:
- if y is None:
- data = df[x].value_counts()
- else:
- raise AssertionError('Use y in category instead')
-
+ data = df[x].value_counts()
+
return data
-def get_hist_data(df, x, category, **kwargs):
+def get_hist_data(df, x, c, **kwargs):
+ """Get Histogram manipulated data"""
y_val, x_val = np.histogram(
df[x], bins=kwargs['bins'], normed=kwargs['normed'])
bins = x_val.astype(int).tolist()
- if category:
+ if c:
data = pd.DataFrame()
- for cat, sub in df.groupby(category):
+ for cat, sub in df.groupby(c):
data[cat] = (pd.cut(sub[x], x_val).value_counts(
sort=False, normalize=kwargs['normed']))
else:
diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index 31f9980..ceab454 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -38,9 +38,6 @@ def make_chart(df, **kwargs):
chart = Chart(**kwargs)
chart._kwargs_chart_['data_columns'] = df.columns
- x = kwargs['x']
- y = kwargs.get('y')
- category = kwargs['category']
if kwargs['type'] in ['bar', 'line', 'hist']:
set_bar_line_chart(chart, df, **kwargs)
diff --git a/krisk/plot/points.py b/krisk/plot/points.py
index 3eb7d3e..a646bf6 100644
--- a/krisk/plot/points.py
+++ b/krisk/plot/points.py
@@ -2,16 +2,16 @@ from copy import deepcopy
from krisk.plot.make_chart import insert_series_data
-def set_scatter_chart(chart, df, x, y, category, **kwargs):
+def set_scatter_chart(chart, df, x, y, c, **kwargs):
chart.option['xAxis'] = {'type': 'value', 'name': x, 'max': int(df[x].max())}
chart.option['yAxis'] = {'type': 'value', 'name': y, 'max': int(df[y].max())}
chart.option['visualMap'] = []
cols = [x, y]
- size = kwargs['size']
- if size is not None:
- cols.append(size)
+ s = kwargs['s']
+ if s is not None:
+ cols.append(s)
vmap_template_size = {'show': False,
'dimension': 2,
@@ -21,8 +21,8 @@ def set_scatter_chart(chart, df, x, y, category, **kwargs):
'inRange': {'symbolSize': [10, 70]}}
vmap_size = deepcopy(vmap_template_size)
- vmap_size['min'] = df[size].min()
- vmap_size['max'] = df[size].max()
+ vmap_size['min'] = df[s].min()
+ vmap_size['max'] = df[s].max()
vmap_size['inRange']['symbolSize'] = list(kwargs['size_px'][:2])
chart.option['visualMap'].append(vmap_size)
@@ -39,9 +39,9 @@ def set_scatter_chart(chart, df, x, y, category, **kwargs):
columns = cols + df.columns.difference(cols).tolist()
chart._kwargs_chart_['columns'] = columns
data = df[columns]
- if category:
+ if c:
#Iterate and append Data for every category
- for cat, subset in data.groupby(category):
+ for cat, subset in data.groupby(c):
insert_series_data(subset, x, kwargs['type'], chart, cat)
else:
insert_series_data(data, x, kwargs['type'], chart)
diff --git a/notebooks/Intro.ipynb b/notebooks/Intro.ipynb
index 6d5333e..a850d7d 100644
--- a/notebooks/Intro.ipynb
+++ b/notebooks/Intro.ipynb
@@ -161,8 +161,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#9fcb27b2-feba-4fdd-9389-786b3f607b67').attr('id','9fcb27b2-feba-4fdd-9389-786b3f607b67'+'_old');\n",
- "element.append('<div id=\"9fcb27b2-feba-4fdd-9389-786b3f607b67\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#3ccf5480-c67f-4d2d-a294-398da14c8f2c').attr('id','3ccf5480-c67f-4d2d-a294-398da14c8f2c'+'_old');\n",
+ "element.append('<div id=\"3ccf5480-c67f-4d2d-a294-398da14c8f2c\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -170,43 +170,43 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"9fcb27b2-feba-4fdd-9389-786b3f607b67\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"3ccf5480-c67f-4d2d-a294-398da14c8f2c\"),\"\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"yAxis\": {},\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Americas\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"continent\",\n",
" \"data\": [\n",
" 624,\n",
" 396,\n",
" 360,\n",
" 300,\n",
" 24\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"continent\"\n",
+ " ]\n",
" }\n",
" ],\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Americas\",\n",
- " \"Oceania\"\n",
- " ]\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" },\n",
" \"legend\": {\n",
" \"data\": []\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -216,7 +216,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x104a52fd0>"
+ "<krisk.chart.Chart at 0x10493f160>"
]
},
"execution_count": 4,
@@ -228,6 +228,13 @@
"kk.bar(df,'continent')"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Note that by default, the plot already used a tooltip. You can hover the plot to see the y-value."
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -237,7 +244,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 6,
"metadata": {
"collapsed": false
},
@@ -246,8 +253,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#a6533f9b-4f23-4990-b858-2e60affcc1e1').attr('id','a6533f9b-4f23-4990-b858-2e60affcc1e1'+'_old');\n",
- "element.append('<div id=\"a6533f9b-4f23-4990-b858-2e60affcc1e1\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#ee6ef362-832f-45a4-87a9-e62e0e214cc2').attr('id','ee6ef362-832f-45a4-87a9-e62e0e214cc2'+'_old');\n",
+ "element.append('<div id=\"ee6ef362-832f-45a4-87a9-e62e0e214cc2\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -255,43 +262,43 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"a6533f9b-4f23-4990-b858-2e60affcc1e1\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"ee6ef362-832f-45a4-87a9-e62e0e214cc2\"),\"\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"yAxis\": {},\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"continent\",\n",
" \"data\": [\n",
" 2193.755,\n",
" 7136.11,\n",
" 7902.15,\n",
" 14469.476,\n",
" 18621.609\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"continent\"\n",
+ " ]\n",
" }\n",
" ],\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ]\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" },\n",
" \"legend\": {\n",
" \"data\": []\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -301,10 +308,10 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10369ffd0>"
+ "<krisk.chart.Chart at 0x10676cf98>"
]
},
- "execution_count": 5,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -322,7 +329,7 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 7,
"metadata": {
"collapsed": false
},
@@ -331,8 +338,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#308da64e-53be-49d3-8e78-1a0213d9cc08').attr('id','308da64e-53be-49d3-8e78-1a0213d9cc08'+'_old');\n",
- "element.append('<div id=\"308da64e-53be-49d3-8e78-1a0213d9cc08\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#e68bfdf7-fff3-49b6-aadd-dd75abd1bcca').attr('id','e68bfdf7-fff3-49b6-aadd-dd75abd1bcca'+'_old');\n",
+ "element.append('<div id=\"e68bfdf7-fff3-49b6-aadd-dd75abd1bcca\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -340,14 +347,30 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"308da64e-53be-49d3-8e78-1a0213d9cc08\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"e68bfdf7-fff3-49b6-aadd-dd75abd1bcca\"),\"\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"yAxis\": {},\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
+ " 1972,\n",
+ " 1977,\n",
+ " 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
+ " 1997,\n",
+ " 2002,\n",
+ " 2007\n",
+ " ]\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Africa\",\n",
" \"data\": [\n",
" 1252.572,\n",
" 1385.236,\n",
@@ -361,11 +384,11 @@
" 2378.76,\n",
" 2599.385,\n",
" 3089.033\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Americas\",\n",
" \"data\": [\n",
" 4079.063,\n",
" 4616.044,\n",
@@ -379,11 +402,11 @@
" 8889.301,\n",
" 9287.677,\n",
" 11003.032\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Asia\",\n",
" \"data\": [\n",
" 5195.484,\n",
" 5787.733,\n",
@@ -397,11 +420,11 @@
" 9834.093,\n",
" 10174.09,\n",
" 12473.027\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Europe\",\n",
" \"data\": [\n",
" 5661.057,\n",
" 6963.013,\n",
@@ -415,11 +438,11 @@
" 19076.782,\n",
" 21711.732,\n",
" 25054.482\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Oceania\",\n",
" \"data\": [\n",
" 10298.086,\n",
" 11598.522,\n",
@@ -433,31 +456,11 @@
" 24024.175,\n",
" 26938.778,\n",
" 29810.188\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
+ " ]\n",
" }\n",
" ],\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " 1952,\n",
- " 1957,\n",
- " 1962,\n",
- " 1967,\n",
- " 1972,\n",
- " 1977,\n",
- " 1982,\n",
- " 1987,\n",
- " 1992,\n",
- " 1997,\n",
- " 2002,\n",
- " 2007\n",
- " ]\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" },\n",
" \"legend\": {\n",
" \"data\": [\n",
@@ -468,7 +471,11 @@
" \"Oceania\"\n",
" ]\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -478,16 +485,16 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x1036b3208>"
+ "<krisk.chart.Chart at 0x106784240>"
]
},
- "execution_count": 6,
+ "execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "kk.bar(df,'year',y='gdpPercap',category='continent',how='mean')"
+ "kk.bar(df,'year',y='gdpPercap',c='continent',how='mean')"
]
},
{
@@ -499,7 +506,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 8,
"metadata": {
"collapsed": false
},
@@ -508,8 +515,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#ba21a86d-8828-4051-aa10-8535acd96a7f').attr('id','ba21a86d-8828-4051-aa10-8535acd96a7f'+'_old');\n",
- "element.append('<div id=\"ba21a86d-8828-4051-aa10-8535acd96a7f\" style=\"width: 1000px;height:400px;\"></div>');\n",
+ "$('#67393873-1e86-4d53-b796-cc60d3a0a0ce').attr('id','67393873-1e86-4d53-b796-cc60d3a0a0ce'+'_old');\n",
+ "element.append('<div id=\"67393873-1e86-4d53-b796-cc60d3a0a0ce\" style=\"width: 1000px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -517,14 +524,31 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"ba21a86d-8828-4051-aa10-8535acd96a7f\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"67393873-1e86-4d53-b796-cc60d3a0a0ce\"),\"\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"yAxis\": {},\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
+ " 1972,\n",
+ " 1977,\n",
+ " 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
+ " 1997,\n",
+ " 2002,\n",
+ " 2007\n",
+ " ]\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"stack\": \"continent\",\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Africa\",\n",
" \"data\": [\n",
" 1252.572,\n",
" 1385.236,\n",
@@ -538,12 +562,12 @@
" 2378.76,\n",
" 2599.385,\n",
" 3089.033\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\",\n",
- " \"stack\": \"continent\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"stack\": \"continent\",\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Americas\",\n",
" \"data\": [\n",
" 4079.063,\n",
" 4616.044,\n",
@@ -557,12 +581,12 @@
" 8889.301,\n",
" 9287.677,\n",
" 11003.032\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\",\n",
- " \"stack\": \"continent\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"stack\": \"continent\",\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Asia\",\n",
" \"data\": [\n",
" 5195.484,\n",
" 5787.733,\n",
@@ -576,12 +600,12 @@
" 9834.093,\n",
" 10174.09,\n",
" 12473.027\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\",\n",
- " \"stack\": \"continent\"\n",
+ " ]\n",
" },\n",
" {\n",
+ " \"stack\": \"continent\",\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Europe\",\n",
" \"data\": [\n",
" 5661.057,\n",
" 6963.013,\n",
@@ -595,18 +619,18 @@
" 19076.782,\n",
" 21711.732,\n",
" 25054.482\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\",\n",
- " \"stack\": \"continent\"\n",
+ " ]\n",
" },\n",
" {\n",
" \"label\": {\n",
" \"normal\": {\n",
- " \"show\": true,\n",
- " \"position\": \"top\"\n",
+ " \"position\": \"top\",\n",
+ " \"show\": true\n",
" }\n",
" },\n",
+ " \"stack\": \"continent\",\n",
+ " \"type\": \"bar\",\n",
+ " \"name\": \"Oceania\",\n",
" \"data\": [\n",
" 10298.086,\n",
" 11598.522,\n",
@@ -620,32 +644,11 @@
" 24024.175,\n",
" 26938.778,\n",
" 29810.188\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\",\n",
- " \"stack\": \"continent\"\n",
+ " ]\n",
" }\n",
" ],\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " 1952,\n",
- " 1957,\n",
- " 1962,\n",
- " 1967,\n",
- " 1972,\n",
- " 1977,\n",
- " 1982,\n",
- " 1987,\n",
- " 1992,\n",
- " 1997,\n",
- " 2002,\n",
- " 2007\n",
- " ]\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" },\n",
" \"legend\": {\n",
" \"data\": [\n",
@@ -656,7 +659,11 @@
" \"Oceania\"\n",
" ]\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -666,16 +673,16 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x107005390>"
+ "<krisk.chart.Chart at 0x106784588>"
]
},
- "execution_count": 7,
+ "execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "(kk.bar(df,'year',y='gdpPercap',category='continent',how='mean',stacked=True,annotate=True)\n",
+ "(kk.bar(df,'year',y='gdpPercap',c='continent',how='mean',stacked=True,annotate=True)\n",
" .set_size(width=1000))"
]
},
@@ -688,7 +695,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 9,
"metadata": {
"collapsed": false
},
@@ -697,8 +704,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#a5a0f6a1-991c-4e30-817e-a12af6c3c25d').attr('id','a5a0f6a1-991c-4e30-817e-a12af6c3c25d'+'_old');\n",
- "element.append('<div id=\"a5a0f6a1-991c-4e30-817e-a12af6c3c25d\" style=\"width: 1000px;height:400px;\"></div>');\n",
+ "$('#20bbc738-2723-40a7-bf73-292596f9c486').attr('id','20bbc738-2723-40a7-bf73-292596f9c486'+'_old');\n",
+ "element.append('<div id=\"20bbc738-2723-40a7-bf73-292596f9c486\" style=\"width: 1000px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -706,14 +713,35 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"a5a0f6a1-991c-4e30-817e-a12af6c3c25d\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"20bbc738-2723-40a7-bf73-292596f9c486\"),\"\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"yAxis\": {},\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
+ " 1972,\n",
+ " 1977,\n",
+ " 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
+ " 1997,\n",
+ " 2002,\n",
+ " 2007\n",
+ " ]\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"label\": {\n",
+ " \"normal\": {\n",
+ " \"position\": \"top\",\n",
+ " \"show\": true\n",
+ " }\n",
+ " },\n",
+ " \"stack\": \"continent\",\n",
" \"data\": [\n",
" 1252.572,\n",
" 1385.236,\n",
@@ -728,20 +756,20 @@
" 2599.385,\n",
" 3089.033\n",
" ],\n",
+ " \"areaStyle\": {\n",
+ " \"normal\": {}\n",
+ " },\n",
" \"type\": \"line\",\n",
- " \"name\": \"Africa\",\n",
+ " \"name\": \"Africa\"\n",
+ " },\n",
+ " {\n",
" \"label\": {\n",
" \"normal\": {\n",
- " \"show\": true,\n",
- " \"position\": \"top\"\n",
+ " \"position\": \"top\",\n",
+ " \"show\": true\n",
" }\n",
" },\n",
" \"stack\": \"continent\",\n",
- " \"areaStyle\": {\n",
- " \"normal\": {}\n",
- " }\n",
- " },\n",
- " {\n",
" \"data\": [\n",
" 4079.063,\n",
" 4616.044,\n",
@@ -756,20 +784,20 @@
" 9287.677,\n",
" 11003.032\n",
" ],\n",
+ " \"areaStyle\": {\n",
+ " \"normal\": {}\n",
+ " },\n",
" \"type\": \"line\",\n",
- " \"name\": \"Americas\",\n",
+ " \"name\": \"Americas\"\n",
+ " },\n",
+ " {\n",
" \"label\": {\n",
" \"normal\": {\n",
- " \"show\": true,\n",
- " \"position\": \"top\"\n",
+ " \"position\": \"top\",\n",
+ " \"show\": true\n",
" }\n",
" },\n",
" \"stack\": \"continent\",\n",
- " \"areaStyle\": {\n",
- " \"normal\": {}\n",
- " }\n",
- " },\n",
- " {\n",
" \"data\": [\n",
" 5195.484,\n",
" 5787.733,\n",
@@ -784,20 +812,20 @@
" 10174.09,\n",
" 12473.027\n",
" ],\n",
+ " \"areaStyle\": {\n",
+ " \"normal\": {}\n",
+ " },\n",
" \"type\": \"line\",\n",
- " \"name\": \"Asia\",\n",
+ " \"name\": \"Asia\"\n",
+ " },\n",
+ " {\n",
" \"label\": {\n",
" \"normal\": {\n",
- " \"show\": true,\n",
- " \"position\": \"top\"\n",
+ " \"position\": \"top\",\n",
+ " \"show\": true\n",
" }\n",
" },\n",
" \"stack\": \"continent\",\n",
- " \"areaStyle\": {\n",
- " \"normal\": {}\n",
- " }\n",
- " },\n",
- " {\n",
" \"data\": [\n",
" 5661.057,\n",
" 6963.013,\n",
@@ -812,20 +840,20 @@
" 21711.732,\n",
" 25054.482\n",
" ],\n",
+ " \"areaStyle\": {\n",
+ " \"normal\": {}\n",
+ " },\n",
" \"type\": \"line\",\n",
- " \"name\": \"Europe\",\n",
+ " \"name\": \"Europe\"\n",
+ " },\n",
+ " {\n",
" \"label\": {\n",
" \"normal\": {\n",
- " \"show\": true,\n",
- " \"position\": \"top\"\n",
+ " \"position\": \"top\",\n",
+ " \"show\": true\n",
" }\n",
" },\n",
" \"stack\": \"continent\",\n",
- " \"areaStyle\": {\n",
- " \"normal\": {}\n",
- " }\n",
- " },\n",
- " {\n",
" \"data\": [\n",
" 10298.086,\n",
" 11598.522,\n",
@@ -840,45 +868,15 @@
" 26938.778,\n",
" 29810.188\n",
" ],\n",
- " \"type\": \"line\",\n",
- " \"name\": \"Oceania\",\n",
- " \"label\": {\n",
- " \"normal\": {\n",
- " \"show\": true,\n",
- " \"position\": \"top\"\n",
- " }\n",
- " },\n",
- " \"stack\": \"continent\",\n",
" \"areaStyle\": {\n",
" \"normal\": {}\n",
- " }\n",
+ " },\n",
+ " \"type\": \"line\",\n",
+ " \"name\": \"Oceania\"\n",
" }\n",
" ],\n",
- " \"tooltip\": {\n",
- " \"trigger\": \"axis\",\n",
- " \"fontStyle\": \"normal\",\n",
- " \"fontFamily\": \"sans-serif\",\n",
- " \"fontSize\": 14,\n",
- " \"triggerOn\": \"mousemove\",\n",
- " \"axisPointer\": {\n",
- " \"type\": \"shadow\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " 1952,\n",
- " 1957,\n",
- " 1962,\n",
- " 1967,\n",
- " 1972,\n",
- " 1977,\n",
- " 1982,\n",
- " 1987,\n",
- " 1992,\n",
- " 1997,\n",
- " 2002,\n",
- " 2007\n",
- " ]\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" },\n",
" \"legend\": {\n",
" \"data\": [\n",
@@ -889,7 +887,16 @@
" \"Oceania\"\n",
" ]\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"tooltip\": {\n",
+ " \"fontStyle\": \"normal\",\n",
+ " \"triggerOn\": \"mousemove\",\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"shadow\"\n",
+ " },\n",
+ " \"trigger\": \"axis\",\n",
+ " \"fontSize\": 14,\n",
+ " \"fontFamily\": \"sans-serif\"\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -899,16 +906,16 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x1070054a8>"
+ "<krisk.chart.Chart at 0x106784b00>"
]
},
- "execution_count": 8,
+ "execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "p = kk.line(df,'year',y='gdpPercap',category='continent',how='mean',\n",
+ "p = kk.line(df,'year',y='gdpPercap',c='continent',how='mean',\n",
" stacked=True,annotate='all',area=True)\n",
"p.set_tooltip_style(trigger='axis',axis_pointer='shadow')\n",
"p.set_size(width=1000)"
@@ -923,7 +930,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 10,
"metadata": {
"collapsed": false
},
@@ -932,8 +939,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#86022075-1dc6-45f2-9dfe-e9b05f0235d2').attr('id','86022075-1dc6-45f2-9dfe-e9b05f0235d2'+'_old');\n",
- "element.append('<div id=\"86022075-1dc6-45f2-9dfe-e9b05f0235d2\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#e8b87d9e-fa08-4511-acb5-2146f1ba883e').attr('id','e8b87d9e-fa08-4511-acb5-2146f1ba883e'+'_old');\n",
+ "element.append('<div id=\"e8b87d9e-fa08-4511-acb5-2146f1ba883e\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -941,16 +948,120 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"86022075-1dc6-45f2-9dfe-e9b05f0235d2\"),\"vintage\");\n",
+ " var myChart = echarts.init(document.getElementById(\"e8b87d9e-fa08-4511-acb5-2146f1ba883e\"),\"vintage\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"yAxis\": {},\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " 23,\n",
+ " 24,\n",
+ " 24,\n",
+ " 25,\n",
+ " 25,\n",
+ " 26,\n",
+ " 27,\n",
+ " 27,\n",
+ " 28,\n",
+ " 28,\n",
+ " 29,\n",
+ " 30,\n",
+ " 30,\n",
+ " 31,\n",
+ " 31,\n",
+ " 32,\n",
+ " 33,\n",
+ " 33,\n",
+ " 34,\n",
+ " 34,\n",
+ " 35,\n",
+ " 35,\n",
+ " 36,\n",
+ " 37,\n",
+ " 37,\n",
+ " 38,\n",
+ " 38,\n",
+ " 39,\n",
+ " 40,\n",
+ " 40,\n",
+ " 41,\n",
+ " 41,\n",
+ " 42,\n",
+ " 43,\n",
+ " 43,\n",
+ " 44,\n",
+ " 44,\n",
+ " 45,\n",
+ " 46,\n",
+ " 46,\n",
+ " 47,\n",
+ " 47,\n",
+ " 48,\n",
+ " 48,\n",
+ " 49,\n",
+ " 50,\n",
+ " 50,\n",
+ " 51,\n",
+ " 51,\n",
+ " 52,\n",
+ " 53,\n",
+ " 53,\n",
+ " 54,\n",
+ " 54,\n",
+ " 55,\n",
+ " 56,\n",
+ " 56,\n",
+ " 57,\n",
+ " 57,\n",
+ " 58,\n",
+ " 59,\n",
+ " 59,\n",
+ " 60,\n",
+ " 60,\n",
+ " 61,\n",
+ " 61,\n",
+ " 62,\n",
+ " 63,\n",
+ " 63,\n",
+ " 64,\n",
+ " 64,\n",
+ " 65,\n",
+ " 66,\n",
+ " 66,\n",
+ " 67,\n",
+ " 67,\n",
+ " 68,\n",
+ " 69,\n",
+ " 69,\n",
+ " 70,\n",
+ " 70,\n",
+ " 71,\n",
+ " 71,\n",
+ " 72,\n",
+ " 73,\n",
+ " 73,\n",
+ " 74,\n",
+ " 74,\n",
+ " 75,\n",
+ " 76,\n",
+ " 76,\n",
+ " 77,\n",
+ " 77,\n",
+ " 78,\n",
+ " 79,\n",
+ " 79,\n",
+ " 80,\n",
+ " 80,\n",
+ " 81,\n",
+ " 82,\n",
+ " 82\n",
+ " ]\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"stack\": \"continent\",\n",
" \"data\": [\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -961,317 +1072,317 @@
" 0,\n",
" 0,\n",
" 0,\n",
+ " 2,\n",
+ " 1,\n",
" 0,\n",
+ " 2,\n",
" 3,\n",
- " 0,\n",
- " 1,\n",
" 3,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
" 6,\n",
" 3,\n",
- " 3,\n",
" 4,\n",
" 8,\n",
" 5,\n",
- " 7,\n",
" 6,\n",
- " 7,\n",
- " 10,\n",
+ " 8,\n",
" 9,\n",
- " 15,\n",
- " 13,\n",
- " 16,\n",
" 10,\n",
- " 12,\n",
- " 19,\n",
+ " 14,\n",
" 15,\n",
- " 12,\n",
" 15,\n",
+ " 14,\n",
+ " 10,\n",
+ " 18,\n",
+ " 21,\n",
+ " 13,\n",
" 18,\n",
+ " 19,\n",
+ " 19,\n",
+ " 15,\n",
" 18,\n",
" 14,\n",
+ " 16,\n",
+ " 19,\n",
+ " 15,\n",
" 13,\n",
+ " 16,\n",
" 17,\n",
- " 9,\n",
- " 18,\n",
- " 18,\n",
- " 11,\n",
" 11,\n",
" 17,\n",
- " 13,\n",
- " 14,\n",
- " 14,\n",
- " 10,\n",
- " 12,\n",
" 12,\n",
+ " 15,\n",
" 10,\n",
- " 6,\n",
- " 9,\n",
+ " 7,\n",
+ " 11,\n",
" 9,\n",
- " 12,\n",
- " 6,\n",
- " 10,\n",
- " 3,\n",
+ " 14,\n",
+ " 7,\n",
+ " 7,\n",
" 8,\n",
- " 12,\n",
- " 6,\n",
- " 5,\n",
- " 5,\n",
" 10,\n",
- " 3,\n",
+ " 9,\n",
" 5,\n",
- " 4,\n",
- " 3,\n",
" 5,\n",
+ " 11,\n",
+ " 2,\n",
+ " 7,\n",
+ " 4,\n",
+ " 6,\n",
" 3,\n",
" 3,\n",
" 1,\n",
- " 2,\n",
" 3,\n",
- " 1,\n",
- " 2,\n",
" 2,\n",
+ " 1,\n",
+ " 4,\n",
" 2,\n",
" 0,\n",
" 2,\n",
- " 1,\n",
- " 5,\n",
- " 0,\n",
" 2,\n",
+ " 4,\n",
+ " 1,\n",
" 3,\n",
+ " 4,\n",
+ " 1,\n",
" 3,\n",
- " 2,\n",
- " 2,\n",
" 1,\n",
" 2,\n",
" 1,\n",
" 0,\n",
" 1,\n",
- " 1\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
" ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\",\n",
- " \"stack\": \"continent\"\n",
+ " \"name\": \"Africa\"\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"stack\": \"continent\",\n",
" \"data\": [\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
" 1,\n",
" 0,\n",
" 0,\n",
- " 4,\n",
" 0,\n",
" 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
- " 1,\n",
- " 1,\n",
+ " 3,\n",
+ " 0,\n",
+ " 2,\n",
" 2,\n",
- " 1,\n",
" 1,\n",
" 2,\n",
" 2,\n",
- " 0,\n",
- " 0,\n",
" 2,\n",
- " 3,\n",
- " 1,\n",
- " 0,\n",
- " 4,\n",
- " 0,\n",
- " 4,\n",
- " 1,\n",
" 2,\n",
- " 1,\n",
- " 1,\n",
" 0,\n",
+ " 3,\n",
" 2,\n",
+ " 1,\n",
" 4,\n",
- " 0,\n",
- " 2,\n",
- " 6,\n",
- " 2,\n",
+ " 1,\n",
" 3,\n",
+ " 4,\n",
+ " 1,\n",
" 0,\n",
- " 5,\n",
- " 2,\n",
" 3,\n",
- " 6,\n",
" 3,\n",
" 2,\n",
- " 4,\n",
+ " 7,\n",
" 3,\n",
" 2,\n",
+ " 5,\n",
+ " 4,\n",
+ " 7,\n",
" 3,\n",
+ " 5,\n",
+ " 5,\n",
" 2,\n",
" 4,\n",
+ " 6,\n",
" 5,\n",
" 3,\n",
- " 3,\n",
- " 4,\n",
- " 4,\n",
- " 4,\n",
- " 4,\n",
- " 4,\n",
- " 5,\n",
" 6,\n",
" 6,\n",
- " 3,\n",
- " 6,\n",
" 5,\n",
- " 2,\n",
- " 14,\n",
" 7,\n",
- " 2,\n",
+ " 9,\n",
+ " 6,\n",
+ " 7,\n",
+ " 7,\n",
+ " 8,\n",
+ " 13,\n",
" 10,\n",
" 10,\n",
- " 4,\n",
" 10,\n",
+ " 9,\n",
+ " 10,\n",
+ " 11,\n",
" 6,\n",
- " 7,\n",
" 9,\n",
- " 8,\n",
- " 1,\n",
- " 8,\n",
- " 4,\n",
- " 4,\n",
- " 8,\n",
- " 2,\n",
+ " 7,\n",
+ " 7,\n",
" 4,\n",
+ " 5,\n",
" 3,\n",
- " 1,\n",
- " 3,\n",
- " 2,\n",
- " 3,\n",
+ " 4,\n",
" 3,\n",
" 4,\n",
+ " 4,\n",
" 0,\n",
" 1,\n",
+ " 1,\n",
+ " 0,\n",
" 0,\n",
- " 1\n",
+ " 0\n",
" ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\",\n",
- " \"stack\": \"continent\"\n",
+ " \"name\": \"Americas\"\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"stack\": \"continent\",\n",
" \"data\": [\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
" 0,\n",
" 1,\n",
+ " 0,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 0,\n",
+ " 1,\n",
+ " 1,\n",
" 0,\n",
" 2,\n",
" 0,\n",
" 1,\n",
" 0,\n",
" 3,\n",
- " 0,\n",
- " 2,\n",
- " 4,\n",
" 1,\n",
+ " 5,\n",
" 0,\n",
+ " 1,\n",
" 3,\n",
" 5,\n",
" 3,\n",
- " 1,\n",
+ " 2,\n",
" 4,\n",
+ " 3,\n",
+ " 3,\n",
+ " 7,\n",
" 4,\n",
" 1,\n",
- " 5,\n",
- " 5,\n",
- " 3,\n",
- " 2,\n",
- " 6,\n",
+ " 7,\n",
" 6,\n",
" 0,\n",
- " 3,\n",
+ " 5,\n",
" 3,\n",
" 5,\n",
- " 4,\n",
" 2,\n",
- " 3,\n",
- " 6,\n",
- " 3,\n",
" 4,\n",
- " 4,\n",
- " 3,\n",
" 5,\n",
- " 1,\n",
- " 8,\n",
+ " 4,\n",
+ " 4,\n",
+ " 4,\n",
+ " 4,\n",
+ " 6,\n",
" 2,\n",
- " 1,\n",
- " 5,\n",
- " 10,\n",
" 7,\n",
- " 4,\n",
+ " 2,\n",
+ " 2,\n",
+ " 9,\n",
+ " 9,\n",
" 5,\n",
+ " 6,\n",
" 8,\n",
" 5,\n",
- " 5,\n",
- " 7,\n",
- " 10,\n",
- " 3,\n",
- " 5,\n",
- " 7,\n",
- " 5,\n",
- " 10,\n",
- " 4,\n",
" 7,\n",
" 7,\n",
- " 5,\n",
" 9,\n",
" 6,\n",
- " 4,\n",
+ " 3,\n",
" 8,\n",
" 11,\n",
- " 3,\n",
+ " 4,\n",
+ " 8,\n",
" 7,\n",
" 8,\n",
+ " 8,\n",
" 5,\n",
- " 14,\n",
+ " 6,\n",
+ " 11,\n",
+ " 5,\n",
+ " 8,\n",
+ " 10,\n",
+ " 5,\n",
+ " 15,\n",
" 7,\n",
" 7,\n",
+ " 9,\n",
" 7,\n",
" 5,\n",
- " 8,\n",
- " 3,\n",
" 7,\n",
- " 2,\n",
- " 5,\n",
" 3,\n",
+ " 4,\n",
+ " 4,\n",
" 2,\n",
" 6,\n",
" 2,\n",
+ " 2,\n",
+ " 3,\n",
" 1,\n",
- " 4,\n",
- " 1,\n",
- " 1,\n",
+ " 3,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
- " 1,\n",
- " 1,\n",
" 2\n",
" ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\",\n",
- " \"stack\": \"continent\"\n",
+ " \"name\": \"Asia\"\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"stack\": \"continent\",\n",
" \"data\": [\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1282,7 +1393,6 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1293,117 +1403,109 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
" 0,\n",
" 1,\n",
- " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
" 0,\n",
" 1,\n",
- " 3,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
" 1,\n",
" 0,\n",
" 0,\n",
- " 2,\n",
- " 4,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 0,\n",
" 0,\n",
+ " 1,\n",
" 0,\n",
" 1,\n",
" 1,\n",
- " 2,\n",
" 3,\n",
+ " 2,\n",
+ " 0,\n",
+ " 4,\n",
" 4,\n",
" 0,\n",
" 1,\n",
+ " 0,\n",
" 3,\n",
- " 3,\n",
- " 7,\n",
- " 5,\n",
" 6,\n",
+ " 1,\n",
+ " 4,\n",
" 7,\n",
- " 0,\n",
+ " 8,\n",
+ " 10,\n",
" 3,\n",
- " 7,\n",
- " 15,\n",
- " 12,\n",
" 8,\n",
+ " 21,\n",
+ " 11,\n",
+ " 27,\n",
+ " 29,\n",
+ " 10,\n",
+ " 20,\n",
+ " 15,\n",
+ " 17,\n",
" 16,\n",
- " 22,\n",
+ " 18,\n",
" 15,\n",
- " 7,\n",
- " 9,\n",
- " 13,\n",
- " 9,\n",
+ " 12,\n",
+ " 12,\n",
" 10,\n",
" 11,\n",
- " 15,\n",
- " 5,\n",
- " 14,\n",
- " 7,\n",
+ " 11,\n",
" 10,\n",
- " 6,\n",
- " 9,\n",
" 8,\n",
- " 5,\n",
- " 13,\n",
- " 4,\n",
- " 7,\n",
" 6,\n",
- " 7,\n",
- " 6,\n",
- " 5,\n",
- " 2,\n",
- " 3,\n",
" 4,\n",
- " 0,\n",
- " 2\n",
+ " 2,\n",
+ " 2,\n",
+ " 0\n",
" ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\",\n",
- " \"stack\": \"continent\"\n",
+ " \"name\": \"Europe\"\n",
" },\n",
" {\n",
+ " \"type\": \"bar\",\n",
+ " \"stack\": \"continent\",\n",
" \"data\": [\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 2,\n",
" 0,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1414,17 +1516,13 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1437,7 +1535,6 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 2,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1447,7 +1544,6 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 2,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1458,9 +1554,7 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
- " 1,\n",
" 0,\n",
" 0,\n",
" 0,\n",
@@ -1469,135 +1563,39 @@
" 0,\n",
" 0,\n",
" 0,\n",
- " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 2,\n",
+ " 3,\n",
+ " 3,\n",
" 1,\n",
" 0,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
" 0,\n",
" 0,\n",
+ " 2,\n",
" 0,\n",
+ " 2,\n",
" 0,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
" 0,\n",
- " 1\n",
+ " 0\n",
" ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\",\n",
- " \"stack\": \"continent\"\n",
+ " \"name\": \"Oceania\"\n",
" }\n",
" ],\n",
- " \"tooltip\": {\n",
- " \"trigger\": \"axis\",\n",
- " \"fontStyle\": \"normal\",\n",
- " \"fontSize\": 14,\n",
- " \"fontFamily\": \"sans-serif\",\n",
- " \"triggerOn\": \"mousemove\",\n",
- " \"axisPointer\": {\n",
- " \"type\": \"shadow\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " 69,\n",
- " 69,\n",
- " 69,\n",
- " 69,\n",
- " 69,\n",
- " 69,\n",
- " 69,\n",
- " 69,\n",
- " 70,\n",
- " 70,\n",
- " 70,\n",
- " 70,\n",
- " 70,\n",
- " 70,\n",
- " 70,\n",
- " 70,\n",
- " 71,\n",
- " 71,\n",
- " 71,\n",
- " 71,\n",
- " 71,\n",
- " 71,\n",
- " 71,\n",
- " 71,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 72,\n",
- " 73,\n",
- " 73,\n",
- " 73,\n",
- " 73,\n",
- " 73,\n",
- " 73,\n",
- " 73,\n",
- " 73,\n",
- " 74,\n",
- " 74,\n",
- " 74,\n",
- " 74,\n",
- " 74,\n",
- " 74,\n",
- " 74,\n",
- " 74,\n",
- " 75,\n",
- " 75,\n",
- " 75,\n",
- " 75,\n",
- " 75,\n",
- " 75,\n",
- " 75,\n",
- " 75,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 76,\n",
- " 77,\n",
- " 77,\n",
- " 77,\n",
- " 77,\n",
- " 77,\n",
- " 77,\n",
- " 77,\n",
- " 77,\n",
- " 78,\n",
- " 78,\n",
- " 78,\n",
- " 78,\n",
- " 78,\n",
- " 78,\n",
- " 78,\n",
- " 78,\n",
- " 79,\n",
- " 79,\n",
- " 79,\n",
- " 79,\n",
- " 79,\n",
- " 79,\n",
- " 79,\n",
- " 79,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 80,\n",
- " 81,\n",
- " 81\n",
- " ]\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" },\n",
" \"legend\": {\n",
" \"data\": [\n",
@@ -1608,7 +1606,16 @@
" \"Oceania\"\n",
" ]\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"tooltip\": {\n",
+ " \"fontStyle\": \"normal\",\n",
+ " \"triggerOn\": \"mousemove\",\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"shadow\"\n",
+ " },\n",
+ " \"trigger\": \"axis\",\n",
+ " \"fontSize\": 14,\n",
+ " \"fontFamily\": \"sans-serif\"\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1618,16 +1625,16 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x107005240>"
+ "<krisk.chart.Chart at 0x106760710>"
]
},
- "execution_count": 9,
+ "execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "p = (kk.hist(df,x='lifeExp',category='continent',stacked=True,bins=100))\n",
+ "p = (kk.hist(df,x='lifeExp',c='continent',stacked=True,bins=100))\n",
"p.set_tooltip_style(trigger='axis',axis_pointer='shadow')\n",
"p.set_theme('vintage')"
]
@@ -1641,7 +1648,7 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 11,
"metadata": {
"collapsed": false
},
@@ -1650,8 +1657,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#22b45fdb-7632-4501-9d94-bd643af6468e').attr('id','22b45fdb-7632-4501-9d94-bd643af6468e'+'_old');\n",
- "element.append('<div id=\"22b45fdb-7632-4501-9d94-bd643af6468e\" style=\"width: 1000px;height:500px;\"></div>');\n",
+ "$('#63aab6de-0759-45ed-8abc-f5e2bb842534').attr('id','63aab6de-0759-45ed-8abc-f5e2bb842534'+'_old');\n",
+ "element.append('<div id=\"63aab6de-0759-45ed-8abc-f5e2bb842534\" style=\"width: 1000px;height:500px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1659,16 +1666,43 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"22b45fdb-7632-4501-9d94-bd643af6468e\"),\"dark\");\n",
+ " var myChart = echarts.init(document.getElementById(\"63aab6de-0759-45ed-8abc-f5e2bb842534\"),\"dark\");\n",
" \n",
" var option = {\n",
- " \"title\": {\n",
- " \"left\": \"center\",\n",
- " \"top\": \"5%\",\n",
- " \"text\": \"GapMinder of 2007\"\n",
+ " \"toolbox\": {\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\",\n",
+ " \"align\": \"auto\",\n",
+ " \"feature\": {\n",
+ " \"dataZoom\": {\n",
+ " \"title\": \"Zoom\",\n",
+ " \"show\": true\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"title\": \"Reset\",\n",
+ " \"show\": true\n",
+ " },\n",
+ " \"saveAsImage\": {\n",
+ " \"title\": \"Download as Image\",\n",
+ " \"type\": \"png\",\n",
+ " \"show\": true\n",
+ " }\n",
+ " }\n",
+ " },\n",
+ " \"yAxis\": {\n",
+ " \"max\": 49357,\n",
+ " \"type\": \"value\",\n",
+ " \"name\": \"gdpPercap\"\n",
+ " },\n",
+ " \"xAxis\": {\n",
+ " \"max\": 82,\n",
+ " \"type\": \"value\",\n",
+ " \"name\": \"lifeExp\"\n",
" },\n",
" \"series\": [\n",
" {\n",
+ " \"type\": \"scatter\",\n",
" \"data\": [\n",
" [\n",
" 72.301,\n",
@@ -2087,10 +2121,10 @@
" 2007\n",
" ]\n",
" ],\n",
- " \"type\": \"scatter\",\n",
" \"name\": \"Africa\"\n",
" },\n",
" {\n",
+ " \"type\": \"scatter\",\n",
" \"data\": [\n",
" [\n",
" 75.32,\n",
@@ -2293,10 +2327,10 @@
" 2007\n",
" ]\n",
" ],\n",
- " \"type\": \"scatter\",\n",
" \"name\": \"Americas\"\n",
" },\n",
" {\n",
+ " \"type\": \"scatter\",\n",
" \"data\": [\n",
" [\n",
" 43.828,\n",
@@ -2563,10 +2597,10 @@
" 2007\n",
" ]\n",
" ],\n",
- " \"type\": \"scatter\",\n",
" \"name\": \"Asia\"\n",
" },\n",
" {\n",
+ " \"type\": \"scatter\",\n",
" \"data\": [\n",
" [\n",
" 76.423,\n",
@@ -2809,10 +2843,10 @@
" 2007\n",
" ]\n",
" ],\n",
- " \"type\": \"scatter\",\n",
" \"name\": \"Europe\"\n",
" },\n",
" {\n",
+ " \"type\": \"scatter\",\n",
" \"data\": [\n",
" [\n",
" 81.235,\n",
@@ -2831,43 +2865,18 @@
" 2007\n",
" ]\n",
" ],\n",
- " \"type\": \"scatter\",\n",
" \"name\": \"Oceania\"\n",
" }\n",
" ],\n",
- " \"toolbox\": {\n",
- " \"feature\": {\n",
- " \"saveAsImage\": {\n",
- " \"title\": \"Download as Image\",\n",
- " \"show\": true,\n",
- " \"type\": \"png\"\n",
- " },\n",
- " \"restore\": {\n",
- " \"title\": \"Reset\",\n",
- " \"show\": true\n",
- " },\n",
- " \"dataZoom\": {\n",
- " \"title\": \"Zoom\",\n",
- " \"show\": true\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"align\": \"auto\",\n",
- " \"orient\": \"horizontal\",\n",
- " \"bottom\": \"auto\"\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"formatter\": \"function (obj) {\\n var value = obj.value;\\n return 'country' + '\\uff1a' + value[4] + ' ' +'<br>'+'lifeExp' + '\\uff1a' + value[0] + ' ' +'<br>'+'gdpPercap' + '\\uff1a' + value[1] + ' ' +'<br>'+'pop' + '\\uff1a' + value[2] + ' ' +'<br>'+'continent' + '\\uff1a' + value[3] + ' ' +'<br>';\\n }\",\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
- " \"xAxis\": {\n",
- " \"max\": 82,\n",
- " \"type\": \"value\",\n",
- " \"name\": \"lifeExp\"\n",
+ " \"title\": {\n",
+ " \"left\": \"center\",\n",
+ " \"text\": \"GapMinder of 2007\",\n",
+ " \"top\": \"5%\"\n",
" },\n",
" \"legend\": {\n",
+ " \"orient\": \"vertical\",\n",
+ " \"top\": \"3%\",\n",
+ " \"align\": \"auto\",\n",
" \"data\": [\n",
" \"Africa\",\n",
" \"Americas\",\n",
@@ -2875,18 +2884,13 @@
" \"Europe\",\n",
" \"Oceania\"\n",
" ],\n",
- " \"orient\": \"vertical\",\n",
- " \"right\": \"1%\",\n",
- " \"top\": \"3%\",\n",
- " \"align\": \"auto\"\n",
- " },\n",
- " \"yAxis\": {\n",
- " \"max\": 49357,\n",
- " \"type\": \"value\",\n",
- " \"name\": \"gdpPercap\"\n",
+ " \"right\": \"1%\"\n",
" },\n",
" \"visualMap\": [\n",
" {\n",
+ " \"max\": 1318683096.0,\n",
+ " \"precision\": 0.1,\n",
+ " \"min\": 199579.0,\n",
" \"show\": false,\n",
" \"inRange\": {\n",
" \"symbolSize\": [\n",
@@ -2894,12 +2898,15 @@
" 70\n",
" ]\n",
" },\n",
- " \"dimension\": 2,\n",
- " \"max\": 1318683096.0,\n",
- " \"precision\": 0.1,\n",
- " \"min\": 199579.0\n",
+ " \"dimension\": 2\n",
" }\n",
- " ]\n",
+ " ],\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " },\n",
+ " \"formatter\": \"function (obj) {\\n var value = obj.value;\\n return 'country' + '\\uff1a' + value[4] + ' ' +'<br>'+'lifeExp' + '\\uff1a' + value[0] + ' ' +'<br>'+'gdpPercap' + '\\uff1a' + value[1] + ' ' +'<br>'+'pop' + '\\uff1a' + value[2] + ' ' +'<br>'+'continent' + '\\uff1a' + value[3] + ' ' +'<br>';\\n }\"\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -2909,16 +2916,16 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x107029080>"
+ "<krisk.chart.Chart at 0x1067b9a90>"
]
},
- "execution_count": 10,
+ "execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "p = kk.scatter(df[df.year == 2007],'lifeExp','gdpPercap',size='pop',category='continent')\n",
+ "p = kk.scatter(df[df.year == 2007],'lifeExp','gdpPercap',s='pop',c='continent')\n",
"p.set_size(width=1000, height=500)\n",
"p.set_tooltip_format(['country','lifeExp','gdpPercap','pop','continent'])\n",
"p.set_theme('dark')\n",
diff --git a/notebooks/legend-title-toolbox.ipynb b/notebooks/legend-title-toolbox.ipynb
index 64ae87b..47506b1 100644
--- a/notebooks/legend-title-toolbox.ipynb
+++ b/notebooks/legend-title-toolbox.ipynb
@@ -102,8 +102,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#94b7122b-a264-453c-aeed-15b105b6030a').attr('id','94b7122b-a264-453c-aeed-15b105b6030a'+'_old');\n",
- "element.append('<div id=\"94b7122b-a264-453c-aeed-15b105b6030a\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#5156887a-9bf1-4b28-a050-fc4844b2be69').attr('id','5156887a-9bf1-4b28-a050-fc4844b2be69'+'_old');\n",
+ "element.append('<div id=\"5156887a-9bf1-4b28-a050-fc4844b2be69\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -111,9 +111,137 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"94b7122b-a264-453c-aeed-15b105b6030a\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"5156887a-9bf1-4b28-a050-fc4844b2be69\"),\"\");\n",
" \n",
" var option = {\n",
+ " \"series\": [\n",
+ " {\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 4570010,\n",
+ " 5093033,\n",
+ " 5702247,\n",
+ " 6447875,\n",
+ " 7305376,\n",
+ " 8328097,\n",
+ " 9602857,\n",
+ " 11054502,\n",
+ " 12674645,\n",
+ " 14304480,\n",
+ " 16033152,\n",
+ " 17875763\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13806098,\n",
+ " 15478157,\n",
+ " 17330810,\n",
+ " 19229865,\n",
+ " 21175368,\n",
+ " 23122708,\n",
+ " 25211637,\n",
+ " 27310159,\n",
+ " 29570964,\n",
+ " 31876016,\n",
+ " 33990910,\n",
+ " 35954847\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 42283556,\n",
+ " 47356988,\n",
+ " 51404763,\n",
+ " 57747361,\n",
+ " 65180977,\n",
+ " 72257987,\n",
+ " 79095018,\n",
+ " 87006690,\n",
+ " 94948248,\n",
+ " 102523803,\n",
+ " 109145521,\n",
+ " 115513752\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13937362,\n",
+ " 14596345,\n",
+ " 15345172,\n",
+ " 16039299,\n",
+ " 16687835,\n",
+ " 17238818,\n",
+ " 17708897,\n",
+ " 18103139,\n",
+ " 18604760,\n",
+ " 18964805,\n",
+ " 19274129,\n",
+ " 19536618\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 5343003,\n",
+ " 5970988,\n",
+ " 6641759,\n",
+ " 7300207,\n",
+ " 8053050,\n",
+ " 8619500,\n",
+ " 9197425,\n",
+ " 9787208,\n",
+ " 10459826,\n",
+ " 11120715,\n",
+ " 11727414,\n",
+ " 12274974\n",
+ " ]\n",
+ " }\n",
+ " ],\n",
+ " \"toolbox\": {\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"feature\": {\n",
+ " \"saveAsImage\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Download as Image\",\n",
+ " \"type\": \"png\"\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Reset\"\n",
+ " },\n",
+ " \"dataZoom\": {\n",
+ " \"show\": false,\n",
+ " \"title\": \"Zoom\"\n",
+ " }\n",
+ " },\n",
+ " \"align\": \"auto\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
+ " \"title\": {\n",
+ " \"text\": \"GapMinder Average Population Across Continent\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" 1952,\n",
@@ -130,139 +258,11 @@
" 2007\n",
" ]\n",
" },\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ]\n",
- " },\n",
" \"tooltip\": {\n",
" \"axisPointer\": {\n",
" \"type\": \"\"\n",
" }\n",
- " },\n",
- " \"yAxis\": {},\n",
- " \"title\": {\n",
- " \"text\": \"GapMinder Average Population Across Continent\",\n",
- " \"left\": \"auto\",\n",
- " \"bottom\": \"auto\"\n",
- " },\n",
- " \"toolbox\": {\n",
- " \"align\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"feature\": {\n",
- " \"saveAsImage\": {\n",
- " \"title\": \"Download as Image\",\n",
- " \"type\": \"png\",\n",
- " \"show\": true\n",
- " },\n",
- " \"restore\": {\n",
- " \"title\": \"Reset\",\n",
- " \"show\": true\n",
- " },\n",
- " \"dataZoom\": {\n",
- " \"title\": \"Zoom\",\n",
- " \"show\": false\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"orient\": \"horizontal\"\n",
- " },\n",
- " \"series\": [\n",
- " {\n",
- " \"data\": [\n",
- " 4570009.635,\n",
- " 5093033.423,\n",
- " 5702247.404,\n",
- " 6447874.788,\n",
- " 7305375.788,\n",
- " 8328096.558,\n",
- " 9602857.442,\n",
- " 11054502.115,\n",
- " 12674644.558,\n",
- " 14304480.462,\n",
- " 16033152.231,\n",
- " 17875763.308\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13806097.84,\n",
- " 15478156.64,\n",
- " 17330810.16,\n",
- " 19229864.92,\n",
- " 21175368.4,\n",
- " 23122707.96,\n",
- " 25211636.8,\n",
- " 27310158.84,\n",
- " 29570964.16,\n",
- " 31876016.4,\n",
- " 33990910.48,\n",
- " 35954847.36\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 42283556.121,\n",
- " 47356987.848,\n",
- " 51404763.091,\n",
- " 57747360.606,\n",
- " 65180977.212,\n",
- " 72257986.545,\n",
- " 79095017.636,\n",
- " 87006689.758,\n",
- " 94948248.212,\n",
- " 102523803.03,\n",
- " 109145521.303,\n",
- " 115513752.333\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13937361.533,\n",
- " 14596345.033,\n",
- " 15345171.833,\n",
- " 16039298.6,\n",
- " 16687835.3,\n",
- " 17238817.7,\n",
- " 17708896.7,\n",
- " 18103138.667,\n",
- " 18604759.9,\n",
- " 18964804.933,\n",
- " 19274128.967,\n",
- " 19536617.633\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 5343003.0,\n",
- " 5970988.0,\n",
- " 6641759.0,\n",
- " 7300207.0,\n",
- " 8053050.0,\n",
- " 8619500.0,\n",
- " 9197425.0,\n",
- " 9787207.5,\n",
- " 10459825.5,\n",
- " 11120715.0,\n",
- " 11727414.5,\n",
- " 12274973.5\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
- " }\n",
- " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -272,7 +272,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106746d68>"
+ "<krisk.chart.Chart at 0x106858cc0>"
]
},
"execution_count": 4,
@@ -281,7 +281,7 @@
}
],
"source": [
- "p = kk.bar(df,'year',y='pop',how='mean',category='continent')\n",
+ "p = kk.bar(df,'year',y='pop',how='mean',c='continent')\n",
"p.set_size(width=800)\n",
"p.set_title('GapMinder Average Population Across Continent')\n",
"p.set_toolbox(save_format='png',restore=True)"
@@ -305,8 +305,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#a771ab53-5779-4227-b875-80e8382410b1').attr('id','a771ab53-5779-4227-b875-80e8382410b1'+'_old');\n",
- "element.append('<div id=\"a771ab53-5779-4227-b875-80e8382410b1\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#16c6943e-43a5-4410-9c71-7a44ade43b48').attr('id','16c6943e-43a5-4410-9c71-7a44ade43b48'+'_old');\n",
+ "element.append('<div id=\"16c6943e-43a5-4410-9c71-7a44ade43b48\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -314,9 +314,138 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"a771ab53-5779-4227-b875-80e8382410b1\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"16c6943e-43a5-4410-9c71-7a44ade43b48\"),\"\");\n",
" \n",
" var option = {\n",
+ " \"series\": [\n",
+ " {\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 4570010,\n",
+ " 5093033,\n",
+ " 5702247,\n",
+ " 6447875,\n",
+ " 7305376,\n",
+ " 8328097,\n",
+ " 9602857,\n",
+ " 11054502,\n",
+ " 12674645,\n",
+ " 14304480,\n",
+ " 16033152,\n",
+ " 17875763\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13806098,\n",
+ " 15478157,\n",
+ " 17330810,\n",
+ " 19229865,\n",
+ " 21175368,\n",
+ " 23122708,\n",
+ " 25211637,\n",
+ " 27310159,\n",
+ " 29570964,\n",
+ " 31876016,\n",
+ " 33990910,\n",
+ " 35954847\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 42283556,\n",
+ " 47356988,\n",
+ " 51404763,\n",
+ " 57747361,\n",
+ " 65180977,\n",
+ " 72257987,\n",
+ " 79095018,\n",
+ " 87006690,\n",
+ " 94948248,\n",
+ " 102523803,\n",
+ " 109145521,\n",
+ " 115513752\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13937362,\n",
+ " 14596345,\n",
+ " 15345172,\n",
+ " 16039299,\n",
+ " 16687835,\n",
+ " 17238818,\n",
+ " 17708897,\n",
+ " 18103139,\n",
+ " 18604760,\n",
+ " 18964805,\n",
+ " 19274129,\n",
+ " 19536618\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 5343003,\n",
+ " 5970988,\n",
+ " 6641759,\n",
+ " 7300207,\n",
+ " 8053050,\n",
+ " 8619500,\n",
+ " 9197425,\n",
+ " 9787208,\n",
+ " 10459826,\n",
+ " 11120715,\n",
+ " 11727414,\n",
+ " 12274974\n",
+ " ]\n",
+ " }\n",
+ " ],\n",
+ " \"toolbox\": {\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"feature\": {\n",
+ " \"saveAsImage\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Download as Image\",\n",
+ " \"type\": \"png\"\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Reset\"\n",
+ " },\n",
+ " \"dataZoom\": {\n",
+ " \"show\": false,\n",
+ " \"title\": \"Zoom\"\n",
+ " }\n",
+ " },\n",
+ " \"align\": \"auto\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
+ " \"title\": {\n",
+ " \"top\": \"7%\",\n",
+ " \"text\": \"GapMinder Average Population Across Continent\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" 1952,\n",
@@ -333,140 +462,11 @@
" 2007\n",
" ]\n",
" },\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ]\n",
- " },\n",
" \"tooltip\": {\n",
" \"axisPointer\": {\n",
" \"type\": \"\"\n",
" }\n",
- " },\n",
- " \"yAxis\": {},\n",
- " \"title\": {\n",
- " \"text\": \"GapMinder Average Population Across Continent\",\n",
- " \"left\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"top\": \"7%\"\n",
- " },\n",
- " \"toolbox\": {\n",
- " \"align\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"feature\": {\n",
- " \"saveAsImage\": {\n",
- " \"title\": \"Download as Image\",\n",
- " \"type\": \"png\",\n",
- " \"show\": true\n",
- " },\n",
- " \"restore\": {\n",
- " \"title\": \"Reset\",\n",
- " \"show\": true\n",
- " },\n",
- " \"dataZoom\": {\n",
- " \"title\": \"Zoom\",\n",
- " \"show\": false\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"orient\": \"horizontal\"\n",
- " },\n",
- " \"series\": [\n",
- " {\n",
- " \"data\": [\n",
- " 4570009.635,\n",
- " 5093033.423,\n",
- " 5702247.404,\n",
- " 6447874.788,\n",
- " 7305375.788,\n",
- " 8328096.558,\n",
- " 9602857.442,\n",
- " 11054502.115,\n",
- " 12674644.558,\n",
- " 14304480.462,\n",
- " 16033152.231,\n",
- " 17875763.308\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13806097.84,\n",
- " 15478156.64,\n",
- " 17330810.16,\n",
- " 19229864.92,\n",
- " 21175368.4,\n",
- " 23122707.96,\n",
- " 25211636.8,\n",
- " 27310158.84,\n",
- " 29570964.16,\n",
- " 31876016.4,\n",
- " 33990910.48,\n",
- " 35954847.36\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 42283556.121,\n",
- " 47356987.848,\n",
- " 51404763.091,\n",
- " 57747360.606,\n",
- " 65180977.212,\n",
- " 72257986.545,\n",
- " 79095017.636,\n",
- " 87006689.758,\n",
- " 94948248.212,\n",
- " 102523803.03,\n",
- " 109145521.303,\n",
- " 115513752.333\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13937361.533,\n",
- " 14596345.033,\n",
- " 15345171.833,\n",
- " 16039298.6,\n",
- " 16687835.3,\n",
- " 17238817.7,\n",
- " 17708896.7,\n",
- " 18103138.667,\n",
- " 18604759.9,\n",
- " 18964804.933,\n",
- " 19274128.967,\n",
- " 19536617.633\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 5343003.0,\n",
- " 5970988.0,\n",
- " 6641759.0,\n",
- " 7300207.0,\n",
- " 8053050.0,\n",
- " 8619500.0,\n",
- " 9197425.0,\n",
- " 9787207.5,\n",
- " 10459825.5,\n",
- " 11120715.0,\n",
- " 11727414.5,\n",
- " 12274973.5\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
- " }\n",
- " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -476,7 +476,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106746400>"
+ "<krisk.chart.Chart at 0x1068580b8>"
]
},
"execution_count": 5,
@@ -523,8 +523,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#3b624e51-9604-4241-8d69-862dc0453f03').attr('id','3b624e51-9604-4241-8d69-862dc0453f03'+'_old');\n",
- "element.append('<div id=\"3b624e51-9604-4241-8d69-862dc0453f03\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#cc166926-f8d3-42fc-aa58-888075c77f0c').attr('id','cc166926-f8d3-42fc-aa58-888075c77f0c'+'_old');\n",
+ "element.append('<div id=\"cc166926-f8d3-42fc-aa58-888075c77f0c\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -532,163 +532,163 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"3b624e51-9604-4241-8d69-862dc0453f03\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"cc166926-f8d3-42fc-aa58-888075c77f0c\"),\"\");\n",
" \n",
" var option = {\n",
- " \"xAxis\": {\n",
- " \"data\": [\n",
- " 1952,\n",
- " 1957,\n",
- " 1962,\n",
- " 1967,\n",
- " 1972,\n",
- " 1977,\n",
- " 1982,\n",
- " 1987,\n",
- " 1992,\n",
- " 1997,\n",
- " 2002,\n",
- " 2007\n",
- " ]\n",
- " },\n",
- " \"legend\": {\n",
- " \"align\": \"auto\",\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ],\n",
- " \"left\": \"12%\",\n",
- " \"orient\": \"vertical\",\n",
- " \"top\": \"16%\"\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
- " \"yAxis\": {},\n",
- " \"title\": {\n",
- " \"text\": \"GapMinder Average Population Across Continent\",\n",
- " \"left\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"top\": \"7%\"\n",
- " },\n",
- " \"toolbox\": {\n",
- " \"align\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"feature\": {\n",
- " \"saveAsImage\": {\n",
- " \"title\": \"Download as Image\",\n",
- " \"type\": \"png\",\n",
- " \"show\": true\n",
- " },\n",
- " \"restore\": {\n",
- " \"title\": \"Reset\",\n",
- " \"show\": true\n",
- " },\n",
- " \"dataZoom\": {\n",
- " \"title\": \"Zoom\",\n",
- " \"show\": false\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"orient\": \"horizontal\"\n",
- " },\n",
" \"series\": [\n",
" {\n",
- " \"data\": [\n",
- " 4570009.635,\n",
- " 5093033.423,\n",
- " 5702247.404,\n",
- " 6447874.788,\n",
- " 7305375.788,\n",
- " 8328096.558,\n",
- " 9602857.442,\n",
- " 11054502.115,\n",
- " 12674644.558,\n",
- " 14304480.462,\n",
- " 16033152.231,\n",
- " 17875763.308\n",
- " ],\n",
+ " \"name\": \"Africa\",\n",
" \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"data\": [\n",
+ " 4570010,\n",
+ " 5093033,\n",
+ " 5702247,\n",
+ " 6447875,\n",
+ " 7305376,\n",
+ " 8328097,\n",
+ " 9602857,\n",
+ " 11054502,\n",
+ " 12674645,\n",
+ " 14304480,\n",
+ " 16033152,\n",
+ " 17875763\n",
+ " ]\n",
" },\n",
" {\n",
- " \"data\": [\n",
- " 13806097.84,\n",
- " 15478156.64,\n",
- " 17330810.16,\n",
- " 19229864.92,\n",
- " 21175368.4,\n",
- " 23122707.96,\n",
- " 25211636.8,\n",
- " 27310158.84,\n",
- " 29570964.16,\n",
- " 31876016.4,\n",
- " 33990910.48,\n",
- " 35954847.36\n",
- " ],\n",
+ " \"name\": \"Americas\",\n",
" \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"data\": [\n",
+ " 13806098,\n",
+ " 15478157,\n",
+ " 17330810,\n",
+ " 19229865,\n",
+ " 21175368,\n",
+ " 23122708,\n",
+ " 25211637,\n",
+ " 27310159,\n",
+ " 29570964,\n",
+ " 31876016,\n",
+ " 33990910,\n",
+ " 35954847\n",
+ " ]\n",
" },\n",
" {\n",
- " \"data\": [\n",
- " 42283556.121,\n",
- " 47356987.848,\n",
- " 51404763.091,\n",
- " 57747360.606,\n",
- " 65180977.212,\n",
- " 72257986.545,\n",
- " 79095017.636,\n",
- " 87006689.758,\n",
- " 94948248.212,\n",
- " 102523803.03,\n",
- " 109145521.303,\n",
- " 115513752.333\n",
- " ],\n",
+ " \"name\": \"Asia\",\n",
" \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"data\": [\n",
+ " 42283556,\n",
+ " 47356988,\n",
+ " 51404763,\n",
+ " 57747361,\n",
+ " 65180977,\n",
+ " 72257987,\n",
+ " 79095018,\n",
+ " 87006690,\n",
+ " 94948248,\n",
+ " 102523803,\n",
+ " 109145521,\n",
+ " 115513752\n",
+ " ]\n",
" },\n",
" {\n",
- " \"data\": [\n",
- " 13937361.533,\n",
- " 14596345.033,\n",
- " 15345171.833,\n",
- " 16039298.6,\n",
- " 16687835.3,\n",
- " 17238817.7,\n",
- " 17708896.7,\n",
- " 18103138.667,\n",
- " 18604759.9,\n",
- " 18964804.933,\n",
- " 19274128.967,\n",
- " 19536617.633\n",
- " ],\n",
+ " \"name\": \"Europe\",\n",
" \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"data\": [\n",
+ " 13937362,\n",
+ " 14596345,\n",
+ " 15345172,\n",
+ " 16039299,\n",
+ " 16687835,\n",
+ " 17238818,\n",
+ " 17708897,\n",
+ " 18103139,\n",
+ " 18604760,\n",
+ " 18964805,\n",
+ " 19274129,\n",
+ " 19536618\n",
+ " ]\n",
" },\n",
" {\n",
- " \"data\": [\n",
- " 5343003.0,\n",
- " 5970988.0,\n",
- " 6641759.0,\n",
- " 7300207.0,\n",
- " 8053050.0,\n",
- " 8619500.0,\n",
- " 9197425.0,\n",
- " 9787207.5,\n",
- " 10459825.5,\n",
- " 11120715.0,\n",
- " 11727414.5,\n",
- " 12274973.5\n",
- " ],\n",
+ " \"name\": \"Oceania\",\n",
" \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
+ " \"data\": [\n",
+ " 5343003,\n",
+ " 5970988,\n",
+ " 6641759,\n",
+ " 7300207,\n",
+ " 8053050,\n",
+ " 8619500,\n",
+ " 9197425,\n",
+ " 9787208,\n",
+ " 10459826,\n",
+ " 11120715,\n",
+ " 11727414,\n",
+ " 12274974\n",
+ " ]\n",
" }\n",
- " ]\n",
+ " ],\n",
+ " \"toolbox\": {\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"feature\": {\n",
+ " \"saveAsImage\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Download as Image\",\n",
+ " \"type\": \"png\"\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Reset\"\n",
+ " },\n",
+ " \"dataZoom\": {\n",
+ " \"show\": false,\n",
+ " \"title\": \"Zoom\"\n",
+ " }\n",
+ " },\n",
+ " \"align\": \"auto\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"top\": \"16%\",\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ],\n",
+ " \"align\": \"auto\",\n",
+ " \"orient\": \"vertical\",\n",
+ " \"left\": \"12%\"\n",
+ " },\n",
+ " \"title\": {\n",
+ " \"top\": \"7%\",\n",
+ " \"text\": \"GapMinder Average Population Across Continent\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
+ " 1972,\n",
+ " 1977,\n",
+ " 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
+ " 1997,\n",
+ " 2002,\n",
+ " 2007\n",
+ " ]\n",
+ " },\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -698,7 +698,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106746630>"
+ "<krisk.chart.Chart at 0x106858390>"
]
},
"execution_count": 6,
@@ -778,7 +778,6 @@
" \n",
" Parameters\n",
" ----------\n",
- " \n",
" align: str, {'auto','left','right'}, default to 'auto'\n",
" orient: str, {'horizontal','vertical'} default to 'horizontal'\n",
" x_pos: str, {'auto', left', 'center', 'right', 'i%'}, default to 'auto'\n",
@@ -827,8 +826,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#ab0751f1-0ab2-4668-be72-5a98bbc07068').attr('id','ab0751f1-0ab2-4668-be72-5a98bbc07068'+'_old');\n",
- "element.append('<div id=\"ab0751f1-0ab2-4668-be72-5a98bbc07068\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#de51a035-9a63-4069-aa06-7b34c21cf19d').attr('id','de51a035-9a63-4069-aa06-7b34c21cf19d'+'_old');\n",
+ "element.append('<div id=\"de51a035-9a63-4069-aa06-7b34c21cf19d\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -836,9 +835,142 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"ab0751f1-0ab2-4668-be72-5a98bbc07068\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"de51a035-9a63-4069-aa06-7b34c21cf19d\"),\"\");\n",
" \n",
" var option = {\n",
+ " \"series\": [\n",
+ " {\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 4570010,\n",
+ " 5093033,\n",
+ " 5702247,\n",
+ " 6447875,\n",
+ " 7305376,\n",
+ " 8328097,\n",
+ " 9602857,\n",
+ " 11054502,\n",
+ " 12674645,\n",
+ " 14304480,\n",
+ " 16033152,\n",
+ " 17875763\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13806098,\n",
+ " 15478157,\n",
+ " 17330810,\n",
+ " 19229865,\n",
+ " 21175368,\n",
+ " 23122708,\n",
+ " 25211637,\n",
+ " 27310159,\n",
+ " 29570964,\n",
+ " 31876016,\n",
+ " 33990910,\n",
+ " 35954847\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 42283556,\n",
+ " 47356988,\n",
+ " 51404763,\n",
+ " 57747361,\n",
+ " 65180977,\n",
+ " 72257987,\n",
+ " 79095018,\n",
+ " 87006690,\n",
+ " 94948248,\n",
+ " 102523803,\n",
+ " 109145521,\n",
+ " 115513752\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13937362,\n",
+ " 14596345,\n",
+ " 15345172,\n",
+ " 16039299,\n",
+ " 16687835,\n",
+ " 17238818,\n",
+ " 17708897,\n",
+ " 18103139,\n",
+ " 18604760,\n",
+ " 18964805,\n",
+ " 19274129,\n",
+ " 19536618\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 5343003,\n",
+ " 5970988,\n",
+ " 6641759,\n",
+ " 7300207,\n",
+ " 8053050,\n",
+ " 8619500,\n",
+ " 9197425,\n",
+ " 9787208,\n",
+ " 10459826,\n",
+ " 11120715,\n",
+ " 11727414,\n",
+ " 12274974\n",
+ " ]\n",
+ " }\n",
+ " ],\n",
+ " \"toolbox\": {\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"feature\": {\n",
+ " \"saveAsImage\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Download as Image\",\n",
+ " \"type\": \"png\"\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Reset\"\n",
+ " },\n",
+ " \"dataZoom\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Zoom\"\n",
+ " }\n",
+ " },\n",
+ " \"align\": \"auto\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"top\": \"16%\",\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ],\n",
+ " \"align\": \"auto\",\n",
+ " \"orient\": \"vertical\",\n",
+ " \"left\": \"12%\"\n",
+ " },\n",
+ " \"title\": {\n",
+ " \"top\": \"7%\",\n",
+ " \"text\": \"GapMinder Average Population Across Continent\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" 1952,\n",
@@ -855,144 +987,11 @@
" 2007\n",
" ]\n",
" },\n",
- " \"legend\": {\n",
- " \"align\": \"auto\",\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ],\n",
- " \"left\": \"12%\",\n",
- " \"orient\": \"vertical\",\n",
- " \"top\": \"16%\"\n",
- " },\n",
" \"tooltip\": {\n",
" \"axisPointer\": {\n",
" \"type\": \"\"\n",
" }\n",
- " },\n",
- " \"yAxis\": {},\n",
- " \"title\": {\n",
- " \"text\": \"GapMinder Average Population Across Continent\",\n",
- " \"left\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"top\": \"7%\"\n",
- " },\n",
- " \"toolbox\": {\n",
- " \"align\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"feature\": {\n",
- " \"saveAsImage\": {\n",
- " \"title\": \"Download as Image\",\n",
- " \"type\": \"png\",\n",
- " \"show\": true\n",
- " },\n",
- " \"restore\": {\n",
- " \"title\": \"Reset\",\n",
- " \"show\": true\n",
- " },\n",
- " \"dataZoom\": {\n",
- " \"title\": \"Zoom\",\n",
- " \"show\": true\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"orient\": \"horizontal\"\n",
- " },\n",
- " \"series\": [\n",
- " {\n",
- " \"data\": [\n",
- " 4570009.635,\n",
- " 5093033.423,\n",
- " 5702247.404,\n",
- " 6447874.788,\n",
- " 7305375.788,\n",
- " 8328096.558,\n",
- " 9602857.442,\n",
- " 11054502.115,\n",
- " 12674644.558,\n",
- " 14304480.462,\n",
- " 16033152.231,\n",
- " 17875763.308\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13806097.84,\n",
- " 15478156.64,\n",
- " 17330810.16,\n",
- " 19229864.92,\n",
- " 21175368.4,\n",
- " 23122707.96,\n",
- " 25211636.8,\n",
- " 27310158.84,\n",
- " 29570964.16,\n",
- " 31876016.4,\n",
- " 33990910.48,\n",
- " 35954847.36\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 42283556.121,\n",
- " 47356987.848,\n",
- " 51404763.091,\n",
- " 57747360.606,\n",
- " 65180977.212,\n",
- " 72257986.545,\n",
- " 79095017.636,\n",
- " 87006689.758,\n",
- " 94948248.212,\n",
- " 102523803.03,\n",
- " 109145521.303,\n",
- " 115513752.333\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13937361.533,\n",
- " 14596345.033,\n",
- " 15345171.833,\n",
- " 16039298.6,\n",
- " 16687835.3,\n",
- " 17238817.7,\n",
- " 17708896.7,\n",
- " 18103138.667,\n",
- " 18604759.9,\n",
- " 18964804.933,\n",
- " 19274128.967,\n",
- " 19536617.633\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 5343003.0,\n",
- " 5970988.0,\n",
- " 6641759.0,\n",
- " 7300207.0,\n",
- " 8053050.0,\n",
- " 8619500.0,\n",
- " 9197425.0,\n",
- " 9787207.5,\n",
- " 10459825.5,\n",
- " 11120715.0,\n",
- " 11727414.5,\n",
- " 12274973.5\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
- " }\n",
- " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1002,7 +1001,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106746748>"
+ "<krisk.chart.Chart at 0x106858cf8>"
]
},
"execution_count": 9,
@@ -1039,8 +1038,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#146dd336-6569-4a4b-932f-0b203ba39ead').attr('id','146dd336-6569-4a4b-932f-0b203ba39ead'+'_old');\n",
- "element.append('<div id=\"146dd336-6569-4a4b-932f-0b203ba39ead\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#cab7e27d-a392-457b-8ee7-09d52f96ed8c').attr('id','cab7e27d-a392-457b-8ee7-09d52f96ed8c'+'_old');\n",
+ "element.append('<div id=\"cab7e27d-a392-457b-8ee7-09d52f96ed8c\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1048,9 +1047,147 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"146dd336-6569-4a4b-932f-0b203ba39ead\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"cab7e27d-a392-457b-8ee7-09d52f96ed8c\"),\"\");\n",
" \n",
" var option = {\n",
+ " \"series\": [\n",
+ " {\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 4570010,\n",
+ " 5093033,\n",
+ " 5702247,\n",
+ " 6447875,\n",
+ " 7305376,\n",
+ " 8328097,\n",
+ " 9602857,\n",
+ " 11054502,\n",
+ " 12674645,\n",
+ " 14304480,\n",
+ " 16033152,\n",
+ " 17875763\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13806098,\n",
+ " 15478157,\n",
+ " 17330810,\n",
+ " 19229865,\n",
+ " 21175368,\n",
+ " 23122708,\n",
+ " 25211637,\n",
+ " 27310159,\n",
+ " 29570964,\n",
+ " 31876016,\n",
+ " 33990910,\n",
+ " 35954847\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 42283556,\n",
+ " 47356988,\n",
+ " 51404763,\n",
+ " 57747361,\n",
+ " 65180977,\n",
+ " 72257987,\n",
+ " 79095018,\n",
+ " 87006690,\n",
+ " 94948248,\n",
+ " 102523803,\n",
+ " 109145521,\n",
+ " 115513752\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13937362,\n",
+ " 14596345,\n",
+ " 15345172,\n",
+ " 16039299,\n",
+ " 16687835,\n",
+ " 17238818,\n",
+ " 17708897,\n",
+ " 18103139,\n",
+ " 18604760,\n",
+ " 18964805,\n",
+ " 19274129,\n",
+ " 19536618\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 5343003,\n",
+ " 5970988,\n",
+ " 6641759,\n",
+ " 7300207,\n",
+ " 8053050,\n",
+ " 8619500,\n",
+ " 9197425,\n",
+ " 9787208,\n",
+ " 10459826,\n",
+ " 11120715,\n",
+ " 11727414,\n",
+ " 12274974\n",
+ " ]\n",
+ " }\n",
+ " ],\n",
+ " \"toolbox\": {\n",
+ " \"bottom\": \"auto\",\n",
+ " \"feature\": {\n",
+ " \"dataView\": {\n",
+ " \"show\": true,\n",
+ " \"readOnly\": false,\n",
+ " \"title\": \"Table View\",\n",
+ " \"lang\": [\n",
+ " \"Table View\",\n",
+ " \"Back\",\n",
+ " \"Modify\"\n",
+ " ]\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Reset\"\n",
+ " },\n",
+ " \"dataZoom\": {\n",
+ " \"show\": false,\n",
+ " \"title\": \"Zoom\"\n",
+ " }\n",
+ " },\n",
+ " \"align\": \"auto\",\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"top\": \"16%\",\n",
+ " \"orient\": \"vertical\",\n",
+ " \"align\": \"auto\",\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ],\n",
+ " \"left\": \"12%\"\n",
+ " },\n",
+ " \"title\": {\n",
+ " \"top\": \"7%\",\n",
+ " \"text\": \"GapMinder Average Population Across Continent\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" 1952,\n",
@@ -1067,149 +1204,11 @@
" 2007\n",
" ]\n",
" },\n",
- " \"legend\": {\n",
- " \"align\": \"auto\",\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ],\n",
- " \"left\": \"12%\",\n",
- " \"top\": \"16%\",\n",
- " \"orient\": \"vertical\"\n",
- " },\n",
" \"tooltip\": {\n",
" \"axisPointer\": {\n",
" \"type\": \"\"\n",
" }\n",
- " },\n",
- " \"title\": {\n",
- " \"text\": \"GapMinder Average Population Across Continent\",\n",
- " \"left\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"top\": \"7%\"\n",
- " },\n",
- " \"yAxis\": {},\n",
- " \"toolbox\": {\n",
- " \"align\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"feature\": {\n",
- " \"dataZoom\": {\n",
- " \"show\": false,\n",
- " \"title\": \"Zoom\"\n",
- " },\n",
- " \"restore\": {\n",
- " \"show\": true,\n",
- " \"title\": \"Reset\"\n",
- " },\n",
- " \"dataView\": {\n",
- " \"lang\": [\n",
- " \"Table View\",\n",
- " \"Back\",\n",
- " \"Modify\"\n",
- " ],\n",
- " \"show\": true,\n",
- " \"readOnly\": false,\n",
- " \"title\": \"Table View\"\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"orient\": \"horizontal\"\n",
- " },\n",
- " \"series\": [\n",
- " {\n",
- " \"data\": [\n",
- " 4570009.635,\n",
- " 5093033.423,\n",
- " 5702247.404,\n",
- " 6447874.788,\n",
- " 7305375.788,\n",
- " 8328096.558,\n",
- " 9602857.442,\n",
- " 11054502.115,\n",
- " 12674644.558,\n",
- " 14304480.462,\n",
- " 16033152.231,\n",
- " 17875763.308\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13806097.84,\n",
- " 15478156.64,\n",
- " 17330810.16,\n",
- " 19229864.92,\n",
- " 21175368.4,\n",
- " 23122707.96,\n",
- " 25211636.8,\n",
- " 27310158.84,\n",
- " 29570964.16,\n",
- " 31876016.4,\n",
- " 33990910.48,\n",
- " 35954847.36\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 42283556.121,\n",
- " 47356987.848,\n",
- " 51404763.091,\n",
- " 57747360.606,\n",
- " 65180977.212,\n",
- " 72257986.545,\n",
- " 79095017.636,\n",
- " 87006689.758,\n",
- " 94948248.212,\n",
- " 102523803.03,\n",
- " 109145521.303,\n",
- " 115513752.333\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13937361.533,\n",
- " 14596345.033,\n",
- " 15345171.833,\n",
- " 16039298.6,\n",
- " 16687835.3,\n",
- " 17238817.7,\n",
- " 17708896.7,\n",
- " 18103138.667,\n",
- " 18604759.9,\n",
- " 18964804.933,\n",
- " 19274128.967,\n",
- " 19536617.633\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 5343003.0,\n",
- " 5970988.0,\n",
- " 6641759.0,\n",
- " 7300207.0,\n",
- " 8053050.0,\n",
- " 8619500.0,\n",
- " 9197425.0,\n",
- " 9787207.5,\n",
- " 10459825.5,\n",
- " 11120715.0,\n",
- " 11727414.5,\n",
- " 12274973.5\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
- " }\n",
- " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1219,7 +1218,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106746978>"
+ "<krisk.chart.Chart at 0x106858630>"
]
},
"execution_count": 10,
@@ -1257,8 +1256,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#9d6d791b-d5b6-4774-b514-c8ba01bb247e').attr('id','9d6d791b-d5b6-4774-b514-c8ba01bb247e'+'_old');\n",
- "element.append('<div id=\"9d6d791b-d5b6-4774-b514-c8ba01bb247e\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#de5a7cd8-7b69-4645-a044-c1f27610e3ce').attr('id','de5a7cd8-7b69-4645-a044-c1f27610e3ce'+'_old');\n",
+ "element.append('<div id=\"de5a7cd8-7b69-4645-a044-c1f27610e3ce\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1266,9 +1265,145 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"9d6d791b-d5b6-4774-b514-c8ba01bb247e\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"de5a7cd8-7b69-4645-a044-c1f27610e3ce\"),\"\");\n",
" \n",
" var option = {\n",
+ " \"series\": [\n",
+ " {\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 4570010,\n",
+ " 5093033,\n",
+ " 5702247,\n",
+ " 6447875,\n",
+ " 7305376,\n",
+ " 8328097,\n",
+ " 9602857,\n",
+ " 11054502,\n",
+ " 12674645,\n",
+ " 14304480,\n",
+ " 16033152,\n",
+ " 17875763\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13806098,\n",
+ " 15478157,\n",
+ " 17330810,\n",
+ " 19229865,\n",
+ " 21175368,\n",
+ " 23122708,\n",
+ " 25211637,\n",
+ " 27310159,\n",
+ " 29570964,\n",
+ " 31876016,\n",
+ " 33990910,\n",
+ " 35954847\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 42283556,\n",
+ " 47356988,\n",
+ " 51404763,\n",
+ " 57747361,\n",
+ " 65180977,\n",
+ " 72257987,\n",
+ " 79095018,\n",
+ " 87006690,\n",
+ " 94948248,\n",
+ " 102523803,\n",
+ " 109145521,\n",
+ " 115513752\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 13937362,\n",
+ " 14596345,\n",
+ " 15345172,\n",
+ " 16039299,\n",
+ " 16687835,\n",
+ " 17238818,\n",
+ " 17708897,\n",
+ " 18103139,\n",
+ " 18604760,\n",
+ " 18964805,\n",
+ " 19274129,\n",
+ " 19536618\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\",\n",
+ " \"data\": [\n",
+ " 5343003,\n",
+ " 5970988,\n",
+ " 6641759,\n",
+ " 7300207,\n",
+ " 8053050,\n",
+ " 8619500,\n",
+ " 9197425,\n",
+ " 9787208,\n",
+ " 10459826,\n",
+ " 11120715,\n",
+ " 11727414,\n",
+ " 12274974\n",
+ " ]\n",
+ " }\n",
+ " ],\n",
+ " \"toolbox\": {\n",
+ " \"orient\": \"horizontal\",\n",
+ " \"feature\": {\n",
+ " \"magicType\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Chart Options\",\n",
+ " \"type\": [\n",
+ " \"line\",\n",
+ " \"bar\"\n",
+ " ]\n",
+ " },\n",
+ " \"restore\": {\n",
+ " \"show\": true,\n",
+ " \"title\": \"Reset\"\n",
+ " },\n",
+ " \"dataZoom\": {\n",
+ " \"show\": false,\n",
+ " \"title\": \"Zoom\"\n",
+ " }\n",
+ " },\n",
+ " \"align\": \"auto\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"legend\": {\n",
+ " \"top\": \"16%\",\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ],\n",
+ " \"align\": \"auto\",\n",
+ " \"orient\": \"vertical\",\n",
+ " \"left\": \"12%\"\n",
+ " },\n",
+ " \"title\": {\n",
+ " \"top\": \"7%\",\n",
+ " \"text\": \"GapMinder Average Population Across Continent\",\n",
+ " \"bottom\": \"auto\",\n",
+ " \"left\": \"auto\"\n",
+ " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" 1952,\n",
@@ -1285,147 +1420,11 @@
" 2007\n",
" ]\n",
" },\n",
- " \"legend\": {\n",
- " \"align\": \"auto\",\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\",\n",
- " \"Oceania\"\n",
- " ],\n",
- " \"left\": \"12%\",\n",
- " \"orient\": \"vertical\",\n",
- " \"top\": \"16%\"\n",
- " },\n",
" \"tooltip\": {\n",
" \"axisPointer\": {\n",
" \"type\": \"\"\n",
" }\n",
- " },\n",
- " \"yAxis\": {},\n",
- " \"title\": {\n",
- " \"text\": \"GapMinder Average Population Across Continent\",\n",
- " \"left\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"top\": \"7%\"\n",
- " },\n",
- " \"toolbox\": {\n",
- " \"align\": \"auto\",\n",
- " \"bottom\": \"auto\",\n",
- " \"feature\": {\n",
- " \"magicType\": {\n",
- " \"title\": \"Chart Options\",\n",
- " \"type\": [\n",
- " \"line\",\n",
- " \"bar\"\n",
- " ],\n",
- " \"show\": true\n",
- " },\n",
- " \"dataZoom\": {\n",
- " \"title\": \"Zoom\",\n",
- " \"show\": false\n",
- " },\n",
- " \"restore\": {\n",
- " \"title\": \"Reset\",\n",
- " \"show\": true\n",
- " }\n",
- " },\n",
- " \"left\": \"auto\",\n",
- " \"orient\": \"horizontal\"\n",
- " },\n",
- " \"series\": [\n",
- " {\n",
- " \"data\": [\n",
- " 4570009.635,\n",
- " 5093033.423,\n",
- " 5702247.404,\n",
- " 6447874.788,\n",
- " 7305375.788,\n",
- " 8328096.558,\n",
- " 9602857.442,\n",
- " 11054502.115,\n",
- " 12674644.558,\n",
- " 14304480.462,\n",
- " 16033152.231,\n",
- " 17875763.308\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13806097.84,\n",
- " 15478156.64,\n",
- " 17330810.16,\n",
- " 19229864.92,\n",
- " 21175368.4,\n",
- " 23122707.96,\n",
- " 25211636.8,\n",
- " 27310158.84,\n",
- " 29570964.16,\n",
- " 31876016.4,\n",
- " 33990910.48,\n",
- " 35954847.36\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 42283556.121,\n",
- " 47356987.848,\n",
- " 51404763.091,\n",
- " 57747360.606,\n",
- " 65180977.212,\n",
- " 72257986.545,\n",
- " 79095017.636,\n",
- " 87006689.758,\n",
- " 94948248.212,\n",
- " 102523803.03,\n",
- " 109145521.303,\n",
- " 115513752.333\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 13937361.533,\n",
- " 14596345.033,\n",
- " 15345171.833,\n",
- " 16039298.6,\n",
- " 16687835.3,\n",
- " 17238817.7,\n",
- " 17708896.7,\n",
- " 18103138.667,\n",
- " 18604759.9,\n",
- " 18964804.933,\n",
- " 19274128.967,\n",
- " 19536617.633\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
- " },\n",
- " {\n",
- " \"data\": [\n",
- " 5343003.0,\n",
- " 5970988.0,\n",
- " 6641759.0,\n",
- " 7300207.0,\n",
- " 8053050.0,\n",
- " 8619500.0,\n",
- " 9197425.0,\n",
- " 9787207.5,\n",
- " 10459825.5,\n",
- " 11120715.0,\n",
- " 11727414.5,\n",
- " 12274973.5\n",
- " ],\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Oceania\"\n",
- " }\n",
- " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1435,7 +1434,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106746c18>"
+ "<krisk.chart.Chart at 0x1068586a0>"
]
},
"execution_count": 11,
diff --git a/notebooks/resync-reproducible.ipynb b/notebooks/resync-reproducible.ipynb
index cdeb6e8..e55050a 100644
--- a/notebooks/resync-reproducible.ipynb
+++ b/notebooks/resync-reproducible.ipynb
@@ -66,8 +66,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d').attr('id','ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d'+'_old');\n",
- "element.append('<div id=\"ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#e067b491-63a2-4efc-8f72-680f0f3ea7b3').attr('id','e067b491-63a2-4efc-8f72-680f0f3ea7b3'+'_old');\n",
+ "element.append('<div id=\"e067b491-63a2-4efc-8f72-680f0f3ea7b3\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -75,17 +75,9 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"e067b491-63a2-4efc-8f72-680f0f3ea7b3\"),\"\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": []\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
@@ -99,10 +91,11 @@
" \"type\": \"bar\"\n",
" }\n",
" ],\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" \"Africa\",\n",
@@ -111,6 +104,13 @@
" \"Europe\",\n",
" \"Oceania\"\n",
" ]\n",
+ " },\n",
+ " \"legend\": {\n",
+ " \"data\": []\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -121,7 +121,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x106748748>"
+ "<krisk.chart.Chart at 0x106741438>"
]
},
"execution_count": 4,
@@ -159,17 +159,9 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"e067b491-63a2-4efc-8f72-680f0f3ea7b3\"),\"\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": []\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
@@ -183,10 +175,11 @@
" \"type\": \"bar\"\n",
" }\n",
" ],\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" \"Africa\",\n",
@@ -195,6 +188,13 @@
" \"Europe\",\n",
" \"Oceania\"\n",
" ]\n",
+ " },\n",
+ " \"legend\": {\n",
+ " \"data\": []\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -242,17 +242,9 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"e067b491-63a2-4efc-8f72-680f0f3ea7b3\"),\"\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": []\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
@@ -266,10 +258,11 @@
" \"type\": \"bar\"\n",
" }\n",
" ],\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" \"Africa\",\n",
@@ -278,6 +271,13 @@
" \"Europe\",\n",
" \"Oceania\"\n",
" ]\n",
+ " },\n",
+ " \"legend\": {\n",
+ " \"data\": []\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -338,17 +338,9 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"ad4b9f2d-ae20-483b-a2cd-fb9ebe7e2e6d\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"e067b491-63a2-4efc-8f72-680f0f3ea7b3\"),\"\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": []\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
@@ -362,10 +354,11 @@
" \"type\": \"line\"\n",
" }\n",
" ],\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" \"Africa\",\n",
@@ -374,6 +367,13 @@
" \"Americas\",\n",
" \"Oceania\"\n",
" ]\n",
+ " },\n",
+ " \"legend\": {\n",
+ " \"data\": []\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -414,8 +414,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#dc33e2ab-79ec-433f-936f-51ba74baa8d5').attr('id','dc33e2ab-79ec-433f-936f-51ba74baa8d5'+'_old');\n",
- "element.append('<div id=\"dc33e2ab-79ec-433f-936f-51ba74baa8d5\" style=\"width: 800px;height:400px;\"></div>');\n",
+ "$('#11cd0150-0990-4a3b-a3ed-c1312a2e4bdc').attr('id','11cd0150-0990-4a3b-a3ed-c1312a2e4bdc'+'_old');\n",
+ "element.append('<div id=\"11cd0150-0990-4a3b-a3ed-c1312a2e4bdc\" style=\"width: 800px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -423,17 +423,9 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"dc33e2ab-79ec-433f-936f-51ba74baa8d5\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"11cd0150-0990-4a3b-a3ed-c1312a2e4bdc\"),\"\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": []\n",
- " },\n",
- " \"tooltip\": {\n",
- " \"axisPointer\": {\n",
- " \"type\": \"\"\n",
- " }\n",
- " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
@@ -447,10 +439,11 @@
" \"type\": \"bar\"\n",
" }\n",
" ],\n",
- " \"title\": {\n",
- " \"text\": \"\"\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
" },\n",
- " \"yAxis\": {},\n",
" \"xAxis\": {\n",
" \"data\": [\n",
" \"Africa\",\n",
@@ -459,6 +452,13 @@
" \"Europe\",\n",
" \"Oceania\"\n",
" ]\n",
+ " },\n",
+ " \"legend\": {\n",
+ " \"data\": []\n",
+ " },\n",
+ " \"yAxis\": {},\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
" }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
@@ -469,7 +469,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x1067488d0>"
+ "<krisk.chart.Chart at 0x1067412e8>"
]
},
"execution_count": 8,
@@ -502,7 +502,7 @@
},
"widgets": {
"state": {
- "caa1cce065a74f4d9f560aa724b160b4": {
+ "ddb923a981b74375b404f8c0b8884759": {
"views": [
{
"cell_index": 8
diff --git a/notebooks/themes-colors.ipynb b/notebooks/themes-colors.ipynb
index a1157c8..14d9b26 100644
--- a/notebooks/themes-colors.ipynb
+++ b/notebooks/themes-colors.ipynb
@@ -87,8 +87,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#e28f22b2-5a1c-4749-b7fa-f0e4537ca0b0').attr('id','e28f22b2-5a1c-4749-b7fa-f0e4537ca0b0'+'_old');\n",
- "element.append('<div id=\"e28f22b2-5a1c-4749-b7fa-f0e4537ca0b0\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#262a0900-7288-46fa-9d19-b051dfdabbaf').attr('id','262a0900-7288-46fa-9d19-b051dfdabbaf'+'_old');\n",
+ "element.append('<div id=\"262a0900-7288-46fa-9d19-b051dfdabbaf\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -96,27 +96,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"e28f22b2-5a1c-4749-b7fa-f0e4537ca0b0\"),\"\");\n",
+ " var myChart = echarts.init(document.getElementById(\"262a0900-7288-46fa-9d19-b051dfdabbaf\"),\"\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -124,64 +120,110 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
- " \"type\": \"bar\",\n",
- " \"stack\": \"continent\",\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
- " \"name\": \"Africa\"\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
- " \"type\": \"bar\",\n",
- " \"stack\": \"continent\",\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
- " \"name\": \"Americas\"\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
- " \"type\": \"bar\",\n",
- " \"stack\": \"continent\",\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
- " \"name\": \"Asia\"\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
- " \"type\": \"bar\",\n",
- " \"stack\": \"continent\",\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
- " \"name\": \"Europe\"\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
" \"yAxis\": {},\n",
@@ -197,7 +239,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10663ae80>"
+ "<krisk.chart.Chart at 0x10672ed30>"
]
},
"execution_count": 4,
@@ -206,7 +248,7 @@
}
],
"source": [
- "p = kk.bar(df,'year',category='continent',stacked=True)\n",
+ "p = kk.bar(df,'year',c='continent',stacked=True)\n",
"p"
]
},
@@ -228,8 +270,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#dde73bb9-a07d-4198-a77b-3575c41f6de5').attr('id','dde73bb9-a07d-4198-a77b-3575c41f6de5'+'_old');\n",
- "element.append('<div id=\"dde73bb9-a07d-4198-a77b-3575c41f6de5\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#324b1171-1c61-4c60-adea-bb2da0b444fe').attr('id','324b1171-1c61-4c60-adea-bb2da0b444fe'+'_old');\n",
+ "element.append('<div id=\"324b1171-1c61-4c60-adea-bb2da0b444fe\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -237,27 +279,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"dde73bb9-a07d-4198-a77b-3575c41f6de5\"),\"vintage\");\n",
+ " var myChart = echarts.init(document.getElementById(\"324b1171-1c61-4c60-adea-bb2da0b444fe\"),\"vintage\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -265,70 +303,116 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
- " },\n",
- " \"yAxis\": {}\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -338,7 +422,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x104a770b8>"
+ "<krisk.chart.Chart at 0x104b54e80>"
]
},
"execution_count": 5,
@@ -368,8 +452,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#3116446b-1db2-4910-8da2-9a5c4daebced').attr('id','3116446b-1db2-4910-8da2-9a5c4daebced'+'_old');\n",
- "element.append('<div id=\"3116446b-1db2-4910-8da2-9a5c4daebced\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#3dc7ef0d-3d2e-4c55-8ece-7a7b5ab138fb').attr('id','3dc7ef0d-3d2e-4c55-8ece-7a7b5ab138fb'+'_old');\n",
+ "element.append('<div id=\"3dc7ef0d-3d2e-4c55-8ece-7a7b5ab138fb\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -377,27 +461,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"3116446b-1db2-4910-8da2-9a5c4daebced\"),\"dark\");\n",
+ " var myChart = echarts.init(document.getElementById(\"3dc7ef0d-3d2e-4c55-8ece-7a7b5ab138fb\"),\"dark\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -405,70 +485,116 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
- " },\n",
- " \"yAxis\": {}\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -478,7 +604,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10663af28>"
+ "<krisk.chart.Chart at 0x10671ef28>"
]
},
"execution_count": 6,
@@ -508,8 +634,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#38385714-f42d-49c0-8952-1144029d82da').attr('id','38385714-f42d-49c0-8952-1144029d82da'+'_old');\n",
- "element.append('<div id=\"38385714-f42d-49c0-8952-1144029d82da\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#c7a8812b-9114-4932-860f-46817912dad7').attr('id','c7a8812b-9114-4932-860f-46817912dad7'+'_old');\n",
+ "element.append('<div id=\"c7a8812b-9114-4932-860f-46817912dad7\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -517,27 +643,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"38385714-f42d-49c0-8952-1144029d82da\"),\"macarons\");\n",
+ " var myChart = echarts.init(document.getElementById(\"c7a8812b-9114-4932-860f-46817912dad7\"),\"macarons\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -545,70 +667,116 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
- " },\n",
- " \"yAxis\": {}\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -618,7 +786,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10663a940>"
+ "<krisk.chart.Chart at 0x104b54ac8>"
]
},
"execution_count": 7,
@@ -648,8 +816,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#989c4732-bb6a-46bd-8e4f-fa3f8550aa39').attr('id','989c4732-bb6a-46bd-8e4f-fa3f8550aa39'+'_old');\n",
- "element.append('<div id=\"989c4732-bb6a-46bd-8e4f-fa3f8550aa39\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#d9c88487-12b1-4b04-b87b-844f0f342f2d').attr('id','d9c88487-12b1-4b04-b87b-844f0f342f2d'+'_old');\n",
+ "element.append('<div id=\"d9c88487-12b1-4b04-b87b-844f0f342f2d\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -657,27 +825,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"989c4732-bb6a-46bd-8e4f-fa3f8550aa39\"),\"infographic\");\n",
+ " var myChart = echarts.init(document.getElementById(\"d9c88487-12b1-4b04-b87b-844f0f342f2d\"),\"infographic\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -685,70 +849,116 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
- " },\n",
- " \"yAxis\": {}\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -758,7 +968,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10663aa58>"
+ "<krisk.chart.Chart at 0x10672e7f0>"
]
},
"execution_count": 8,
@@ -788,8 +998,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#2926812a-ca84-48e7-af59-88806870dadb').attr('id','2926812a-ca84-48e7-af59-88806870dadb'+'_old');\n",
- "element.append('<div id=\"2926812a-ca84-48e7-af59-88806870dadb\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#b4476886-0522-4e04-82dd-969cd176b1ce').attr('id','b4476886-0522-4e04-82dd-969cd176b1ce'+'_old');\n",
+ "element.append('<div id=\"b4476886-0522-4e04-82dd-969cd176b1ce\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -797,27 +1007,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"2926812a-ca84-48e7-af59-88806870dadb\"),\"roma\");\n",
+ " var myChart = echarts.init(document.getElementById(\"b4476886-0522-4e04-82dd-969cd176b1ce\"),\"roma\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -825,70 +1031,116 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
- " },\n",
- " \"yAxis\": {}\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -898,7 +1150,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10663a4e0>"
+ "<krisk.chart.Chart at 0x10672e668>"
]
},
"execution_count": 9,
@@ -928,8 +1180,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#7f519025-714e-449b-b8e3-0d33ed377229').attr('id','7f519025-714e-449b-b8e3-0d33ed377229'+'_old');\n",
- "element.append('<div id=\"7f519025-714e-449b-b8e3-0d33ed377229\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#47ee0a64-eaf4-49e3-9308-760ac3782467').attr('id','47ee0a64-eaf4-49e3-9308-760ac3782467'+'_old');\n",
+ "element.append('<div id=\"47ee0a64-eaf4-49e3-9308-760ac3782467\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -937,27 +1189,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"7f519025-714e-449b-b8e3-0d33ed377229\"),\"shine\");\n",
+ " var myChart = echarts.init(document.getElementById(\"47ee0a64-eaf4-49e3-9308-760ac3782467\"),\"shine\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -965,70 +1213,322 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
" \"series\": [\n",
" {\n",
" \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
- " 1\n",
+ " 0,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 3,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
" 1,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
+ " }\n",
+ " ],\n",
+ " \"yAxis\": {},\n",
+ " \"title\": {\n",
+ " \"text\": \"\"\n",
+ " }\n",
+ "};\n",
+ " option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
+ " myChart.setOption(option);\n",
+ " \n",
+ " \n",
+ " \n",
+ "});\n"
+ ],
+ "text/plain": [
+ "<krisk.chart.Chart at 0x10672e6a0>"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "p.set_theme('shine')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Colors (Palette and Background)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Krisk doesn't have existing based colormap. But you can feed CSS Color Codes, hex, or RGB colors manually."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 48,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "application/javascript": [
+ "\n",
+ "$('#7f221661-12e3-4fdc-86c4-4d47f53c4b59').attr('id','7f221661-12e3-4fdc-86c4-4d47f53c4b59'+'_old');\n",
+ "element.append('<div id=\"7f221661-12e3-4fdc-86c4-4d47f53c4b59\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
+ "function(echarts){\n",
+ " \n",
+ " function parseFunction(str){\n",
+ " return eval('(' + str + ')');\n",
+ " }\n",
+ " \n",
+ " var myChart = echarts.init(document.getElementById(\"7f221661-12e3-4fdc-86c4-4d47f53c4b59\"),\"shine\");\n",
+ " \n",
+ " var option = {\n",
+ " \"xAxis\": {\n",
+ " \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
+ " 1972,\n",
+ " 1977,\n",
+ " 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
+ " 1997,\n",
+ " 2002,\n",
+ " 2007\n",
+ " ]\n",
+ " },\n",
+ " \"tooltip\": {\n",
+ " \"axisPointer\": {\n",
+ " \"type\": \"\"\n",
+ " }\n",
+ " },\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
+ " ]\n",
+ " },\n",
+ " \"series\": [\n",
+ " {\n",
+ " \"data\": [\n",
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
" 1,\n",
- " 1\n",
+ " 2,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 3,\n",
+ " 1,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
" },\n",
- " \"yAxis\": {}\n",
+ " \"color\": [\n",
+ " \"Navy\",\n",
+ " \"#FF0000\",\n",
+ " \"rgb(205,92,92)\",\n",
+ " \"#65c3bf\",\n",
+ " \"hsl(60, 100%, 87%)\"\n",
+ " ],\n",
+ " \"graph\": {\n",
+ " \"color\": [\n",
+ " \"Navy\",\n",
+ " \"#FF0000\",\n",
+ " \"rgb(205,92,92)\",\n",
+ " \"#65c3bf\",\n",
+ " \"hsl(60, 100%, 87%)\"\n",
+ " ]\n",
+ " },\n",
+ " \"backgroundColor\": \"Aqua\"\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1038,30 +1538,24 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10663a160>"
+ "<krisk.chart.Chart at 0x10cb5ccc0>"
]
},
- "execution_count": 10,
+ "execution_count": 48,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "p.set_theme('shine')"
+ "pallete = ['Navy','#FF0000','rgb(205,92,92)', '#65c3bf','hsl(60, 100%, 87%)']\n",
+ "p.set_color(background='Aqua', palette=pallete)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "# Colors (Palette and Background)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Krisk doesn't have existing based colormap. Fortunately, you can feed hex and RGB colors manually, or using existing palettes provided by visualization libraries you already know. Here I will use libraries like Seaborn, Colorlover, and Bokeh."
+ "You also can using existing palettes provided by visualization libraries you already know. Here I will use libraries like Seaborn, Colorlover, and Bokeh."
]
},
{
@@ -1084,18 +1578,7 @@
},
{
"cell_type": "code",
- "execution_count": 12,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "palette_sns1 = sns.color_palette('muted').as_hex()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 13,
+ "execution_count": 47,
"metadata": {
"collapsed": false
},
@@ -1104,8 +1587,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#619be6b4-6809-42dd-aaff-b80401b45bd2').attr('id','619be6b4-6809-42dd-aaff-b80401b45bd2'+'_old');\n",
- "element.append('<div id=\"619be6b4-6809-42dd-aaff-b80401b45bd2\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#d47ef872-e06a-407b-bd47-659a7a533c9b').attr('id','d47ef872-e06a-407b-bd47-659a7a533c9b'+'_old');\n",
+ "element.append('<div id=\"d47ef872-e06a-407b-bd47-659a7a533c9b\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1113,27 +1596,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"619be6b4-6809-42dd-aaff-b80401b45bd2\"),\"shine\");\n",
+ " var myChart = echarts.init(document.getElementById(\"d47ef872-e06a-407b-bd47-659a7a533c9b\"),\"shine\");\n",
" \n",
- " var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
+ " var option = {\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -1141,14 +1620,13 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
- " \"graph\": {\n",
- " \"color\": [\n",
- " \"#4878cf\",\n",
- " \"#6acc65\",\n",
- " \"#d65f5f\",\n",
- " \"#b47cc7\",\n",
- " \"#c4ad66\",\n",
- " \"#77bedb\"\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
" ]\n",
" },\n",
" \"series\": [\n",
@@ -1157,60 +1635,98 @@
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
" },\n",
@@ -1222,7 +1738,16 @@
" \"#c4ad66\",\n",
" \"#77bedb\"\n",
" ],\n",
- " \"yAxis\": {}\n",
+ " \"graph\": {\n",
+ " \"color\": [\n",
+ " \"#4878cf\",\n",
+ " \"#6acc65\",\n",
+ " \"#d65f5f\",\n",
+ " \"#b47cc7\",\n",
+ " \"#c4ad66\",\n",
+ " \"#77bedb\"\n",
+ " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1232,15 +1757,16 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10c3960b8>"
+ "<krisk.chart.Chart at 0x10cb5c5f8>"
]
},
- "execution_count": 13,
+ "execution_count": 47,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
+ "palette_sns1 = sns.color_palette('muted').as_hex()\n",
"p.set_color(palette=palette_sns1)"
]
},
@@ -1262,8 +1788,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#5fb3b959-afe4-4db1-99ce-b88a99013781').attr('id','5fb3b959-afe4-4db1-99ce-b88a99013781'+'_old');\n",
- "element.append('<div id=\"5fb3b959-afe4-4db1-99ce-b88a99013781\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#bc669975-f836-4efe-8e82-c6eca3391c79').attr('id','bc669975-f836-4efe-8e82-c6eca3391c79'+'_old');\n",
+ "element.append('<div id=\"bc669975-f836-4efe-8e82-c6eca3391c79\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1271,27 +1797,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"5fb3b959-afe4-4db1-99ce-b88a99013781\"),\"shine\");\n",
+ " var myChart = echarts.init(document.getElementById(\"bc669975-f836-4efe-8e82-c6eca3391c79\"),\"shine\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -1299,14 +1821,13 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
- " \"graph\": {\n",
- " \"color\": [\n",
- " \"#e8f6b1\",\n",
- " \"#b2e1b6\",\n",
- " \"#65c3bf\",\n",
- " \"#2ca1c2\",\n",
- " \"#216daf\",\n",
- " \"#253997\"\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
" ]\n",
" },\n",
" \"series\": [\n",
@@ -1315,60 +1836,98 @@
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
" },\n",
@@ -1380,7 +1939,16 @@
" \"#216daf\",\n",
" \"#253997\"\n",
" ],\n",
- " \"yAxis\": {}\n",
+ " \"graph\": {\n",
+ " \"color\": [\n",
+ " \"#e8f6b1\",\n",
+ " \"#b2e1b6\",\n",
+ " \"#65c3bf\",\n",
+ " \"#2ca1c2\",\n",
+ " \"#216daf\",\n",
+ " \"#253997\"\n",
+ " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1390,7 +1958,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10c3b8e80>"
+ "<krisk.chart.Chart at 0x10cabf1d0>"
]
},
"execution_count": 14,
@@ -1432,8 +2000,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#0e0ddc3f-0788-4a0f-aa0d-2359d18092a9').attr('id','0e0ddc3f-0788-4a0f-aa0d-2359d18092a9'+'_old');\n",
- "element.append('<div id=\"0e0ddc3f-0788-4a0f-aa0d-2359d18092a9\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#4c538264-615c-491e-8357-685f8c80e4bb').attr('id','4c538264-615c-491e-8357-685f8c80e4bb'+'_old');\n",
+ "element.append('<div id=\"4c538264-615c-491e-8357-685f8c80e4bb\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1441,27 +2009,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"0e0ddc3f-0788-4a0f-aa0d-2359d18092a9\"),\"shine\");\n",
+ " var myChart = echarts.init(document.getElementById(\"4c538264-615c-491e-8357-685f8c80e4bb\"),\"shine\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -1469,11 +2033,13 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
- " \"graph\": {\n",
- " \"color\": [\n",
- " \"hsl(19, 96%, 67%)\",\n",
- " \"hsl(60, 100%, 87%)\",\n",
- " \"hsl(203, 51%, 71%)\"\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
" ]\n",
" },\n",
" \"series\": [\n",
@@ -1482,60 +2048,98 @@
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
" },\n",
@@ -1544,7 +2148,13 @@
" \"hsl(60, 100%, 87%)\",\n",
" \"hsl(203, 51%, 71%)\"\n",
" ],\n",
- " \"yAxis\": {}\n",
+ " \"graph\": {\n",
+ " \"color\": [\n",
+ " \"hsl(19, 96%, 67%)\",\n",
+ " \"hsl(60, 100%, 87%)\",\n",
+ " \"hsl(203, 51%, 71%)\"\n",
+ " ]\n",
+ " }\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1554,7 +2164,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10c406be0>"
+ "<krisk.chart.Chart at 0x10cabfe80>"
]
},
"execution_count": 16,
@@ -1598,8 +2208,8 @@
"data": {
"application/javascript": [
"\n",
- "$('#27924755-af14-41b8-9e7c-8d1509eea87f').attr('id','27924755-af14-41b8-9e7c-8d1509eea87f'+'_old');\n",
- "element.append('<div id=\"27924755-af14-41b8-9e7c-8d1509eea87f\" style=\"width: 600px;height:400px;\"></div>');\n",
+ "$('#2d033a74-89bc-4946-898b-59601b3c38d2').attr('id','2d033a74-89bc-4946-898b-59601b3c38d2'+'_old');\n",
+ "element.append('<div id=\"2d033a74-89bc-4946-898b-59601b3c38d2\" style=\"width: 600px;height:400px;\"></div>');\n",
"require(['echarts', 'dark', 'vintage', 'roma', 'shine', 'infographic', 'macarons'],\n",
"function(echarts){\n",
" \n",
@@ -1607,27 +2217,23 @@
" return eval('(' + str + ')');\n",
" }\n",
" \n",
- " var myChart = echarts.init(document.getElementById(\"27924755-af14-41b8-9e7c-8d1509eea87f\"),\"shine\");\n",
+ " var myChart = echarts.init(document.getElementById(\"2d033a74-89bc-4946-898b-59601b3c38d2\"),\"shine\");\n",
" \n",
" var option = {\n",
- " \"legend\": {\n",
- " \"data\": [\n",
- " \"Africa\",\n",
- " \"Americas\",\n",
- " \"Asia\",\n",
- " \"Europe\"\n",
- " ]\n",
- " },\n",
" \"xAxis\": {\n",
" \"data\": [\n",
+ " 1952,\n",
+ " 1957,\n",
+ " 1962,\n",
+ " 1967,\n",
" 1972,\n",
+ " 1977,\n",
" 1982,\n",
+ " 1987,\n",
+ " 1992,\n",
" 1997,\n",
- " 1957,\n",
- " 1952,\n",
- " 1962,\n",
- " 2007,\n",
- " 2002\n",
+ " 2002,\n",
+ " 2007\n",
" ]\n",
" },\n",
" \"tooltip\": {\n",
@@ -1635,14 +2241,13 @@
" \"type\": \"\"\n",
" }\n",
" },\n",
- " \"graph\": {\n",
- " \"color\": [\n",
- " \"#016c59\",\n",
- " \"#1c9099\",\n",
- " \"#67a9cf\",\n",
- " \"#a6bddb\",\n",
- " \"#d0d1e6\",\n",
- " \"#f6eff7\"\n",
+ " \"legend\": {\n",
+ " \"data\": [\n",
+ " \"Africa\",\n",
+ " \"Americas\",\n",
+ " \"Asia\",\n",
+ " \"Europe\",\n",
+ " \"Oceania\"\n",
" ]\n",
" },\n",
" \"series\": [\n",
@@ -1651,61 +2256,98 @@
" 2,\n",
" 2,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
+ " 1,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
- " 1\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Africa\"\n",
+ " \"name\": \"Africa\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
+ " 1,\n",
" 2,\n",
+ " 0,\n",
" 2,\n",
+ " 0,\n",
" 1,\n",
" 1,\n",
" 1,\n",
" 1,\n",
+ " 3,\n",
" 1,\n",
- " 1\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Americas\"\n",
+ " \"name\": \"Americas\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
" 2,\n",
+ " 1,\n",
+ " 0,\n",
" 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
" 1,\n",
- " 1\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Asia\"\n",
+ " \"name\": \"Asia\",\n",
+ " \"type\": \"bar\"\n",
" },\n",
" {\n",
" \"data\": [\n",
- " 4,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
- " 2,\n",
+ " 0,\n",
+ " 1,\n",
" 1,\n",
" 1,\n",
+ " 1,\n",
+ " 2,\n",
+ " 1,\n",
+ " 0,\n",
+ " 2,\n",
+ " 0,\n",
+ " 3,\n",
" 1\n",
" ],\n",
" \"stack\": \"continent\",\n",
- " \"type\": \"bar\",\n",
- " \"name\": \"Europe\"\n",
+ " \"name\": \"Europe\",\n",
+ " \"type\": \"bar\"\n",
+ " },\n",
+ " {\n",
+ " \"data\": [\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 1,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0,\n",
+ " 0\n",
+ " ],\n",
+ " \"stack\": \"continent\",\n",
+ " \"name\": \"Oceania\",\n",
+ " \"type\": \"bar\"\n",
" }\n",
" ],\n",
- " \"backgroundColor\": \"#F0F8FF\",\n",
+ " \"yAxis\": {},\n",
" \"title\": {\n",
" \"text\": \"\"\n",
" },\n",
@@ -1717,7 +2359,17 @@
" \"#d0d1e6\",\n",
" \"#f6eff7\"\n",
" ],\n",
- " \"yAxis\": {}\n",
+ " \"graph\": {\n",
+ " \"color\": [\n",
+ " \"#016c59\",\n",
+ " \"#1c9099\",\n",
+ " \"#67a9cf\",\n",
+ " \"#a6bddb\",\n",
+ " \"#d0d1e6\",\n",
+ " \"#f6eff7\"\n",
+ " ]\n",
+ " },\n",
+ " \"backgroundColor\": \"#F0F8FF\"\n",
"};\n",
" option['tooltip']['formatter'] = parseFunction(option['tooltip']['formatter']);\n",
" myChart.setOption(option);\n",
@@ -1727,7 +2379,7 @@
"});\n"
],
"text/plain": [
- "<krisk.chart.Chart at 0x10c444da0>"
+ "<krisk.chart.Chart at 0x10cb3a0f0>"
]
},
"execution_count": 18,
| Consistent API parameters with Pandas and Matplotlib (BREAKING API)
This will break API change.
* Convert size to s
* Convert category to c | napjon/krisk | diff --git a/krisk/tests/conftest.py b/krisk/tests/conftest.py
index a1c5f29..4d132f1 100644
--- a/krisk/tests/conftest.py
+++ b/krisk/tests/conftest.py
@@ -28,8 +28,8 @@ def gap_chart(gapminder):
gapminder[gapminder.year == 2007],
'lifeExp',
'gdpPercap',
- size='pop',
- category='continent')
+ s='pop',
+ c='continent')
p.set_size(width=1000, height=500)
p.set_tooltip_format(
['country', 'lifeExp', 'gdpPercap', 'pop', 'continent'])
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 5b01429..2fecdff 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -11,7 +11,7 @@ def test_bar(gapminder):
p = kk.bar(gapminder,
'year',
y='pop',
- category='continent',
+ c='continent',
how='mean',
stacked=True,
annotate=True)
@@ -22,12 +22,23 @@ def test_bar(gapminder):
p = kk.bar(gapminder,
'year',
y='pop',
- category='continent',
+ c='continent',
how='mean',
stacked=True,
annotate='all')
assert p.get_option() == true_option
+ p = kk.bar(gapminder,'continent',y='gdpPercap',how='mean')
+ assert p.get_option() == {'legend': {'data': []},
+ 'series': [{'data': [4426.026, 8955.554, 802.675, 3255.367, 19980.596],
+ 'name': 'continent',
+ 'type': 'bar'}],
+ 'title': {'text': ''},
+ 'tooltip': {'axisPointer': {'type': ''}},
+ 'xAxis': {'data': ['Africa', 'Americas', 'Asia', 'Europe', 'Oceania']},
+ 'yAxis': {}}
+
+
def test_line(gapminder):
@@ -36,7 +47,7 @@ def test_line(gapminder):
gapminder,
'year',
y='lifeExp',
- category='continent',
+ c='continent',
how='mean',
stacked=True,
area=True,
@@ -51,7 +62,7 @@ def test_hist(gapminder):
p = kk.hist(
gapminder,
'lifeExp',
- category='continent',
+ c='continent',
bins=20,
normed=True,
stacked=True)
@@ -67,12 +78,12 @@ def test_scatter(gapminder):
gapminder[gapminder.year == 1952],
'lifeExp',
'gdpPercap',
- size='pop',
- category='continent')
+ s='pop',
+ c='continent')
assert p.get_option() == true_option
# Scatter
true_option = json.load(open(DATA_DIR + '/scatter_single.json', 'r'))
p = kk.scatter(
- gapminder[gapminder.year == 1952], 'lifeExp', 'gdpPercap', size='pop')
+ gapminder[gapminder.year == 1952], 'lifeExp', 'gdpPercap', s='pop')
assert p.get_option() == true_option
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": -1,
"issue_text_score": 2,
"test_score": -1
},
"num_modified_files": 9
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@7aeaac1b566ffe71d7282c41eb09567845eecf01#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 735 | [
"notebooks/resync-reproducible.ipynb",
"krisk/plot/__init__.py",
"notebooks/Intro.ipynb",
"krisk/plot/make_chart.py",
"krisk/chart.py",
"krisk/plot/bar_line.py",
"krisk/plot/points.py",
"notebooks/themes-colors.ipynb",
"notebooks/legend-title-toolbox.ipynb"
]
| [
"notebooks/resync-reproducible.ipynb",
"krisk/plot/__init__.py",
"notebooks/Intro.ipynb",
"krisk/plot/make_chart.py",
"krisk/chart.py",
"krisk/plot/bar_line.py",
"krisk/plot/points.py",
"notebooks/themes-colors.ipynb",
"notebooks/legend-title-toolbox.ipynb"
]
|
ipython__ipykernel-188 | 6c63f53000e07b7f4f8542501dc94b42578f1d15 | 2016-09-01 11:53:24 | 6c63f53000e07b7f4f8542501dc94b42578f1d15 | diff --git a/ipykernel/iostream.py b/ipykernel/iostream.py
index 5c7f96e..dae05af 100644
--- a/ipykernel/iostream.py
+++ b/ipykernel/iostream.py
@@ -205,8 +205,6 @@ class OutStream(object):
warnings.warn("pipe argument to OutStream is deprecated and ignored",
DeprecationWarning)
self.encoding = 'UTF-8'
- # This is necessary for compatibility with Python built-in streams
- self.errors = None
self.session = session
if not isinstance(pub_thread, IOPubThread):
# Backward-compat: given socket, not thread. Wrap in a thread.
diff --git a/ipykernel/zmqshell.py b/ipykernel/zmqshell.py
index b209ee6..cb17c70 100644
--- a/ipykernel/zmqshell.py
+++ b/ipykernel/zmqshell.py
@@ -68,7 +68,7 @@ class ZMQDisplayPublisher(DisplayPublisher):
# thread_local:
# An attribute used to ensure the correct output message
# is processed. See ipykernel Issue 113 for a discussion.
- thread_local = Any()
+ _thread_local = Any()
def set_parent(self, parent):
"""Set the parent for outbound messages."""
@@ -79,14 +79,17 @@ class ZMQDisplayPublisher(DisplayPublisher):
sys.stdout.flush()
sys.stderr.flush()
- @default('thread_local')
+ @default('_thread_local')
def _default_thread_local(self):
- """ Initialises the threadlocal attribute and
- gives it a 'hooks' attribute.
- """
- loc = local()
- loc.hooks = []
- return loc
+ """Initialize our thread local storage"""
+ return local()
+
+ @property
+ def _hooks(self):
+ if not hasattr(self._thread_local, 'hooks'):
+ # create new list for a new thread
+ self._thread_local.hooks = []
+ return self._thread_local.hooks
def publish(self, data, metadata=None, source=None):
self._flush_streams()
@@ -108,7 +111,7 @@ class ZMQDisplayPublisher(DisplayPublisher):
# Each transform either returns a new
# message or None. If None is returned,
# the message has been 'used' and we return.
- for hook in self.thread_local.hooks:
+ for hook in self._hooks:
msg = hook(msg)
if msg is None:
return
@@ -143,7 +146,7 @@ class ZMQDisplayPublisher(DisplayPublisher):
Returning `None` will halt that execution path, and
session.send will not be called.
"""
- self.thread_local.hooks.append(hook)
+ self._hooks.append(hook)
def unregister_hook(self, hook):
"""
@@ -160,7 +163,7 @@ class ZMQDisplayPublisher(DisplayPublisher):
found.
"""
try:
- self.thread_local.hooks.remove(hook)
+ self._hooks.remove(hook)
return True
except ValueError:
return False
| 'thread._local' object has no attribute 'hooks'
I just installed the newest versions of ipython and GraphLab and noticed this error. Here is the relevant code and the traceback.
# sales is an SFrame
graphlab.canvas.set_target('ipynb')
sales.show(view="Scatter Plot", x="CrimeRate", y="HousePrice")
Traceback:
```
AttributeErrorTraceback (most recent call last)
<ipython-input-7-6f077ba0dabb> in <module>()
1 graphlab.canvas.set_target('ipynb')
----> 2 sales.show(view="Scatter Plot", x="CrimeRate", y="HousePrice")
/usr/local/lib/python2.7/site-packages/graphlab/data_structures/sframe.pyc in show(self, columns, view, x, y)
4950 __LOGGER__.warn("Column selection for SFrame.show is deprecated. To show only certain columns, use the sf[['column1', 'column2']] syntax or construct a new SFrame with the desired columns.")
4951 from ..visualization.show import show
-> 4952 show(self, view=view, x=x, y=y)
4953
4954 def pack_columns(self, columns=None, column_prefix=None, dtype=list,
/usr/local/lib/python2.7/site-packages/multipledispatch/dispatcher.pyc in __call__(self, *args, **kwargs)
162 self._cache[types] = func
163 try:
--> 164 return func(*args, **kwargs)
165
166 except MDNotImplementedError:
/usr/local/lib/python2.7/site-packages/graphlab/canvas/glc_display_dispatch.pyc in show(obj, **kwargs)
8 import graphlab.canvas.views.sframe
9 graphlab.canvas.inspect.find_vars(obj)
---> 10 return graphlab.canvas.show(graphlab.canvas.views.sframe.SFrameView(obj, params=kwargs))
11
12
/usr/local/lib/python2.7/site-packages/graphlab/canvas/utils.pyc in show(variable)
129 get_target().state.set_selected_variable(variable)
130 variable.validate_js_component_name(variable.get_js_component())
--> 131 return get_target().show()
132
133 def _get_id(ref):
/usr/local/lib/python2.7/site-packages/graphlab/canvas/target.pyc in show(self, variable)
175 IPython.core.display.Javascript(
176 data=self.__makeJS(_to_json(data), view.get_js_file(), view.get_js_component()),
--> 177 css=['//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.1.0/css/font-awesome.min.css', self.get_asset_url() + 'css/canvas.css']
178 )
179 )
/usr/local/lib/python2.7/site-packages/IPython/core/display.pyc in display_javascript(*objs, **kwargs)
328 Metadata to be associated with the specific mimetype output.
329 """
--> 330 _display_mimetype('application/javascript', objs, **kwargs)
331
332
/usr/local/lib/python2.7/site-packages/IPython/core/display.pyc in _display_mimetype(mimetype, objs, raw, metadata)
74 # turn list of pngdata into list of { 'image/png': pngdata }
75 objs = [ {mimetype: obj} for obj in objs ]
---> 76 display(*objs, raw=raw, metadata=metadata, include=[mimetype])
77
78 #-----------------------------------------------------------------------------
/usr/local/lib/python2.7/site-packages/IPython/core/display.pyc in display(*objs, **kwargs)
169 # kwarg-specified metadata gets precedence
170 _merge(md_dict, metadata)
--> 171 publish_display_data(data=format_dict, metadata=md_dict)
172
173
/usr/local/lib/python2.7/site-packages/IPython/core/display.pyc in publish_display_data(data, metadata, source)
119 InteractiveShell.instance().display_pub.publish(
120 data=data,
--> 121 metadata=metadata,
122 )
123
/usr/local/lib/python2.7/site-packages/ipykernel/zmqshell.pyc in publish(self, data, metadata, source)
109 # message or None. If None is returned,
110 # the message has been 'used' and we return.
--> 111 for hook in self.thread_local.hooks:
112 msg = hook(msg)
113 if msg is None:
AttributeError: 'thread._local' object has no attribute 'hooks'
``` | ipython/ipykernel | diff --git a/ipykernel/tests/test_zmq_shell.py b/ipykernel/tests/test_zmq_shell.py
index 6743db6..8426fa2 100644
--- a/ipykernel/tests/test_zmq_shell.py
+++ b/ipykernel/tests/test_zmq_shell.py
@@ -5,7 +5,14 @@
# Distributed under the terms of the Modified BSD License.
import os
+try:
+ from queue import Queue
+except ImportError:
+ # py2
+ from Queue import Queue
+from threading import Thread
import unittest
+
from traitlets import Int
import zmq
@@ -88,12 +95,24 @@ class ZMQDisplayPublisherTests(unittest.TestCase):
self.assertEqual(self.disp_pub.session, self.session)
self.assertEqual(self.disp_pub.pub_socket, self.socket)
- def test_thread_local_default(self):
+ def test_thread_local_hooks(self):
"""
Confirms that the thread_local attribute is correctly
initialised with an empty list for the display hooks
"""
- self.assertEqual(self.disp_pub.thread_local.hooks, [])
+ self.assertEqual(self.disp_pub._hooks, [])
+ def hook(msg):
+ return msg
+ self.disp_pub.register_hook(hook)
+ self.assertEqual(self.disp_pub._hooks, [hook])
+
+ q = Queue()
+ def set_thread_hooks():
+ q.put(self.disp_pub._hooks)
+ t = Thread(target=set_thread_hooks)
+ t.start()
+ thread_hooks = q.get(timeout=10)
+ self.assertEqual(thread_hooks, [])
def test_publish(self):
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 4.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"nose-warnings-filters",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
backcall==0.2.0
certifi==2021.5.30
decorator==5.1.1
entrypoints==0.4
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/ipython/ipykernel.git@6c63f53000e07b7f4f8542501dc94b42578f1d15#egg=ipykernel
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
jupyter-client==7.1.2
jupyter-core==4.9.2
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nest-asyncio==1.6.0
nose==1.3.7
nose-warnings-filters==0.1.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
pyzmq==25.1.2
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tornado==6.1
traitlets==4.3.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
wcwidth==0.2.13
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: ipykernel
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- backcall==0.2.0
- decorator==5.1.1
- entrypoints==0.4
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jupyter-client==7.1.2
- jupyter-core==4.9.2
- nest-asyncio==1.6.0
- nose==1.3.7
- nose-warnings-filters==0.1.5
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- pygments==2.14.0
- python-dateutil==2.9.0.post0
- pyzmq==25.1.2
- six==1.17.0
- tornado==6.1
- traitlets==4.3.3
- wcwidth==0.2.13
prefix: /opt/conda/envs/ipykernel
| [
"ipykernel/tests/test_zmq_shell.py::ZMQDisplayPublisherTests::test_thread_local_hooks"
]
| []
| [
"ipykernel/tests/test_zmq_shell.py::ZMQDisplayPublisherTests::test_display_hook_halts_send",
"ipykernel/tests/test_zmq_shell.py::ZMQDisplayPublisherTests::test_display_hook_return_calls_send",
"ipykernel/tests/test_zmq_shell.py::ZMQDisplayPublisherTests::test_display_publisher_creation",
"ipykernel/tests/test_zmq_shell.py::ZMQDisplayPublisherTests::test_publish",
"ipykernel/tests/test_zmq_shell.py::ZMQDisplayPublisherTests::test_unregister_hook"
]
| []
| BSD 3-Clause "New" or "Revised" License | 736 | [
"ipykernel/iostream.py",
"ipykernel/zmqshell.py"
]
| [
"ipykernel/iostream.py",
"ipykernel/zmqshell.py"
]
|
|
zalando-stups__senza-339 | 0429d0ed3bd768baa43b7bd595e08c5aed44bfc8 | 2016-09-01 12:18:21 | a72ed3ba8f330170d7dc9e923bd18294a03186af | diff --git a/senza/components/elastic_load_balancer.py b/senza/components/elastic_load_balancer.py
index fcaff24..74c10c5 100644
--- a/senza/components/elastic_load_balancer.py
+++ b/senza/components/elastic_load_balancer.py
@@ -164,6 +164,7 @@ def component_elastic_load_balancer(definition,
definition["Resources"][lb_name] = {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties": {
+ "Scheme": loadbalancer_scheme,
"Subnets": {"Fn::FindInMap": [loadbalancer_subnet_map, {"Ref": "AWS::Region"}, "Subnets"]},
"HealthCheck": {
"HealthyThreshold": "2",
| ELB should have Scheme "internal" if "Scheme" is not defined
The ELB "Scheme" property should default to "internal" if it's not set. | zalando-stups/senza | diff --git a/tests/test_components.py b/tests/test_components.py
index 207fca2..651b318 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -13,6 +13,7 @@ from senza.components.auto_scaling_group import (component_auto_scaling_group,
to_iso8601_duration)
from senza.components.elastic_load_balancer import (component_elastic_load_balancer,
get_load_balancer_name)
+from senza.components.elastic_load_balancer_v2 import component_elastic_load_balancer_v2
from senza.components.iam_role import component_iam_role, get_merged_policies
from senza.components.redis_cluster import component_redis_cluster
from senza.components.redis_node import component_redis_node
@@ -976,3 +977,43 @@ def test_max_description_length():
component_configuration(definition, configuration, args, info, False, AccountArguments('dummyregion'))
assert definition['Description'].startswith('My Stack (Param1: my param value, SecondParam: 1234567890')
assert 0 < len(definition['Description']) <= 1024
+
+
+def test_component_load_balancer_default_internal_scheme(monkeypatch):
+ configuration = {
+ "Name": "test_lb",
+ "SecurityGroups": "",
+ "HTTPPort": "9999"
+ }
+ info = {'StackName': 'foobar', 'StackVersion': '0.1'}
+ definition = {"Resources": {}}
+
+ args = MagicMock()
+ args.region = "foo"
+
+ mock_string_result = MagicMock()
+ mock_string_result.return_value = "foo"
+ monkeypatch.setattr('senza.components.elastic_load_balancer.resolve_security_groups', mock_string_result)
+
+ result = component_elastic_load_balancer(definition, configuration, args, info, False, MagicMock())
+ assert 'internal' == result["Resources"]["test_lb"]["Properties"]["Scheme"]
+
+
+def test_component_load_balancer_v2_default_internal_scheme(monkeypatch):
+ configuration = {
+ "Name": "test_lb",
+ "SecurityGroups": "",
+ "HTTPPort": "9999"
+ }
+ info = {'StackName': 'foobar', 'StackVersion': '0.1'}
+ definition = {"Resources": {}}
+
+ args = MagicMock()
+ args.region = "foo"
+
+ mock_string_result = MagicMock()
+ mock_string_result.return_value = "foo"
+ monkeypatch.setattr('senza.components.elastic_load_balancer_v2.resolve_security_groups', mock_string_result)
+
+ result = component_elastic_load_balancer_v2(definition, configuration, args, info, False, MagicMock())
+ assert 'internal' == result["Resources"]["test_lb"]["Properties"]["Scheme"]
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==1.15.0
dnspython3==1.15.0
exceptiongroup==1.2.2
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jmespath==1.0.1
packaging==24.2
pluggy==1.5.0
pystache==0.6.8
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
s3transfer==0.11.4
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@0429d0ed3bd768baa43b7bd595e08c5aed44bfc8#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==2.2.1
typing==3.7.4.3
urllib3==1.26.20
zipp==3.21.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==1.15.0
- dnspython3==1.15.0
- exceptiongroup==1.2.2
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jmespath==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pystache==0.6.8
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- s3transfer==0.11.4
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==2.2.1
- typing==3.7.4.3
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/senza
| [
"tests/test_components.py::test_component_load_balancer_default_internal_scheme"
]
| [
"tests/test_components.py::test_weighted_dns_load_balancer",
"tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains",
"tests/test_components.py::test_check_docker_image_exists"
]
| [
"tests/test_components.py::test_invalid_component",
"tests/test_components.py::test_component_iam_role",
"tests/test_components.py::test_get_merged_policies",
"tests/test_components.py::test_component_load_balancer_healthcheck",
"tests/test_components.py::test_component_load_balancer_idletimeout",
"tests/test_components.py::test_component_load_balancer_cert_arn",
"tests/test_components.py::test_component_load_balancer_http_only",
"tests/test_components.py::test_component_load_balancer_namelength",
"tests/test_components.py::test_component_stups_auto_configuration",
"tests/test_components.py::test_component_stups_auto_configuration_vpc_id",
"tests/test_components.py::test_component_redis_node",
"tests/test_components.py::test_component_redis_cluster",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref",
"tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties",
"tests/test_components.py::test_component_auto_scaling_group_custom_tags",
"tests/test_components.py::test_component_auto_scaling_group_configurable_properties2",
"tests/test_components.py::test_component_auto_scaling_group_metric_type",
"tests/test_components.py::test_component_auto_scaling_group_optional_metric_type",
"tests/test_components.py::test_to_iso8601_duration",
"tests/test_components.py::test_normalize_asg_success",
"tests/test_components.py::test_normalize_network_threshold",
"tests/test_components.py::test_check_application_id",
"tests/test_components.py::test_check_application_version",
"tests/test_components.py::test_get_load_balancer_name",
"tests/test_components.py::test_weighted_dns_load_balancer_v2",
"tests/test_components.py::test_max_description_length",
"tests/test_components.py::test_component_load_balancer_v2_default_internal_scheme"
]
| []
| Apache License 2.0 | 737 | [
"senza/components/elastic_load_balancer.py"
]
| [
"senza/components/elastic_load_balancer.py"
]
|
|
mpdavis__python-jose-33 | 00827f66c371f40d3295d44bfa889f25ec2ca72f | 2016-09-01 14:32:16 | 00827f66c371f40d3295d44bfa889f25ec2ca72f | codecov-io: ## [Current coverage](https://codecov.io/gh/mpdavis/python-jose/pull/33?src=pr) is 95.60% (diff: 93.10%)
> Merging [#33](https://codecov.io/gh/mpdavis/python-jose/pull/33?src=pr) into [master](https://codecov.io/gh/mpdavis/python-jose/branch/master?src=pr) will decrease coverage by **0.18%**
```diff
@@ master #33 diff @@
==========================================
Files 7 7
Lines 498 523 +25
Methods 0 0
Messages 0 0
Branches 0 0
==========================================
+ Hits 477 500 +23
- Misses 21 23 +2
Partials 0 0
```
> Powered by [Codecov](https://codecov.io?src=pr). Last update [00827f6...b1cc5f0](https://codecov.io/gh/mpdavis/python-jose/compare/00827f66c371f40d3295d44bfa889f25ec2ca72f...b1cc5f01ec7b7b8f7b33306b628c6c32daaf587d?src=pr) | diff --git a/jose/jwk.py b/jose/jwk.py
index 21398e4..4f69f51 100644
--- a/jose/jwk.py
+++ b/jose/jwk.py
@@ -12,6 +12,7 @@ import Crypto.Hash.SHA512
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
+from Crypto.Util.asn1 import DerSequence
import ecdsa
@@ -188,18 +189,24 @@ class RSAKey(Key):
return
if isinstance(key, dict):
- self.prepared_key = self._process_jwk(key)
+ self._process_jwk(key)
return
if isinstance(key, six.string_types):
if isinstance(key, six.text_type):
key = key.encode('utf-8')
+ if key.startswith(b'-----BEGIN CERTIFICATE-----'):
+ try:
+ self._process_cert(key)
+ except Exception as e:
+ raise JWKError(e)
+ return
+
try:
self.prepared_key = RSA.importKey(key)
except Exception as e:
raise JWKError(e)
-
return
raise JWKError('Unable to parse an RSA_JWK from key: %s' % key)
@@ -214,6 +221,16 @@ class RSAKey(Key):
self.prepared_key = RSA.construct((n, e))
return self.prepared_key
+ def _process_cert(self, key):
+ pemLines = key.replace(b' ', b'').split()
+ certDer = base64url_decode(b''.join(pemLines[1:-1]))
+ certSeq = DerSequence()
+ certSeq.decode(certDer)
+ tbsSeq = DerSequence()
+ tbsSeq.decode(certSeq[0])
+ self.prepared_key = RSA.importKey(tbsSeq[6])
+ return
+
def sign(self, msg):
try:
return PKCS1_v1_5.new(self.prepared_key).sign(self.hash_alg.new(msg))
diff --git a/jose/jws.py b/jose/jws.py
index dc6ad5c..7d49a4c 100644
--- a/jose/jws.py
+++ b/jose/jws.py
@@ -214,15 +214,34 @@ def _sig_matches_keys(keys, signing_input, signature, alg):
def _get_keys(key):
- if 'keys' in key: # JWK Set per RFC 7517
- if not isinstance(key, Mapping): # Caller didn't JSON-decode
- key = json.loads(key)
+
+ try:
+ key = json.loads(key)
+ except Exception:
+ pass
+
+ # JWK Set per RFC 7517
+ if 'keys' in key:
return key['keys']
+
+ # Individual JWK per RFC 7517
+ elif 'kty' in key:
+ return (key,)
+
+ # Some other mapping. Firebase uses just dict of kid, cert pairs
+ elif isinstance(key, Mapping):
+ values = key.values()
+ if values:
+ return values
+ return (key,)
+
# Iterable but not text or mapping => list- or tuple-like
elif (isinstance(key, Iterable) and
not (isinstance(key, six.string_types) or isinstance(key, Mapping))):
return key
- else: # Scalar value, wrap in tuple.
+
+ # Scalar value, wrap in tuple.
+ else:
return (key,)
| Add message about lack of X.509 certificate support in documentation
I get this error when using algorithms='RS256' on google app engine.
Full stack trace
```
Traceback (most recent call last):
File "/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine/google/appengine/tools/devappserver2/python/request_handler.py", line 226, in handle_interactive_request
exec(compiled_code, self._command_globals)
File "<string>", line 12, in <module>
File "lib/jose/jwt.py", line 121, in decode
payload = jws.verify(token, key, algorithms, verify=verify_signature)
File "lib/jose/jws.py", line 75, in verify
_verify_signature(signing_input, header, signature, key, algorithms)
File "lib/jose/jws.py", line 218, in _verify_signature
key = jwk.construct(key, alg)
File "lib/jose/jwk.py", line 65, in construct
return RSAKey(key_data, algorithm)
File "lib/jose/jwk.py", line 201, in __init__
raise JWKError(e)
JWKError: RSA key format is not supported
``` | mpdavis/python-jose | diff --git a/tests/algorithms/test_RSA.py b/tests/algorithms/test_RSA.py
index 026674f..ae45c63 100644
--- a/tests/algorithms/test_RSA.py
+++ b/tests/algorithms/test_RSA.py
@@ -85,3 +85,8 @@ class TestRSAAlgorithm:
key = object()
with pytest.raises(JOSEError):
RSAKey(key, ALGORITHMS.RS256)
+
+ def test_bad_cert(self):
+ key = '-----BEGIN CERTIFICATE-----'
+ with pytest.raises(JOSEError):
+ RSAKey(key, ALGORITHMS.RS256)
diff --git a/tests/test_firebase.py b/tests/test_firebase.py
new file mode 100644
index 0000000..041739b
--- /dev/null
+++ b/tests/test_firebase.py
@@ -0,0 +1,42 @@
+
+import json
+
+from jose import jwt
+
+firebase_certs = {
+ "6f83ab6e516e718fba9ddeb6647fd5fb752a151b": "-----BEGIN CERTIFICATE-----\nMIIDHDCCAgSgAwIBAgIIP5V2bjX2bXUwDQYJKoZIhvcNAQEFBQAwMTEvMC0GA1UE\nAxMmc2VjdXJldG9rZW4uc3lzdGVtLmdzZXJ2aWNlYWNjb3VudC5jb20wHhcNMTYw\nODMxMDA0NTI2WhcNMTYwOTAzMDExNTI2WjAxMS8wLQYDVQQDEyZzZWN1cmV0b2tl\nbi5zeXN0ZW0uZ3NlcnZpY2VhY2NvdW50LmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\nggEPADCCAQoCggEBAKHHtOMXBD+0YTtZHuzFrERiiwa+D6Ybq4SUHlicgRPV3Uk2\nvnTOqg1EhxshEXqjkAQbbRop9hhHTc+p8rBxgYGuLcZsBhGrnRqU6FnTTiWB1x5V\nvOfCkPE60W07gi8p+HyB8cqw1Tz2LnRUw/15888CrspVeumtNUkhXSRKzeS2BI4l\nkuOMkqmsMSu1yB5IZm5meMyta1uhJnP93jKmdar19RkZXOlFcT+fsSY2FPuqvDvX\nssChgZgNV5qtk0CIzexmFJaUFzpKE/RxqdIJooB1H83fUBGVK+9v3Ko+BI+GEvUc\nxIGAEWu2KrbjwPNzzC3/UV9aSfHEOJxQoutPviECAwEAAaM4MDYwDAYDVR0TAQH/\nBAIwADAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwIwDQYJ\nKoZIhvcNAQEFBQADggEBAIHOiqxXm1IcuXE87ELyKYDG0/gZPzCHz98h/x0LExrs\nd0bOYOIA08rt6qllmP24oT3hQt86HmDb932pm/fjaLL68x81TjYq6cFO0JxOzts+\nY+9XxkdP8Qu7UJ8Dx+rRvDN1MUxLTvBVXdamhkhDusx7PB5kK1ixWtf91qrl/J9e\nUYQBnJ4E9wI8U5HVkW3IBWvsFt/+gMO1EcoNBdB2cY/4N3l3oxm5PSNDS4DTEs2f\nAYZDqo6PJt2tTRGSmvLBKSCqcT7eWBbIwBht3Uw8CvOMbVYGBWjbFeua3Q3fe+p7\n7UbFOLIvSGR516kyZqxy9pLoA9+2TvbpYwWu6mLCZtg=\n-----END CERTIFICATE-----\n",
+ "fc2da7fa53d92e3bcba8a17e74b34da9dd585065": "-----BEGIN CERTIFICATE-----\nMIIDHDCCAgSgAwIBAgIINfZYQW9uekMwDQYJKoZIhvcNAQEFBQAwMTEvMC0GA1UE\nAxMmc2VjdXJldG9rZW4uc3lzdGVtLmdzZXJ2aWNlYWNjb3VudC5jb20wHhcNMTYw\nODI5MDA0NTI2WhcNMTYwOTAxMDExNTI2WjAxMS8wLQYDVQQDEyZzZWN1cmV0b2tl\nbi5zeXN0ZW0uZ3NlcnZpY2VhY2NvdW50LmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\nggEPADCCAQoCggEBAMvfJ5DY7lV4txW0zn9ayMxwAp5BzUhyIbuZkmsmMLRrNl+i\nid4lawojB846YtcTPZLD/5QpXRumAAUI5NA023fxaUdriM25zewpSnZWs6eUf0O6\nONES8Xk4WD2fbyPz6cgnsFEfMslNd3NypRiB9fVG6LFj6TFHC64o/YEeQB2dwkJZ\nXknKSEkFJSRC83TiHUlWzaRjmTdGRrvGEWHxr+xJltP8tPPlJUKu2VadgMbGlkKU\n5dBRhvWwZZW0zJupuKzd27O2lPkxfbx9vrUbsfqZcN4OY5Xg+ijQJVTv0/qcplsd\nPZ9Uui0QsBOPbrIO+5/Tq9FIBqxzUlpWwetv6pMCAwEAAaM4MDYwDAYDVR0TAQH/\nBAIwADAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwIwDQYJ\nKoZIhvcNAQEFBQADggEBALqWwzIQSK94hxTmxlA+RoyMvb8fyTcECM2qY+n+PDb5\nMvt8zqM6AwGjK1hvcUg08BEsnqqRqC81dkSEReS9KCoTY/oQ0sCCpwL3QP3puoxp\nfZU9CSwvnrFTJjC2Q/b8BlWta4CSDwpxpy/K3wm6tRn5ED4rPcP4FRqWU5jyHiug\nRrNkKiG7TeBBvQ3ZlF9K4JSx1yn9g7EvPBcmygop5FIKI1uS+URxeyavtlwfnTTs\nDtRVV/x0LDkHoJ2Agy7l2MqT7eoRKh5VNucQONLrcZT1AY02eZi/WVSjgpzC48eP\nV9xlcgIaRbS/JDULYgW5h0uVdRNqSVGJ6yBLXT2uaBA=\n-----END CERTIFICATE-----\n",
+ "8226146523a1b8894ba03ad525667b9475d393f5": "-----BEGIN CERTIFICATE-----\nMIIDHDCCAgSgAwIBAgIIWAKW/IRYcAwwDQYJKoZIhvcNAQEFBQAwMTEvMC0GA1UE\nAxMmc2VjdXJldG9rZW4uc3lzdGVtLmdzZXJ2aWNlYWNjb3VudC5jb20wHhcNMTYw\nODMwMDA0NTI2WhcNMTYwOTAyMDExNTI2WjAxMS8wLQYDVQQDEyZzZWN1cmV0b2tl\nbi5zeXN0ZW0uZ3NlcnZpY2VhY2NvdW50LmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\nggEPADCCAQoCggEBALJAt+ws+XNdDnDSYFp0YnQ5e8QqfMFrwp1l1r/mNSUF840I\nsbm50Z89aNpQgFOsORS/TYyHOeasiBhsJ5HWmfxo0PBTFifKI/OedLlltxZZCHa+\nEO/75Fbeydexokvfq6thT7C+xL45kJzbvKKNAw4WCAW6vwzyz+d/IrWCs9Iqa2ZX\nSiKnMPzPxZj6s+AhHPVxsR8dBMZ+NdK/wh9OcPWjLAxLEWBvd0Gp315bIVjVc9pV\neYcTapu/s4DSwgz4twovAyUziwsa+HJ+2FFNDZExf/XQUVBW5le8gGEdfl3kW1yu\nzdO6e1LwVTDAXULydPBL5lb6vTX2/ICmMzHXzIUCAwEAAaM4MDYwDAYDVR0TAQH/\nBAIwADAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwIwDQYJ\nKoZIhvcNAQEFBQADggEBAHyACbK1WfP9WspLfxvgJaNvwvygnP6cggLMvqq/dRxP\nlemvxfVaHK19sIXI6p0H4RBjI9FID5otzuyV54p1LBKgLIMTWcMYdL0wieeBg4Ud\nwgLEutIERpJU5oRMpSuZZYW75d0o+U1qOEhDswliqW1xofxNjRgNyrOYc6hMJzIS\ng9U4C4fplT/m3x5uQNjfzN/0CxfQf54WaD15w1lPGQAMJSWQDaxDTi41bW0Jwp4N\ndshOVn+btUUwL5TXDKaVkg1IHfG57FwvPJ5hKs4pbP5SIm+Sc1utIMMTBsRDRJVK\nyHaB5Bj9KcpQk7FvdT/KtzetPowhnxu9ow+KJcnP+7w=\n-----END CERTIFICATE-----\n",
+ "dd694b16c1b0ce31878a72dfa6c0cd4db3dd7edf": "-----BEGIN CERTIFICATE-----\nMIIDHDCCAgSgAwIBAgIIffru9igojE4wDQYJKoZIhvcNAQEFBQAwMTEvMC0GA1UE\nAxMmc2VjdXJldG9rZW4uc3lzdGVtLmdzZXJ2aWNlYWNjb3VudC5jb20wHhcNMTYw\nOTAxMDA0NTI2WhcNMTYwOTA0MDExNTI2WjAxMS8wLQYDVQQDEyZzZWN1cmV0b2tl\nbi5zeXN0ZW0uZ3NlcnZpY2VhY2NvdW50LmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\nggEPADCCAQoCggEBALaxpG4i7EgYpzaJsykaZzKmTTnm+kIPJBKb6t41ByUWt7J+\nnoUmlMiAVkXj7GAmc3usroJdYNZ8iMSpAWsIMgg7HLrqv/hMDY6+33rCqsvXD2tF\nCtJbRKzSMKu+AIc1uirkX3L3aHfKRzFbsr+8JqOigY3sVAb42FeATVHB0uCRyoE5\nfqxbt8nIPCFR/lFP51L0Wf5hGIH5kHJEuXx/7GOUQPN196P3sRI9jLv6nrWqGTAR\nVhuY9KXRz0jlVQeKZV5mWstcIXgxn2MfzfoHx4nuSNknJdrfHNp0r2XPf9Fre7Jd\n73slrVUwL2VWyZJdIBxJuYz2QjEQLzz+eJGyWcMCAwEAAaM4MDYwDAYDVR0TAQH/\nBAIwADAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwIwDQYJ\nKoZIhvcNAQEFBQADggEBAFTpRr9/cEkFHSbP5c7gr926kSxe1e9u9JjzR7l9Zv5l\nfskkLxIZcGlx/FoccWCwDPYl2Nh0Pr++TJ2hWVe/LpjppUxl4QVqfbVzyJezn2UR\nhLnGASQ0ckPoNTJhxjA6PVGtyXWB67oCDEgz/Pl6jjKEMtilyXh93rBmOpt6jq9e\nlwiZaa5wTUwIhHI972rLveYkssVkspmp4RIWHoh1nxUjYPMtcTCf9GFjEMLNdDBj\nYldCEzL34V60ObBSkzV3Zx7UNwoa80+SEJc9gQsBHVJbjXl7V9ODL52OHnciiEA8\n+d/xy2tBzdCD5EUR3aaYZYqQ16VV6LeU8FoxFn6/nxw=\n-----END CERTIFICATE-----\n",
+ "f4b0a5c73ad85a5da09f0e7f76463631339e0bbf": "-----BEGIN CERTIFICATE-----\nMIIDHDCCAgSgAwIBAgIIWDhBeVUilCcwDQYJKoZIhvcNAQEFBQAwMTEvMC0GA1UE\nAxMmc2VjdXJldG9rZW4uc3lzdGVtLmdzZXJ2aWNlYWNjb3VudC5jb20wHhcNMTYw\nNzAxMDA0NTI2WhcNMTYwNzA0MDExNTI2WjAxMS8wLQYDVQQDEyZzZWN1cmV0b2tl\nbi5zeXN0ZW0uZ3NlcnZpY2VhY2NvdW50LmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD\nggEPADCCAQoCggEBALRWaRmoi5EFyj5TBrUGKFI6uBJ4x9wSHq9tlRL1qmnwzdNb\nlDoeoh6Gw3H54IqM0XqjZZwgV5KXOQDOaoUpMBRH93x7Ma7NjhiDtpQr0JSbFIQL\nsIay/VxQ9gfa/I83HViEAbF1FXjhBKniwFKUv26mU30upZfsDQkHM8OLc/iXRvhA\nYn7S732Oefdv0kJ9t3h+WOGKGVkYfDaAGn5Uyzx+9oyyLY33borKOBBzphSQlZCr\nL569zTXvvLgvdStrsPGaiRGj64DGXD6LCg6acLJcMUvlVUO6THHJHVgp8pzlrPQG\n3B1rZk61lZqJyjK/nTi2tY9GPLfdxOfDAMjNoz8CAwEAAaM4MDYwDAYDVR0TAQH/\nBAIwADAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwIwDQYJ\nKoZIhvcNAQEFBQADggEBAIlFwO3C+X2+na0nLjR+zQYGHzZYqFe4V67P6ugFJxun\nxP8pyDCYAGer1mkDcIyDacdQ3natNp0xv61a0yk5tSmDYZbXZRTFdLkf/GzH+VmH\nEMl5W4TvxjAe/x2opm3QUaPC+jVlvndcP99FF5ULFp7/PwSTp8uzyrd/fhSFaxhq\nuIW4syNzDSpDItzUsiKCtsKGYX/qvd/cNP8cXlPd5rWTM4Sic9Baf2nXuHaZRkBr\nSJYcxdh8xbGsY1tC8TIgWot6GXtldNvXDLqRUwb2t6Rr3Tqhbc0CcHndTCuHXf0i\n0s9jU/UCrNhhmaD0rZLHQ2tuN6W/xpOHKtO0a8Lys7c=\n-----END CERTIFICATE-----\n"
+}
+
+firebase_token = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImY0YjBhNWM3M2FkODVhNWRhMDlmMGU3Zjc2NDYzNjMxMzM5ZTBiYmYifQ.eyJpc3MiOiJodHRwczovL3NlY3VyZXRva2VuLmdvb2dsZS5jb20vd2Vkb3RyYW5zZmVyLTIwMTYiLCJhdWQiOiJ3ZWRvdHJhbnNmZXItMjAxNiIsImF1dGhfdGltZSI6MTQ2NzM0NjI3MCwidXNlcl9pZCI6IjRjemVXVllIekNNVnN0WEZOYldHVXBKYmJTZzEiLCJzdWIiOiI0Y3plV1ZZSHpDTVZzdFhGTmJXR1VwSmJiU2cxIiwiaWF0IjoxNDY3MzQ2MjcwLCJleHAiOjE0NjczNDk4NzAsImVtYWlsIjoic2V1bkBjbXUuY29tIiwiZW1haWxfdmVyaWZpZWQiOmZhbHNlLCJmaXJlYmFzZSI6eyJpZGVudGl0aWVzIjp7InBhc3N3b3JkIjpbInNldW5AY211LmNvbSJdLCJlbWFpbCI6WyJzZXVuQGNtdS5jb20iXX19fQ.U-fYjx8rMm5tYV24r0uEcNQtIe3UKULxsHecLdGzTbi1v-VKzKDk_QPL26SPDoU8JUMY3nJQ1hOE9AapBrQck8NVUZSKFMD49XdtsyoN2kKdinpFR1hSxIE0L2dRStS7OZ8sGiX866lNa52Cr6TXSsnMD6N2P0OtVE5EeD1Nf-AiJ-gsaLrP4tBnmj1MNYhEYVHb6sAUrT3nEI9gWmeKcPWPfn76FGTdGWZ2mjdaeAG4RbuFL4cHdOISA_0HVLGJxuNyEHAHybDX8mVdNW_F4yzL3H-SmPFY5Kv3tCdBzpzhUKfNOnFFmf2ggFOJnDsqMp-TZaIPk6ce_ltqhQ0dnQ"
+
+
+class TestFirebase:
+
+ def test_individual_cert(self):
+ jwt.decode(
+ firebase_token,
+ firebase_certs["f4b0a5c73ad85a5da09f0e7f76463631339e0bbf"],
+ algorithms='RS256',
+ options={'verify_exp': False, 'verify_aud': False}
+ )
+
+ def test_certs_dict(self):
+ jwt.decode(
+ firebase_token,
+ firebase_certs,
+ algorithms='RS256',
+ options={'verify_exp': False, 'verify_aud': False}
+ )
+
+ def test_certs_string(self):
+ certs = json.dumps(firebase_certs)
+ jwt.decode(
+ firebase_token,
+ certs,
+ algorithms='RS256',
+ options={'verify_exp': False, 'verify_aud': False}
+ )
diff --git a/tests/test_jws.py b/tests/test_jws.py
index a57c7f4..7cc2bc2 100644
--- a/tests/test_jws.py
+++ b/tests/test_jws.py
@@ -213,6 +213,10 @@ class TestGetKeys(object):
key = '{"keys": [{}, {}]}'
assert [{}, {}] == jws._get_keys(key)
+ def test_RFC7517_jwk(self):
+ key = {'kty': 'hsa', 'k': 'secret', 'alg': 'HS256', 'use': 'sig'}
+ assert (key, ) == jws._get_keys(key)
+
def test_RFC7517_mapping(self):
key = {"keys": [{}, {}]}
assert [{}, {}] == jws._get_keys(key)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-runner"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
ecdsa==0.19.1
future==0.18.3
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycrypto==2.6.1
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-runner==5.3.2
-e git+https://github.com/mpdavis/python-jose.git@00827f66c371f40d3295d44bfa889f25ec2ca72f#egg=python_jose
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-jose
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- ecdsa==0.19.1
- future==0.18.3
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycrypto==2.6.1
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-runner==5.3.2
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-jose
| [
"tests/test_firebase.py::TestFirebase::test_individual_cert",
"tests/test_firebase.py::TestFirebase::test_certs_dict",
"tests/test_firebase.py::TestFirebase::test_certs_string"
]
| []
| [
"tests/algorithms/test_RSA.py::TestRSAAlgorithm::test_RSA_key",
"tests/algorithms/test_RSA.py::TestRSAAlgorithm::test_RSA_key_instance",
"tests/algorithms/test_RSA.py::TestRSAAlgorithm::test_string_secret",
"tests/algorithms/test_RSA.py::TestRSAAlgorithm::test_object",
"tests/algorithms/test_RSA.py::TestRSAAlgorithm::test_bad_cert",
"tests/test_jws.py::TestJWS::test_unicode_token",
"tests/test_jws.py::TestJWS::test_not_enough_segments",
"tests/test_jws.py::TestJWS::test_header_invalid_padding",
"tests/test_jws.py::TestJWS::test_header_not_json",
"tests/test_jws.py::TestJWS::test_claims_invalid_padding",
"tests/test_jws.py::TestJWS::test_claims_not_json",
"tests/test_jws.py::TestJWS::test_invalid_key",
"tests/test_jws.py::TestHMAC::testHMAC256",
"tests/test_jws.py::TestHMAC::testHMAC384",
"tests/test_jws.py::TestHMAC::testHMAC512",
"tests/test_jws.py::TestHMAC::test_wrong_alg",
"tests/test_jws.py::TestHMAC::test_wrong_key",
"tests/test_jws.py::TestHMAC::test_unsupported_alg",
"tests/test_jws.py::TestHMAC::test_add_headers",
"tests/test_jws.py::TestGetKeys::test_dict",
"tests/test_jws.py::TestGetKeys::test_custom_object",
"tests/test_jws.py::TestGetKeys::test_RFC7517_string",
"tests/test_jws.py::TestGetKeys::test_RFC7517_jwk",
"tests/test_jws.py::TestGetKeys::test_RFC7517_mapping",
"tests/test_jws.py::TestGetKeys::test_string",
"tests/test_jws.py::TestGetKeys::test_tuple",
"tests/test_jws.py::TestGetKeys::test_list",
"tests/test_jws.py::TestRSA::test_jwk_set",
"tests/test_jws.py::TestRSA::test_jwk_set_failure",
"tests/test_jws.py::TestRSA::test_RSA256",
"tests/test_jws.py::TestRSA::test_RSA384",
"tests/test_jws.py::TestRSA::test_RSA512",
"tests/test_jws.py::TestRSA::test_wrong_alg",
"tests/test_jws.py::TestRSA::test_wrong_key",
"tests/test_jws.py::TestEC::test_EC256",
"tests/test_jws.py::TestEC::test_EC384",
"tests/test_jws.py::TestEC::test_EC512",
"tests/test_jws.py::TestEC::test_wrong_alg",
"tests/test_jws.py::TestLoad::test_header_not_mapping",
"tests/test_jws.py::TestLoad::test_claims_not_mapping",
"tests/test_jws.py::TestLoad::test_signature_padding"
]
| []
| MIT License | 738 | [
"jose/jws.py",
"jose/jwk.py"
]
| [
"jose/jws.py",
"jose/jwk.py"
]
|
Juniper__py-junos-eznc-586 | d4629f636b915a02c722f895d11b9f187970a60b | 2016-09-02 06:14:24 | 1aa12dab9511d4e6a2636b72eab67aa45bf9fd70 | diff --git a/lib/jnpr/junos/utils/scp.py b/lib/jnpr/junos/utils/scp.py
index adc49fd3..8a150d6c 100644
--- a/lib/jnpr/junos/utils/scp.py
+++ b/lib/jnpr/junos/utils/scp.py
@@ -73,11 +73,8 @@ class SCP(object):
.. note:: This method uses the same username/password authentication
credentials as used by :class:`jnpr.junos.device.Device`.
-
- .. warning:: The :class:`jnpr.junos.device.Device` ``ssh_private_key_file``
- option is currently **not** supported.
-
- .. todo:: add support for ``ssh_private_key_file``.
+ It can also use ``ssh_private_key_file`` option if provided
+ to the :class:`jnpr.junos.device.Device`
:returns: SCPClient object
"""
@@ -92,6 +89,7 @@ class SCP(object):
# through a jumphost.
config = {}
+ kwargs = {}
ssh_config = getattr(junos, '_sshconf_path')
if ssh_config:
config = paramiko.SSHConfig()
@@ -101,6 +99,9 @@ class SCP(object):
if config.get("proxycommand"):
sock = paramiko.proxy.ProxyCommand(config.get("proxycommand"))
+ if self._junos._ssh_private_key_file is not None:
+ kwargs['key_filename']=self._junos._ssh_private_key_file
+
self._ssh.connect(hostname=junos._hostname,
port=(
22, int(
@@ -108,7 +109,7 @@ class SCP(object):
junos._hostname == 'localhost'],
username=junos._auth_user,
password=junos._auth_password,
- sock=sock
+ sock=sock, **kwargs
)
return SCPClient(self._ssh.get_transport(), **scpargs)
| Support for private key files in scp.py
As noted [here](https://github.com/Juniper/py-junos-eznc/blob/master/lib/jnpr/junos/utils/scp.py#L77), private key files aren't supported. I need this for a project I am working on.
Is it possible to get this fixed?
Cheers, | Juniper/py-junos-eznc | diff --git a/tests/unit/utils/test_scp.py b/tests/unit/utils/test_scp.py
index c0ddd22e..d72571c8 100644
--- a/tests/unit/utils/test_scp.py
+++ b/tests/unit/utils/test_scp.py
@@ -99,6 +99,22 @@ class TestScp(unittest.TestCase):
self.assertEqual(mock_scpclient.mock_calls[0][2]['progress'].__name__,
'_scp_progress')
+ @patch('ncclient.manager.connect')
+ @patch('paramiko.SSHClient.connect')
+ @patch('scp.SCPClient.put')
+ @patch('scp.SCPClient.__init__')
+ def test_ssh_private_key_file(self, mock_scpclient, mock_put,
+ mock_sshclient, mock_ncclient):
+ mock_scpclient.return_value = None
+ package = 'test.tgz'
+ dev = Device(host='1.1.1.1', user='user',
+ ssh_private_key_file='/Users/test/testkey')
+ dev.open(gather_facts=False)
+ with SCP(dev) as scp:
+ scp.put(package)
+ self.assertEqual(mock_sshclient.mock_calls[0][2]['key_filename'],
+ '/Users/test/testkey')
+
@contextmanager
def capture(self, command, *args, **kwargs):
out, sys.stdout = sys.stdout, StringIO()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"mock",
"nose",
"pep8",
"pyflakes",
"coveralls",
"ntc_templates",
"cryptography==3.2",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bcrypt==4.2.1
certifi @ file:///croot/certifi_1671487769961/work/certifi
cffi==1.15.1
charset-normalizer==3.4.1
coverage==6.5.0
coveralls==3.3.1
cryptography==44.0.2
docopt==0.6.2
exceptiongroup==1.2.2
future==1.0.0
idna==3.10
importlib-metadata==6.7.0
iniconfig==2.0.0
Jinja2==3.1.6
-e git+https://github.com/Juniper/py-junos-eznc.git@d4629f636b915a02c722f895d11b9f187970a60b#egg=junos_eznc
lxml==5.3.1
MarkupSafe==2.1.5
mock==5.2.0
ncclient==0.6.19
netaddr==1.3.0
nose==1.3.7
ntc_templates==4.0.1
packaging==24.0
paramiko==3.5.1
pep8==1.7.1
pluggy==1.2.0
pycparser==2.21
pyflakes==3.0.1
PyNaCl==1.5.0
pyserial==3.5
pytest==7.4.4
PyYAML==6.0.1
requests==2.31.0
scp==0.15.0
six==1.17.0
textfsm==1.1.3
tomli==2.0.1
typing_extensions==4.7.1
urllib3==2.0.7
zipp==3.15.0
| name: py-junos-eznc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bcrypt==4.2.1
- cffi==1.15.1
- charset-normalizer==3.4.1
- coverage==6.5.0
- coveralls==3.3.1
- cryptography==44.0.2
- docopt==0.6.2
- exceptiongroup==1.2.2
- future==1.0.0
- idna==3.10
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jinja2==3.1.6
- lxml==5.3.1
- markupsafe==2.1.5
- mock==5.2.0
- ncclient==0.6.19
- netaddr==1.3.0
- nose==1.3.7
- ntc-templates==4.0.1
- packaging==24.0
- paramiko==3.5.1
- pep8==1.7.1
- pluggy==1.2.0
- pycparser==2.21
- pyflakes==3.0.1
- pynacl==1.5.0
- pyserial==3.5
- pytest==7.4.4
- pyyaml==6.0.1
- requests==2.31.0
- scp==0.15.0
- six==1.17.0
- textfsm==1.1.3
- tomli==2.0.1
- typing-extensions==4.7.1
- urllib3==2.0.7
- zipp==3.15.0
prefix: /opt/conda/envs/py-junos-eznc
| [
"tests/unit/utils/test_scp.py::TestScp::test_ssh_private_key_file"
]
| [
"tests/unit/utils/test_scp.py::TestScp::test_scp_proxycommand"
]
| [
"tests/unit/utils/test_scp.py::TestScp::test_scp_close",
"tests/unit/utils/test_scp.py::TestScp::test_scp_context",
"tests/unit/utils/test_scp.py::TestScp::test_scp_open",
"tests/unit/utils/test_scp.py::TestScp::test_scp_progress",
"tests/unit/utils/test_scp.py::TestScp::test_scp_progress_true",
"tests/unit/utils/test_scp.py::TestScp::test_scp_user_def_progress",
"tests/unit/utils/test_scp.py::TestScp::test_scp_user_def_progress_args_2"
]
| []
| Apache License 2.0 | 739 | [
"lib/jnpr/junos/utils/scp.py"
]
| [
"lib/jnpr/junos/utils/scp.py"
]
|
|
napjon__krisk-38 | c92fc16f5d02d14478e578cc778639c39eff2de2 | 2016-09-02 06:40:41 | 19fb69026ff1339649fac5ad82548ccbdc7b8d19 | diff --git a/krisk/plot/__init__.py b/krisk/plot/__init__.py
index 0ed9354..14e24b8 100644
--- a/krisk/plot/__init__.py
+++ b/krisk/plot/__init__.py
@@ -7,7 +7,8 @@ def bar(df,
c=None,
how='count',
stacked=False,
- annotate=None):
+ annotate=None,
+ full=False):
"""
Parameters
----------
@@ -26,7 +27,9 @@ def bar(df,
Whether to stacked category on top of the other categories.
annotate: string, {'all',True} default to None
if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ category. if 'all' and stacked, annotate all category
+ full: boolean, default to False.
+ If true, set to full area stacked chart. Only work if stacked is True.
Returns
-------
@@ -35,7 +38,7 @@ def bar(df,
# TODO: add optional argument trendline
- return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,
+ return make_chart(df,type='bar',x=x,y=y,c=c,how=how,stacked=stacked,full=full,
annotate='top' if annotate == True else annotate)
@@ -46,7 +49,8 @@ def line(df,
how=None,
stacked=False,
area=False,
- annotate=None):
+ annotate=None,
+ full=False):
"""
Parameters
----------
@@ -65,13 +69,15 @@ def line(df,
Whether to stacked category on top of the other categories.
annotate: string, {'all',True} default to None
if True, annotate value on top of the plot element. If stacked is also True, annotate the last
- category. if 'all' and stacked, annotate all category
+ category. if 'all' and stacked, annotate all category
+ full: boolean, default to False.
+ If true, set to full area stacked chart. Only work if stacked is True.
Returns
-------
Chart Object
"""
- return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,
+ return make_chart(df,type='line',x=x,y=y,c=c,how=how,stacked=stacked,area=area,full=full,
annotate='top' if annotate == True else annotate)
diff --git a/krisk/plot/bar_line.py b/krisk/plot/bar_line.py
index 5884aa4..057ed1d 100644
--- a/krisk/plot/bar_line.py
+++ b/krisk/plot/bar_line.py
@@ -40,16 +40,18 @@ def set_bar_line_chart(chart, df, x, c, **kwargs):
s['areaStyle'] = {'normal': {}}
if kwargs['annotate'] == 'all':
+ s['label'] = deepcopy(d_annotate)
if chart_type == 'bar':
- d_ant = deepcopy(d_annotate)
- d_ant['normal']['position'] = 'inside'
- s['label'] = deepcopy(d_ant)
- else:
- s['label'] = deepcopy(d_annotate)
+ s['label']['normal']['position'] = 'inside'
+
+ if kwargs['type'] in ['line','bar'] and kwargs['full']:
+ chart.option['yAxis']['max'] = 1
if kwargs['annotate'] == 'top':
series[-1]['label'] = d_annotate
+
+
# TODO: make annotate receive all kinds supported in echarts.
@@ -69,6 +71,10 @@ def get_bar_line_data(df, x, c, y, **kwargs):
data = df.groupby(x)[y].agg(kwargs['how'])
else:
data = df[x].value_counts()
+
+
+ if c and kwargs['stacked'] and kwargs['full']:
+ data = data.div(data.sum(1),axis=0)
return data
diff --git a/krisk/plot/make_chart.py b/krisk/plot/make_chart.py
index ceab454..ce77d49 100644
--- a/krisk/plot/make_chart.py
+++ b/krisk/plot/make_chart.py
@@ -34,7 +34,7 @@ def insert_series_data(data, x, chart_type, chart, cat=None):
def make_chart(df, **kwargs):
from krisk.plot.bar_line import set_bar_line_chart
- from krisk.plot.points import set_scatter_chart
+ from krisk.plot.scatter_geo import set_scatter_chart
chart = Chart(**kwargs)
chart._kwargs_chart_['data_columns'] = df.columns
| Add full area (100%) for stacked bar and line chart | napjon/krisk | diff --git a/krisk/plot/points.py b/krisk/plot/scatter_geo.py
similarity index 100%
rename from krisk/plot/points.py
rename to krisk/plot/scatter_geo.py
diff --git a/krisk/tests/data/bar_x_c.json b/krisk/tests/data/bar_x_c.json
new file mode 100644
index 0000000..db81bc8
--- /dev/null
+++ b/krisk/tests/data/bar_x_c.json
@@ -0,0 +1,133 @@
+{
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ },
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "type": "bar",
+ "name": "Africa",
+ "stack": "continent",
+ "data": [
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1
+ ]
+ },
+ {
+ "type": "bar",
+ "name": "Americas",
+ "stack": "continent",
+ "data": [
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1
+ ]
+ },
+ {
+ "type": "bar",
+ "name": "Asia",
+ "stack": "continent",
+ "data": [
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1
+ ]
+ },
+ {
+ "type": "bar",
+ "name": "Europe",
+ "stack": "continent",
+ "data": [
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1
+ ]
+ },
+ {
+ "type": "bar",
+ "name": "Oceania",
+ "stack": "continent",
+ "data": [
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/krisk/tests/data/full_bar_line.json b/krisk/tests/data/full_bar_line.json
new file mode 100644
index 0000000..ea4fa05
--- /dev/null
+++ b/krisk/tests/data/full_bar_line.json
@@ -0,0 +1,130 @@
+{
+ "xAxis": {
+ "data": [
+ 1952,
+ 1957,
+ 1962,
+ 1967,
+ 1972,
+ 1977,
+ 1982,
+ 1987,
+ 1992,
+ 1997,
+ 2002,
+ 2007
+ ]
+ },
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ },
+ "legend": {
+ "data": [
+ "Africa",
+ "Americas",
+ "Asia",
+ "Europe",
+ "Oceania"
+ ]
+ },
+ "yAxis": {
+ "max": 1
+ },
+ "series": [
+ {
+ "name": "Africa",
+ "stack": "continent",
+ "data": [
+ 0.204,
+ 0.204,
+ 0.2,
+ 0.209,
+ 0.217,
+ 0.227,
+ 0.25,
+ 0.264,
+ 0.27,
+ 0.266,
+ 0.265,
+ 0.257
+ ]
+ },
+ {
+ "name": "Americas",
+ "stack": "continent",
+ "data": [
+ 0.392,
+ 0.39,
+ 0.386,
+ 0.375,
+ 0.364,
+ 0.357,
+ 0.366,
+ 0.359,
+ 0.349,
+ 0.331,
+ 0.325,
+ 0.311
+ ]
+ },
+ {
+ "name": "Asia",
+ "stack": "continent",
+ "data": [
+ 0.185,
+ 0.184,
+ 0.186,
+ 0.189,
+ 0.192,
+ 0.197,
+ 0.161,
+ 0.157,
+ 0.168,
+ 0.203,
+ 0.214,
+ 0.246
+ ]
+ },
+ {
+ "name": "Europe",
+ "stack": "continent",
+ "data": [
+ 0.028,
+ 0.029,
+ 0.031,
+ 0.032,
+ 0.033,
+ 0.033,
+ 0.035,
+ 0.035,
+ 0.034,
+ 0.031,
+ 0.03,
+ 0.028
+ ]
+ },
+ {
+ "name": "Oceania",
+ "stack": "continent",
+ "data": [
+ 0.191,
+ 0.193,
+ 0.196,
+ 0.194,
+ 0.194,
+ 0.186,
+ 0.189,
+ 0.185,
+ 0.18,
+ 0.17,
+ 0.166,
+ 0.158
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/krisk/tests/data/hist_x.json b/krisk/tests/data/hist_x.json
new file mode 100644
index 0000000..5bf0e15
--- /dev/null
+++ b/krisk/tests/data/hist_x.json
@@ -0,0 +1,47 @@
+{
+ "xAxis": {
+ "data": [
+ 28,
+ 34,
+ 39,
+ 44,
+ 49,
+ 55,
+ 60,
+ 65,
+ 70,
+ 75,
+ 81
+ ]
+ },
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ },
+ "legend": {
+ "data": []
+ },
+ "yAxis": {},
+ "series": [
+ {
+ "type": "bar",
+ "name": "lifeExp",
+ "data": [
+ 4,
+ 2,
+ 7,
+ 2,
+ 2,
+ 3,
+ 5,
+ 13,
+ 16,
+ 6
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/krisk/tests/data/simple_scatter.json b/krisk/tests/data/simple_scatter.json
new file mode 100644
index 0000000..15ef0b6
--- /dev/null
+++ b/krisk/tests/data/simple_scatter.json
@@ -0,0 +1,72 @@
+{
+ "xAxis": {
+ "max": 17876956,
+ "type": "value",
+ "name": "pop"
+ },
+ "tooltip": {
+ "axisPointer": {
+ "type": ""
+ }
+ },
+ "title": {
+ "text": ""
+ },
+ "legend": {
+ "data": []
+ },
+ "visualMap": [],
+ "yAxis": {
+ "max": 69,
+ "type": "value",
+ "name": "lifeExp"
+ },
+ "series": [
+ {
+ "type": "scatter",
+ "name": "pop",
+ "data": [
+ [
+ 9279525.0,
+ 43.077,
+ "Africa",
+ "Algeria",
+ 2449.008,
+ 1952
+ ],
+ [
+ 17876956.0,
+ 62.485,
+ "Americas",
+ "Argentina",
+ 5911.315,
+ 1952
+ ],
+ [
+ 8425333.0,
+ 28.801,
+ "Asia",
+ "Afghanistan",
+ 779.445,
+ 1952
+ ],
+ [
+ 1282697.0,
+ 55.23,
+ "Europe",
+ "Albania",
+ 1601.056,
+ 1952
+ ],
+ [
+ 8691212.0,
+ 69.12,
+ "Oceania",
+ "Australia",
+ 10039.596,
+ 1952
+ ]
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/krisk/tests/test_plot.py b/krisk/tests/test_plot.py
index 2fecdff..41aed67 100644
--- a/krisk/tests/test_plot.py
+++ b/krisk/tests/test_plot.py
@@ -3,11 +3,12 @@ import pytest
import krisk.plot as kk
DATA_DIR = 'krisk/tests/data'
+read_option_tests = lambda f: json.load(open(DATA_DIR + '/' + f, 'r'))
def test_bar(gapminder):
#Bar
- true_option = json.load(open(DATA_DIR + '/bar.json', 'r'))
+ true_option = read_option_tests('bar.json')
p = kk.bar(gapminder,
'year',
y='pop',
@@ -17,8 +18,14 @@ def test_bar(gapminder):
annotate=True)
assert p.get_option() == true_option
+ #Bar with x-axis and category
+ true_option = read_option_tests('bar_x_c.json')
+ p = kk.bar(gapminder,'year',c='continent',stacked=True)
+ assert p.get_option() == true_option
+
+
# Bar Annotate All
- true_option = json.load(open(DATA_DIR + '/bar_ann_all.json', 'r'))
+ true_option = read_option_tests('/bar_ann_all.json')
p = kk.bar(gapminder,
'year',
y='pop',
@@ -42,7 +49,7 @@ def test_bar(gapminder):
def test_line(gapminder):
- true_option = json.load(open(DATA_DIR + '/line.json', 'r'))
+ true_option = read_option_tests('line.json')
p = kk.line(
gapminder,
'year',
@@ -55,9 +62,28 @@ def test_line(gapminder):
assert p.get_option() == true_option
+def test_full_bar_line(gapminder):
+ bar = kk.bar(gapminder,'year',c='continent',y='pop',how='mean',stacked=True,full=True,annotate='all')
+ line = kk.line(gapminder,'year',c='continent',y='pop',how='mean',stacked=True,full=True,annotate='all')
+
+ for i in range(len(bar.option['series'])):
+ bar.option['series'][i].pop('type')
+ line.option['series'][i].pop('type')
+
+ bar.option['series'][i].pop('label')
+ line.option['series'][i].pop('label')
+
+ true_option = read_option_tests('full_bar_line.json')
+
+ assert bar.option == line.option == true_option
def test_hist(gapminder):
+ true_option = read_option_tests('hist_x.json')
+ p = kk.hist(gapminder,'lifeExp',bins=10)
+ assert p.get_option() == true_option
+
+
true_option = json.load(open(DATA_DIR + '/hist.json', 'r'))
p = kk.hist(
gapminder,
@@ -71,6 +97,10 @@ def test_hist(gapminder):
def test_scatter(gapminder):
+ # Simple Scatter
+ p = kk.scatter(gapminder[gapminder.year == 1952],'pop','lifeExp')
+ true_option = read_option_tests('simple_scatter.json')
+ assert p.get_option() == true_option
# Grouped Scatter
true_option = json.load(open(DATA_DIR + '/scatter.json', 'r'))
@@ -83,7 +113,7 @@ def test_scatter(gapminder):
assert p.get_option() == true_option
# Scatter
- true_option = json.load(open(DATA_DIR + '/scatter_single.json', 'r'))
+ true_option = read_option_tests('scatter_single.json')
p = kk.scatter(
gapminder[gapminder.year == 1952], 'lifeExp', 'gdpPercap', s='pop')
assert p.get_option() == true_option
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"pytest"
],
"pre_install": [],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
comm==0.1.4
contextvars==2.4
coverage==6.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
entrypoints==0.4
idna==3.10
immutables==0.19
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
-e git+https://github.com/napjon/krisk.git@c92fc16f5d02d14478e578cc778639c39eff2de2#egg=krisk
MarkupSafe==2.0.1
mistune==0.8.4
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pandocfilters==1.5.1
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
pyzmq==25.1.2
requests==2.27.1
Send2Trash==1.8.3
six==1.17.0
sniffio==1.2.0
terminado==0.12.1
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
zipp==3.6.0
| name: krisk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- entrypoints==0.4
- idna==3.10
- immutables==0.19
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- markupsafe==2.0.1
- mistune==0.8.4
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pandocfilters==1.5.1
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyzmq==25.1.2
- requests==2.27.1
- send2trash==1.8.3
- six==1.17.0
- sniffio==1.2.0
- terminado==0.12.1
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/krisk
| [
"krisk/tests/test_plot.py::test_bar",
"krisk/tests/test_plot.py::test_line",
"krisk/tests/test_plot.py::test_full_bar_line",
"krisk/tests/test_plot.py::test_hist",
"krisk/tests/test_plot.py::test_scatter"
]
| []
| []
| []
| BSD 3-Clause "New" or "Revised" License | 740 | [
"krisk/plot/make_chart.py",
"krisk/plot/bar_line.py",
"krisk/plot/__init__.py"
]
| [
"krisk/plot/make_chart.py",
"krisk/plot/bar_line.py",
"krisk/plot/__init__.py"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.